diff --git a/jacobi/mpi/comp_comm/src/jacobi_2D_mpi_comp_comm.c b/jacobi/mpi/comp_comm/src/jacobi_2D_mpi_comp_comm.c
index b63dca19721624732a77f8b109acd9922bb84ec6..1b620e9773cdf0a2e63b16db51c9c45572c3dc56 100644
--- a/jacobi/mpi/comp_comm/src/jacobi_2D_mpi_comp_comm.c
+++ b/jacobi/mpi/comp_comm/src/jacobi_2D_mpi_comp_comm.c
@@ -110,6 +110,15 @@ int main(int argc, char **argv)
       exit(EXIT_FAILURE);
     }
 
+  if (rank == MASTERTASK)
+    {
+/* MPI_Isendrecv was introduced in MPI-4.0, i.e. MPI_VERSION == 4;
+   '>= 4' (not '> 4') selects it on all MPI-4.x and later libraries. */
+#if MPI_VERSION >= 4
+      printf("\n\t Using MPI_Isendrecv \n");
+#else
+      printf("\n\t Using MPI_Irecv / MPI_Isend \n");
+#endif
+    }
+
   /* get the comm size */
   MPI_Comm_size(ThisTask.comm2d, &ThisTask.nranks);
   
@@ -455,25 +464,44 @@ void Jacobi_Communication(MyData      **const restrict Phi,
   const int data_row_size = ThisTask->domain.dim[Y];
   
   /* First task: issue the communication */
-  MPI_Request request[4];
 
   MyData **const restrict buffer = Phi0;
-  
+
+/* MPI_Isendrecv is an MPI-4.0 addition (MPI_VERSION == 4), so the guard
+   must be '>= 4'; '> 4' would wrongly take the Irecv/Isend fallback on
+   every current MPI-4.x implementation. */
+#if MPI_VERSION >= 4
+
+  MPI_Request request[4];
+
   MPI_Isendrecv(&buffer[ThisTask->domain.local_end[X]      ][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop,    0,
-		&buffer[ThisTask->domain.local_start[X] - 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 0,
-		ThisTask->comm2d, &request[0]);
+  		&buffer[ThisTask->domain.local_start[X] - 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 0,
+  		ThisTask->comm2d, &request[0]);
 
   MPI_Isendrecv(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 1,
-		&buffer[ThisTask->domain.local_end[X]   + 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop,    1,
-		ThisTask->comm2d, &request[1]);
+  		&buffer[ThisTask->domain.local_end[X]   + 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop,    1,
+  		ThisTask->comm2d, &request[1]);
 
   MPI_Isendrecv(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]      ], 1,             column,         ThisTask->nbrright,  2,
-		&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y] - 1], 1,             column,         ThisTask->nbrleft,   2,
-		ThisTask->comm2d, &request[2]);
+  		&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y] - 1], 1,             column,         ThisTask->nbrleft,   2,
+  		ThisTask->comm2d, &request[2]);
   
   MPI_Isendrecv(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y]    ], 1,             column,         ThisTask->nbrleft,   3,
-		&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]   + 1], 1,             column,         ThisTask->nbrright,  3,
-		ThisTask->comm2d, &request[3]);
+  		&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]   + 1], 1,             column,         ThisTask->nbrright,  3,
+  		ThisTask->comm2d, &request[3]);
+
+#else
+  
+  MPI_Request request[8];
+
+  MPI_Irecv(&buffer[ThisTask->domain.local_start[X] - 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 0, ThisTask->comm2d, &request[0]);
+  MPI_Irecv(&buffer[ThisTask->domain.local_end[X]   + 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop   , 1, ThisTask->comm2d, &request[1]);
+  MPI_Irecv(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y] - 1], 1,             column,         ThisTask->nbrleft,   2, ThisTask->comm2d, &request[2]);
+  MPI_Irecv(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]   + 1], 1,             column,         ThisTask->nbrright,  3, ThisTask->comm2d, &request[3]);
+  
+  MPI_Isend(&buffer[ThisTask->domain.local_end[X]      ][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop,    0, ThisTask->comm2d, &request[4]);
+  MPI_Isend(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 1, ThisTask->comm2d, &request[5]);
+  MPI_Isend(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]      ], 1,             column,         ThisTask->nbrright,  2, ThisTask->comm2d, &request[6]);
+  MPI_Isend(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y]    ], 1,             column,         ThisTask->nbrleft,   3, ThisTask->comm2d, &request[7]);
+
+#endif
 
   /**************************************** computation ****************************************/
   /* perform the computation with the local data, (i.e. ghost cells are not required) */
@@ -488,9 +516,13 @@ void Jacobi_Communication(MyData      **const restrict Phi,
   JacobiAlgorithm(Phi, Phi0, delta, jbeg, jend, ibeg, iend, error);
 
   /*********************************************************************************************/
-  
+
+/* Guard must match the request-array branch above: MPI_Isendrecv (and its
+   4 combined requests) exists from MPI-4.0 on, i.e. MPI_VERSION >= 4. */
+#if MPI_VERSION >= 4
   /* wait the data on the boundaries */
   MPI_Waitall(4, request, MPI_STATUSES_IGNORE);
+#else
+  MPI_Waitall(8, request, MPI_STATUSES_IGNORE);
+#endif
   
   /*  nbrbottom */
   JacobiAlgorithm(Phi, Phi0, delta,