Commit dfc849f1 authored by David Goz

mpi/comp_comm/src/jacobi_2D_mpi_comp_comm.c openmpi MPI_Isendrecv subversion

parent f354993e
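Background on the change: MPI 4.0 introduced MPI_Isendrecv, a nonblocking variant of MPI_Sendrecv that posts the send and the receive in a single call and returns a single request. Implementations that predate MPI 4.0 (for instance Open MPI releases that still report MPI-3.1 compliance) do not provide it, so the commit guards the call with the standard MPI_VERSION macro and falls back to separate MPI_Irecv/MPI_Isend pairs. What follows is a minimal, self-contained sketch of the same guard applied to a one-dimensional ring exchange; the ring topology and all variable names are illustrative only and not taken from the repository.

/* isendrecv_guard.c - illustrative sketch, not from the repository:
 * the commit's MPI_VERSION guard applied to a trivial ring exchange.
 * Build with: mpicc isendrecv_guard.c -o isendrecv_guard            */
#include <mpi.h>
#include <stdio.h>

int main(int argc, char **argv)
{
  MPI_Init(&argc, &argv);

  int rank, nranks;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  MPI_Comm_size(MPI_COMM_WORLD, &nranks);

  const int right = (rank + 1) % nranks;          /* rank we send to      */
  const int left  = (rank + nranks - 1) % nranks; /* rank we receive from */
  double sendbuf = (double)rank;
  double recvbuf = -1.0;

#if MPI_VERSION >= 4
  /* MPI 4.0 and later: one call posts both operations, yielding one request */
  MPI_Request request;
  MPI_Isendrecv(&sendbuf, 1, MPI_DOUBLE, right, 0,
                &recvbuf, 1, MPI_DOUBLE, left,  0,
                MPI_COMM_WORLD, &request);
  MPI_Wait(&request, MPI_STATUS_IGNORE);
#else
  /* pre-4.0 fallback: post the receive first, then the send, as the commit does */
  MPI_Request request[2];
  MPI_Irecv(&recvbuf, 1, MPI_DOUBLE, left,  0, MPI_COMM_WORLD, &request[0]);
  MPI_Isend(&sendbuf, 1, MPI_DOUBLE, right, 0, MPI_COMM_WORLD, &request[1]);
  MPI_Waitall(2, request, MPI_STATUSES_IGNORE);
#endif

  printf("rank %d received %g from rank %d\n", rank, recvbuf, left);

  MPI_Finalize();
  return 0;
}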
@@ -110,6 +110,15 @@ int main(int argc, char **argv)
       exit(EXIT_FAILURE);
     }

+  if (rank == MASTERTASK)
+    {
+#if MPI_VERSION >= 4
+      printf("\n\t Using MPI_Isendrecv \n");
+#else
+      printf("\n\t Using MPI_Irecv / MPI_Isend \n");
+#endif
+    }
+
   /* get the comm size */
   MPI_Comm_size(ThisTask.comm2d, &ThisTask.nranks);

@@ -455,10 +464,13 @@ void Jacobi_Communication(MyData **const restrict Phi,
   const int data_row_size = ThisTask->domain.dim[Y];

   /* First task: issue the communication */
-  MPI_Request request[4];
   MyData **const restrict buffer = Phi0;

+#if MPI_VERSION >= 4
+  MPI_Request request[4];
   MPI_Isendrecv(&buffer[ThisTask->domain.local_end[X]      ][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop,    0,
                 &buffer[ThisTask->domain.local_start[X] - 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 0,
                 ThisTask->comm2d, &request[0]);

@@ -475,6 +487,22 @@ void Jacobi_Communication(MyData **const restrict Phi,
                 &buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]   + 1], 1, column, ThisTask->nbrright, 3,
                 ThisTask->comm2d, &request[3]);

+#else
+  MPI_Request request[8];
+
+  MPI_Irecv(&buffer[ThisTask->domain.local_start[X] - 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 0, ThisTask->comm2d, &request[0]);
+  MPI_Irecv(&buffer[ThisTask->domain.local_end[X]   + 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop,    1, ThisTask->comm2d, &request[1]);
+  MPI_Irecv(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y] - 1], 1,             column,         ThisTask->nbrleft,   2, ThisTask->comm2d, &request[2]);
+  MPI_Irecv(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]   + 1], 1,             column,         ThisTask->nbrright,  3, ThisTask->comm2d, &request[3]);
+
+  MPI_Isend(&buffer[ThisTask->domain.local_end[X]      ][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop,    0, ThisTask->comm2d, &request[4]);
+  MPI_Isend(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 1, ThisTask->comm2d, &request[5]);
+  MPI_Isend(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]      ], 1,             column,         ThisTask->nbrright,  2, ThisTask->comm2d, &request[6]);
+  MPI_Isend(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y]    ], 1,             column,         ThisTask->nbrleft,   3, ThisTask->comm2d, &request[7]);
+#endif

   /**************************************** computation ****************************************/
   /* perform the computation with the local data (ghost cells are not required), */
   /* thus overlapping computation and communication                              */

@@ -489,8 +517,12 @@ void Jacobi_Communication(MyData **const restrict Phi,
   /*********************************************************************************************/

+#if MPI_VERSION >= 4
   /* wait for the data on the boundaries */
   MPI_Waitall(4, request, MPI_STATUSES_IGNORE);
+#else
+  MPI_Waitall(8, request, MPI_STATUSES_IGNORE);
+#endif

   /* nbrbottom */
   JacobiAlgorithm(Phi, Phi0, delta,
...
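The ordering visible in the hunks above is the classic computation/communication overlap: the halo exchange is posted with nonblocking calls, the interior of the local domain (which needs no ghost cells) is updated while the messages are in flight, and MPI_Waitall is reached only before the boundary sweeps that do consume the ghost values. Below is a minimal sketch of that ordering, reduced to a 1D Jacobi-style update with one ghost cell per side; all names (u, unew, below, above) are hypothetical, and only the structure mirrors Jacobi_Communication.

/* overlap_sketch.c - illustrative sketch, not from the repository:
 * post the halo exchange, compute the interior, wait, then compute
 * the boundary cells that need the ghost values.
 * Build with: mpicc overlap_sketch.c -o overlap_sketch             */
#include <mpi.h>
#include <stdio.h>

#define N 8 /* interior cells per rank; u[0] and u[N+1] are ghost cells */

int main(int argc, char **argv)
{
  MPI_Init(&argc, &argv);

  int rank, nranks;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  MPI_Comm_size(MPI_COMM_WORLD, &nranks);

  double u[N + 2], unew[N + 2];
  for (int i = 0; i < N + 2; i++) u[i] = (double)rank;

  /* MPI_PROC_NULL turns the exchange into a no-op at the domain edges */
  const int below = (rank > 0)          ? rank - 1 : MPI_PROC_NULL;
  const int above = (rank < nranks - 1) ? rank + 1 : MPI_PROC_NULL;

  /* 1. post the halo exchange (receives first, as in the commit) */
  MPI_Request request[4];
  MPI_Irecv(&u[0],     1, MPI_DOUBLE, below, 0, MPI_COMM_WORLD, &request[0]);
  MPI_Irecv(&u[N + 1], 1, MPI_DOUBLE, above, 1, MPI_COMM_WORLD, &request[1]);
  MPI_Isend(&u[N],     1, MPI_DOUBLE, above, 0, MPI_COMM_WORLD, &request[2]);
  MPI_Isend(&u[1],     1, MPI_DOUBLE, below, 1, MPI_COMM_WORLD, &request[3]);

  /* 2. update the interior (i = 2 .. N-1): no ghost cell is touched,
        so this work overlaps with the communication still in flight */
  for (int i = 2; i < N; i++)
    unew[i] = 0.5 * (u[i - 1] + u[i + 1]);

  /* 3. only now wait for the ghost cells ... */
  MPI_Waitall(4, request, MPI_STATUSES_IGNORE);

  /* 4. ... and finish with the two boundary cells that need them */
  unew[1] = 0.5 * (u[0] + u[2]);
  unew[N] = 0.5 * (u[N - 1] + u[N + 1]);

  printf("rank %d: unew[1] = %g, unew[N] = %g\n", rank, unew[1], unew[N]);

  MPI_Finalize();
  return 0;
}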