Skip to content

Commit a785353

Browse files
authored
Merge pull request #1163 from su2code/replace_bsend_with_isend
Bsend -> Isend on ActDisk and Nearfield methods
2 parents 1ea9a2b + bb5c7d4 commit a785353

6 files changed

Lines changed: 39 additions & 180 deletions

File tree

Common/include/option_structure.hpp

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -74,7 +74,6 @@ enum SU2_COMPONENT {
7474

7575
const unsigned int EXIT_DIVERGENCE = 2; /*!< \brief Exit code (divergence). */
7676

77-
const unsigned int BUFSIZE = 3000000; /*!< \brief MPI buffer. */
7877
const unsigned int MAX_PARAMETERS = 10; /*!< \brief Maximum number of parameters for a design variable definition. */
7978
const unsigned int MAX_NUMBER_PERIODIC = 10; /*!< \brief Maximum number of periodic boundary conditions. */
8079
const unsigned int MAX_STRING_SIZE = 200; /*!< \brief Maximum number of domains. */

Common/include/parallelization/mpi_structure.hpp

Lines changed: 6 additions & 22 deletions
Original file line number | Diff line number | Diff line change
@@ -144,10 +144,6 @@ class CBaseMPIWrapper {
144144
winMinRankErrorInUse = true;
145145
}
146146

147-
static inline void Buffer_attach(void* buffer, int size) { MPI_Buffer_attach(buffer, size); }
148-
149-
static inline void Buffer_detach(void* buffer, int* size) { MPI_Buffer_detach(buffer, size); }
150-
151147
static inline void Comm_rank(Comm comm, int* rank) { MPI_Comm_rank(comm, rank); }
152148

153149
static inline void Comm_size(Comm comm, int* size) { MPI_Comm_size(comm, size); }
@@ -176,6 +172,8 @@ class CBaseMPIWrapper {
176172

177173
static inline void Wait(Request* request, Status* status) { MPI_Wait(request, status); }
178174

175+
static inline int Request_free(Request *request) { return MPI_Request_free(request); }
176+
179177
static inline void Testall(int count, Request* array_of_requests, int* flag, Status* array_of_statuses) {
180178
MPI_Testall(count, array_of_requests, flag, array_of_statuses);
181179
}
@@ -198,10 +196,6 @@ class CBaseMPIWrapper {
198196
MPI_Bcast(buf, count, datatype, root, comm);
199197
}
200198

201-
static inline void Bsend(const void* buf, int count, Datatype datatype, int dest, int tag, Comm comm) {
202-
MPI_Bsend(buf, count, datatype, dest, tag, comm);
203-
}
204-
205199
static inline void Reduce(const void* sendbuf, void* recvbuf, int count, Datatype datatype, Op op, int root,
206200
Comm comm) {
207201
MPI_Reduce(sendbuf, recvbuf, count, datatype, op, root, comm);
@@ -351,10 +345,6 @@ class CMediMPIWrapper : public CBaseMPIWrapper {
351345
}
352346
}
353347

354-
static inline void Buffer_attach(void* buffer, int size) { AMPI_Buffer_attach(buffer, size); }
355-
356-
static inline void Buffer_detach(void* buffer, int* size) { AMPI_Buffer_detach(buffer, size); }
357-
358348
static inline void Comm_rank(Comm comm, int* rank) { AMPI_Comm_rank(convertComm(comm), rank); }
359349

360350
static inline void Comm_size(Comm comm, int* size) { AMPI_Comm_size(convertComm(comm), size); }
@@ -385,6 +375,8 @@ class CMediMPIWrapper : public CBaseMPIWrapper {
385375

386376
static inline void Wait(SU2_MPI::Request* request, Status* status) { AMPI_Wait(request, status); }
387377

378+
static inline int Request_free(Request *request) { return AMPI_Request_free(request); }
379+
388380
static inline void Testall(int count, Request* array_of_requests, int* flag, Status* array_of_statuses) {
389381
AMPI_Testall(count, array_of_requests, flag, array_of_statuses);
390382
}
@@ -409,10 +401,6 @@ class CMediMPIWrapper : public CBaseMPIWrapper {
409401
AMPI_Bcast(buf, count, convertDatatype(datatype), root, convertComm(comm));
410402
}
411403

412-
static inline void Bsend(const void* buf, int count, Datatype datatype, int dest, int tag, Comm comm) {
413-
AMPI_Bsend(buf, count, convertDatatype(datatype), dest, tag, convertComm(comm));
414-
}
415-
416404
static inline void Reduce(const void* sendbuf, void* recvbuf, int count, Datatype datatype, Op op, int root,
417405
Comm comm) {
418406
AMPI_Reduce(sendbuf, recvbuf, count, convertDatatype(datatype), convertOp(op), root, convertComm(comm));
@@ -574,10 +562,6 @@ class CBaseMPIWrapper {
574562

575563
static inline void Init_thread(int* argc, char*** argv, int required, int* provided) { *provided = required; }
576564

577-
static inline void Buffer_attach(void* buffer, int size) {}
578-
579-
static inline void Buffer_detach(void* buffer, int* size) {}
580-
581565
static inline void Barrier(Comm comm) {}
582566

583567
static inline void Abort(Comm comm, int error) { exit(EXIT_FAILURE); }
@@ -595,6 +579,8 @@ class CBaseMPIWrapper {
595579

596580
static inline void Wait(Request* request, Status* status) {}
597581

582+
static inline int Request_free(Request *request) { return 0; }
583+
598584
static inline void Waitall(int nrequests, Request* request, Status* status) {}
599585

600586
static inline void Waitany(int nrequests, Request* request, int* index, Status* status) {}
@@ -605,8 +591,6 @@ class CBaseMPIWrapper {
605591

606592
static inline void Bcast(void* buf, int count, Datatype datatype, int root, Comm comm) {}
607593

608-
static inline void Bsend(const void* buf, int count, Datatype datatype, int dest, int tag, Comm comm) {}
609-
610594
static inline void Reduce(const void* sendbuf, void* recvbuf, int count, Datatype datatype, Op op, int root,
611595
Comm comm) {
612596
CopyData(sendbuf, recvbuf, count, datatype);

SU2_CFD/src/SU2_CFD.cpp

Lines changed: 0 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -61,16 +61,13 @@ int main(int argc, char *argv[]) {
6161
/*--- MPI initialization, and buffer setting ---*/
6262

6363
#ifdef HAVE_MPI
64-
int buffsize;
65-
char *buffptr;
6664
#ifdef HAVE_OMP
6765
int required = use_thread_mult? MPI_THREAD_MULTIPLE : MPI_THREAD_FUNNELED;
6866
int provided;
6967
SU2_MPI::Init_thread(&argc, &argv, required, &provided);
7068
#else
7169
SU2_MPI::Init(&argc, &argv);
7270
#endif
73-
SU2_MPI::Buffer_attach( malloc(BUFSIZE), BUFSIZE );
7471
SU2_Comm MPICommunicator(MPI_COMM_WORLD);
7572
#else
7673
SU2_Comm MPICommunicator(0);
@@ -178,11 +175,7 @@ int main(int argc, char *argv[]) {
178175
#endif
179176

180177
/*--- Finalize MPI parallelization. ---*/
181-
#ifdef HAVE_MPI
182-
SU2_MPI::Buffer_detach(&buffptr, &buffsize);
183-
free(buffptr);
184178
SU2_MPI::Finalize();
185-
#endif
186179

187180
return EXIT_SUCCESS;
188181

SU2_CFD/src/solvers/CAdjEulerSolver.cpp

Lines changed: 16 additions & 74 deletions
Original file line number | Diff line number | Diff line change
@@ -403,14 +403,10 @@ void CAdjEulerSolver::Set_MPI_ActDisk(CSolver **solver_container, CGeometry *geo
403403
unsigned short iVar, iMarker, jMarker;
404404
long nDomain = 0, iDomain, jDomain;
405405

406-
#ifdef HAVE_MPI
407-
408406
/*--- MPI status and request arrays for non-blocking communications ---*/
409407

410-
SU2_MPI::Status status, status_;
411-
412-
413-
#endif
408+
SU2_MPI::Status status;
409+
SU2_MPI::Request req;
414410

415411
/*--- Define buffer vector interior domain ---*/
416412

@@ -480,13 +476,10 @@ void CAdjEulerSolver::Set_MPI_ActDisk(CSolver **solver_container, CGeometry *geo
480476

481477
if (rank != iDomain) {
482478

483-
#ifdef HAVE_MPI
484-
485479
/*--- Communicate the counts to iDomain with non-blocking sends ---*/
486480

487-
SU2_MPI::Bsend(&nPointTotal_s[iDomain], 1, MPI_UNSIGNED_LONG, iDomain, iDomain, MPI_COMM_WORLD);
488-
489-
#endif
481+
SU2_MPI::Isend(&nPointTotal_s[iDomain], 1, MPI_UNSIGNED_LONG, iDomain, iDomain, MPI_COMM_WORLD, &req);
482+
SU2_MPI::Request_free(&req);
490483

491484
} else {
492485

@@ -508,15 +501,11 @@ void CAdjEulerSolver::Set_MPI_ActDisk(CSolver **solver_container, CGeometry *geo
508501

509502
if (rank != jDomain) {
510503

511-
#ifdef HAVE_MPI
512-
513504
/*--- Recv the data by probing for the current sender, jDomain,
514505
first and then receiving the values from it. ---*/
515506

516507
SU2_MPI::Recv(&nPointTotal_r[jDomain], 1, MPI_UNSIGNED_LONG, jDomain, rank, MPI_COMM_WORLD, &status);
517508

518-
#endif
519-
520509
}
521510
}
522511

@@ -525,12 +514,8 @@ void CAdjEulerSolver::Set_MPI_ActDisk(CSolver **solver_container, CGeometry *geo
525514

526515
/*--- Wait for the non-blocking sends to complete. ---*/
527516

528-
#ifdef HAVE_MPI
529-
530517
SU2_MPI::Barrier(MPI_COMM_WORLD);
531518

532-
#endif
533-
534519
/*--- Initialize the counters for the larger send buffers (by domain) ---*/
535520

536521
PointTotal_Counter = 0;
@@ -578,17 +563,13 @@ void CAdjEulerSolver::Set_MPI_ActDisk(CSolver **solver_container, CGeometry *geo
578563

579564
if (iDomain != rank) {
580565

581-
#ifdef HAVE_MPI
582-
583566
/*--- Communicate the coordinates, global index, colors, and element
584567
date to iDomain with non-blocking sends. ---*/
585568

586-
SU2_MPI::Bsend(&Buffer_Send_AdjVar[PointTotal_Counter*(nVar+3)],
569+
SU2_MPI::Isend(&Buffer_Send_AdjVar[PointTotal_Counter*(nVar+3)],
587570
nPointTotal_s[iDomain]*(nVar+3), MPI_DOUBLE, iDomain,
588-
iDomain, MPI_COMM_WORLD);
589-
590-
#endif
591-
571+
iDomain, MPI_COMM_WORLD, &req);
572+
SU2_MPI::Request_free(&req);
592573
}
593574

594575
else {
@@ -631,12 +612,8 @@ void CAdjEulerSolver::Set_MPI_ActDisk(CSolver **solver_container, CGeometry *geo
631612

632613
/*--- Wait for the non-blocking sends to complete. ---*/
633614

634-
#ifdef HAVE_MPI
635-
636615
SU2_MPI::Barrier(MPI_COMM_WORLD);
637616

638-
#endif
639-
640617
/*--- The next section begins the recv of all data for the interior
641618
points/elements in the mesh. First, create the domain structures for
642619
the points on this rank. First, we recv all of the point data ---*/
@@ -655,7 +632,7 @@ void CAdjEulerSolver::Set_MPI_ActDisk(CSolver **solver_container, CGeometry *geo
655632
/*--- Receive the buffers with the coords, global index, and colors ---*/
656633

657634
SU2_MPI::Recv(Buffer_Receive_AdjVar, nPointTotal_r[iDomain]*(nVar+3) , MPI_DOUBLE,
658-
iDomain, rank, MPI_COMM_WORLD, &status_);
635+
iDomain, rank, MPI_COMM_WORLD, &status);
659636

660637
/*--- Loop over all of the points that we have recv'd and store the
661638
coords, global index vertex and markers ---*/
@@ -687,12 +664,8 @@ void CAdjEulerSolver::Set_MPI_ActDisk(CSolver **solver_container, CGeometry *geo
687664

688665
/*--- Wait for the non-blocking sends to complete. ---*/
689666

690-
#ifdef HAVE_MPI
691-
692667
SU2_MPI::Barrier(MPI_COMM_WORLD);
693668

694-
#endif
695-
696669
/*--- Free all of the memory used for communicating points and elements ---*/
697670

698671
delete[] Buffer_Send_AdjVar;
@@ -712,14 +685,10 @@ void CAdjEulerSolver::Set_MPI_Nearfield(CGeometry *geometry, CConfig *config) {
712685
unsigned short iVar, iMarker, jMarker;
713686
long nDomain = 0, iDomain, jDomain;
714687

715-
#ifdef HAVE_MPI
716-
717688
/*--- MPI status and request arrays for non-blocking communications ---*/
718689

719-
SU2_MPI::Status status, status_;
720-
721-
722-
#endif
690+
SU2_MPI::Status status;
691+
SU2_MPI::Request req;
723692

724693
/*--- Define buffer vector interior domain ---*/
725694

@@ -742,12 +711,8 @@ void CAdjEulerSolver::Set_MPI_Nearfield(CGeometry *geometry, CConfig *config) {
742711

743712
nDomain = size;
744713

745-
#ifdef HAVE_MPI
746-
747714
SU2_MPI::Barrier(MPI_COMM_WORLD);
748715

749-
#endif
750-
751716
/*--- This loop gets the array sizes of points for each
752717
rank to send to each other rank. ---*/
753718

@@ -796,13 +761,10 @@ void CAdjEulerSolver::Set_MPI_Nearfield(CGeometry *geometry, CConfig *config) {
796761

797762
if (rank != iDomain) {
798763

799-
#ifdef HAVE_MPI
800-
801764
/*--- Communicate the counts to iDomain with non-blocking sends ---*/
802765

803-
SU2_MPI::Bsend(&nPointTotal_s[iDomain], 1, MPI_UNSIGNED_LONG, iDomain, iDomain, MPI_COMM_WORLD);
804-
805-
#endif
766+
SU2_MPI::Isend(&nPointTotal_s[iDomain], 1, MPI_UNSIGNED_LONG, iDomain, iDomain, MPI_COMM_WORLD, &req);
767+
SU2_MPI::Request_free(&req);
806768

807769
} else {
808770

@@ -824,15 +786,11 @@ void CAdjEulerSolver::Set_MPI_Nearfield(CGeometry *geometry, CConfig *config) {
824786

825787
if (rank != jDomain) {
826788

827-
#ifdef HAVE_MPI
828-
829789
/*--- Recv the data by probing for the current sender, jDomain,
830790
first and then receiving the values from it. ---*/
831791

832792
SU2_MPI::Recv(&nPointTotal_r[jDomain], 1, MPI_UNSIGNED_LONG, jDomain, rank, MPI_COMM_WORLD, &status);
833793

834-
#endif
835-
836794
}
837795
}
838796

@@ -841,12 +799,8 @@ void CAdjEulerSolver::Set_MPI_Nearfield(CGeometry *geometry, CConfig *config) {
841799

842800
/*--- Wait for the non-blocking sends to complete. ---*/
843801

844-
#ifdef HAVE_MPI
845-
846802
SU2_MPI::Barrier(MPI_COMM_WORLD);
847803

848-
#endif
849-
850804
/*--- Initialize the counters for the larger send buffers (by domain) ---*/
851805

852806
PointTotal_Counter = 0;
@@ -893,17 +847,13 @@ void CAdjEulerSolver::Set_MPI_Nearfield(CGeometry *geometry, CConfig *config) {
893847

894848
if (iDomain != rank) {
895849

896-
#ifdef HAVE_MPI
897-
898850
/*--- Communicate the coordinates, global index, colors, and element
899851
date to iDomain with non-blocking sends. ---*/
900852

901-
SU2_MPI::Bsend(&Buffer_Send_AdjVar[PointTotal_Counter*(nVar+3)],
853+
SU2_MPI::Isend(&Buffer_Send_AdjVar[PointTotal_Counter*(nVar+3)],
902854
nPointTotal_s[iDomain]*(nVar+3), MPI_DOUBLE, iDomain,
903-
iDomain, MPI_COMM_WORLD);
904-
905-
#endif
906-
855+
iDomain, MPI_COMM_WORLD, &req);
856+
SU2_MPI::Request_free(&req);
907857
}
908858

909859
else {
@@ -946,12 +896,8 @@ void CAdjEulerSolver::Set_MPI_Nearfield(CGeometry *geometry, CConfig *config) {
946896

947897
/*--- Wait for the non-blocking sends to complete. ---*/
948898

949-
#ifdef HAVE_MPI
950-
951899
SU2_MPI::Barrier(MPI_COMM_WORLD);
952900

953-
#endif
954-
955901
/*--- The next section begins the recv of all data for the interior
956902
points/elements in the mesh. First, create the domain structures for
957903
the points on this rank. First, we recv all of the point data ---*/
@@ -970,7 +916,7 @@ void CAdjEulerSolver::Set_MPI_Nearfield(CGeometry *geometry, CConfig *config) {
970916
/*--- Receive the buffers with the coords, global index, and colors ---*/
971917

972918
SU2_MPI::Recv(Buffer_Receive_AdjVar, nPointTotal_r[iDomain]*(nVar+3) , MPI_DOUBLE,
973-
iDomain, rank, MPI_COMM_WORLD, &status_);
919+
iDomain, rank, MPI_COMM_WORLD, &status);
974920

975921

976922
/*--- Loop over all of the points that we have recv'd and store the
@@ -1003,12 +949,8 @@ void CAdjEulerSolver::Set_MPI_Nearfield(CGeometry *geometry, CConfig *config) {
1003949

1004950
/*--- Wait for the non-blocking sends to complete. ---*/
1005951

1006-
#ifdef HAVE_MPI
1007-
1008952
SU2_MPI::Barrier(MPI_COMM_WORLD);
1009953

1010-
#endif
1011-
1012954
/*--- Free all of the memory used for communicating points and elements ---*/
1013955

1014956
delete[] Buffer_Send_AdjVar;

0 commit comments

Comments (0)