REGRESSION: overly aggressive handling of MPI groups

- the attempted reduction in bookkeeping (commit 068ab8ccc7) meant that
  worldComm no longer had a group from which sub-communicators could
  be spun off.

- do not force a reset of the PstreamBuffers receive positions

STYLE: UPstream::globalComm instead of '0'
Mark Olesen
2023-02-10 09:25:20 +01:00
parent 1dbb54c391
commit 74fd94dff3
3 changed files with 8 additions and 8 deletions
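
For context, a minimal plain-MPI sketch (illustrative only, not OpenFOAM
code) of what "spinning off" a sub-communicator involves: the group is
obtained from an existing communicator via MPI_Comm_group, and a subset
of its ranks then forms the new communicator. Without a stored group for
worldComm, there is nothing to pass to MPI_Comm_create.

    #include <mpi.h>

    int main(int argc, char** argv)
    {
        MPI_Init(&argc, &argv);

        // The group behind the world communicator; this is the handle
        // the regression left unset for worldComm
        MPI_Group worldGroup;
        MPI_Comm_group(MPI_COMM_WORLD, &worldGroup);

        // Spin off a sub-communicator containing rank 0 only
        const int ranks[] = { 0 };
        MPI_Group subGroup;
        MPI_Group_incl(worldGroup, 1, ranks, &subGroup);

        MPI_Comm subComm;
        MPI_Comm_create(MPI_COMM_WORLD, subGroup, &subComm);

        if (subComm != MPI_COMM_NULL)   // null on ranks outside the group
        {
            MPI_Comm_free(&subComm);
        }
        MPI_Group_free(&subGroup);
        MPI_Group_free(&worldGroup);

        MPI_Finalize();
        return 0;
    }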

PstreamBuffers.C

@@ -41,7 +41,6 @@ void Foam::PstreamBuffers::finalExchange
     // Could also check that it is not called twice
     // but that is used for overlapping send/recv (eg, overset)
     finishedSendsCalled_ = true;
-    recvPositions_ = Zero;

     if (commsType_ == UPstream::commsTypes::nonBlocking)
     {
@@ -102,7 +101,6 @@ void Foam::PstreamBuffers::finalExchange
     // Could also check that it is not called twice
     // but that is used for overlapping send/recv (eg, overset)
     finishedSendsCalled_ = true;
-    recvPositions_ = Zero;

     if (commsType_ == UPstream::commsTypes::nonBlocking)
     {
@@ -140,7 +138,6 @@ void Foam::PstreamBuffers::finalExchangeGatherScatter
     // Could also check that it is not called twice
     // but that is used for overlapping send/recv (eg, overset)
     finishedSendsCalled_ = true;
-    recvPositions_ = Zero;

     if (commsType_ == UPstream::commsTypes::nonBlocking)
     {
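
The hunks above drop the forced reset of recvPositions_. A hypothetical
sketch (invented names, assuming recvPositions_ tracks how far each
receive buffer has been consumed) of why an unconditional reset is
harmful when finishedSends() is called again during overlapping
send/recv, as the retained comment notes for overset:

    #include <cstddef>
    #include <vector>

    // Stand-in for one receive buffer with a consumption position
    struct recvSlot
    {
        std::vector<char> data;
        std::size_t pos = 0;    // how far the reader has consumed

        char get() { return data[pos++]; }
    };

    // Hypothetical finishedSends(): called once per exchange normally,
    // but possibly again while overlapped receives are being consumed.
    // An unconditional reset (the removed line) rewinds a reader that
    // is part-way through its data.
    void finishedSends(recvSlot& slot, bool forceReset)
    {
        if (forceReset)
        {
            slot.pos = 0;   // what the regression effectively did
        }
    }

    int main()
    {
        recvSlot slot{{'a', 'b', 'c'}};
        slot.get();                  // reader has consumed 'a'
        finishedSends(slot, true);   // forced reset discards that progress
        // the next get() now re-reads 'a' instead of continuing with 'b'
        return 0;
    }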

UPstream.C

@@ -535,12 +535,16 @@ void Foam::UPstream::allocatePstreamCommunicator

         PstreamGlobals::pendingMPIFree_[index] = PstreamGlobals::NonePending;
         PstreamGlobals::MPICommunicators_[index] = MPI_COMM_WORLD;
-        PstreamGlobals::MPIGroups_[index] = MPI_GROUP_NULL;

         // TBD: MPI_Comm_dup(MPI_COMM_WORLD, ...);
         // with pendingMPIFree_[index] = CommPending ...
         // Note: freePstreamCommunicator may need an update

+        MPI_Comm_group
+        (
+            PstreamGlobals::MPICommunicators_[index],
+            &PstreamGlobals::MPIGroups_[index]
+        );

         MPI_Comm_rank
         (
             PstreamGlobals::MPICommunicators_[index],
@@ -565,8 +569,8 @@ void Foam::UPstream::allocatePstreamCommunicator

         PstreamGlobals::pendingMPIFree_[index] = PstreamGlobals::NonePending;
         PstreamGlobals::MPICommunicators_[index] = MPI_COMM_SELF;
-        PstreamGlobals::MPIGroups_[index] = MPI_GROUP_NULL;
+        MPI_Comm_group(MPI_COMM_SELF, &PstreamGlobals::MPIGroups_[index]);

         MPI_Comm_rank(MPI_COMM_SELF, &myProcNo_[index]);

         // Number of ranks is always 1 (self communicator)
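
A side note on the restored calls: MPI_Comm_group hands back a group
handle that the caller owns, so the cleanup side eventually needs a
matching MPI_Group_free. A minimal sketch of the pairing in plain MPI,
with 'group' standing in for PstreamGlobals::MPIGroups_[index]:

    #include <mpi.h>

    int main(int argc, char** argv)
    {
        MPI_Init(&argc, &argv);

        MPI_Group group = MPI_GROUP_NULL;
        MPI_Comm_group(MPI_COMM_WORLD, &group);

        // ... derive sub-communicators from 'group' here ...

        // The matching release; MPI_Group_free also resets the
        // handle back to MPI_GROUP_NULL
        if (group != MPI_GROUP_NULL)
        {
            MPI_Group_free(&group);
        }

        MPI_Finalize();
        return 0;
    }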

syncObjects.C

@@ -90,9 +90,8 @@ void Foam::functionObjects::syncObjects::sync()

     // Note provision of explicit all-world communicator
     PstreamBuffers pBufs
     (
-        Pstream::commsTypes::nonBlocking,
-        UPstream::msgType(),
-        0
+        UPstream::globalComm,
+        UPstream::commsTypes::nonBlocking
     );

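
This last hunk is the STYLE item from the commit message: the buffers
for this explicitly all-world exchange are now constructed with the
named UPstream::globalComm constant instead of a bare '0', so the
"explicit all-world communicator" of the comment is visible at the
call site.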