ENH: cleaner separation of global and local world naming

- the UPstream::globalComm constant always refers to MPI_COMM_WORLD,
  whereas UPstream::worldComm may be MPI_COMM_WORLD (single world)
  or a dedicated local communicator (for multi-world).

- provide a Pstream-wrapped version of MPI_COMM_SELF,
  referenced as UPstream::selfComm

- UPstream::isUserComm(label)
  tests for additional user-defined communicators
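
For illustration only (not part of this commit): a minimal sketch of querying
the three predefined communicators, using just the UPstream accessors that the
tests below exercise (nProcs, isUserComm):

    #include "argList.H"
    #include "PstreamReduceOps.H"

    using namespace Foam;

    int main(int argc, char *argv[])
    {
        #include "setRootCase.H"

        // globalComm: always wraps MPI_COMM_WORLD
        Info<< "global nProcs: "
            << UPstream::nProcs(UPstream::globalComm) << nl;

        // worldComm: same as globalComm for a single world, otherwise
        // a dedicated local communicator (multi-world)
        Info<< "world nProcs: "
            << UPstream::nProcs(UPstream::worldComm) << nl;

        // selfComm: wraps MPI_COMM_SELF, so always a single rank
        Info<< "self nProcs: "
            << UPstream::nProcs(UPstream::selfComm) << nl;

        // Predefined communicators are not user-defined
        Info<< "isUserComm(worldComm): "
            << UPstream::isUserComm(UPstream::worldComm) << nl;

        return 0;
    }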
Author:    Mark Olesen
Date:      2022-11-29 11:37:29 +01:00
Committer: Andrew Heather
Parent:    7fe8bdcf99
Commit:    ffeef76d8f

14 changed files with 575 additions and 229 deletions


@@ -0,0 +1,3 @@
Test-parallel-comm0.C
EXE = $(FOAM_USER_APPBIN)/Test-parallel-comm0


@@ -0,0 +1,161 @@
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

    OpenFOAM is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with OpenFOAM.  If not, see <http://www.gnu.org/licenses/>.

Application
    Test-parallel-comm0

Description
    Very basic checks on standard communicators

\*---------------------------------------------------------------------------*/
#include "argList.H"
#include "Time.H"
#include "IPstream.H"
#include "OPstream.H"
#include "Pair.H"
#include "Tuple2.H"
#include "IOstreams.H"
#include "PstreamReduceOps.H"
using namespace Foam;
void printInfo(const label comm)
{
Info<< "comm:" << comm
<< " nprocs:" << UPstream::nProcs(comm)
<< " all:" << UPstream::allProcs(comm)
<< " sub:" << UPstream::subProcs(comm) << nl;
if (UPstream::selfComm == comm)
{
Pout<< "self all:" << UPstream::allProcs(comm)
<< " sub:" << UPstream::subProcs(comm) << nl;
}
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
int main(int argc, char *argv[])
{
    argList::noBanner();
    argList::noCheckProcessorDirectories();
    argList::addBoolOption("verbose", "Set debug level");

    // Capture manually. We need values before proper startup
    int nVerbose = 0;
    for (int argi = 1; argi < argc; ++argi)
    {
        if (strcmp(argv[argi], "-verbose") == 0)
        {
            ++nVerbose;
        }
    }
    UPstream::debug = nVerbose;

    #include "setRootCase.H"

    Info<< nl
        << "nProcs = " << UPstream::nProcs()
        << " with " << UPstream::nComms() << " predefined comm(s)" << nl;

    Info<< "worldComm : ";
    printInfo(UPstream::worldComm);

    Info<< "selfComm : ";
    printInfo(UPstream::selfComm);

    Info<< nl;

    // Reductions (using MPI intrinsics)
    {
        label val = Pstream::myProcNo(UPstream::worldComm);

        label worldVal = returnReduce
        (
            val,
            sumOp<label>(),
            Pstream::msgType(),
            UPstream::worldComm
        );

        label selfVal = returnReduce
        (
            val,
            sumOp<label>(),
            Pstream::msgType(),
            UPstream::selfComm
        );

        Pout<< "value " << val
            << " (world) reduced " << worldVal
            << " (self) reduced " << selfVal << nl;
    }
    // Reductions (not using MPI intrinsics)
    {
        Pair<label> val
        (
            Pstream::myProcNo(UPstream::worldComm),
            Pstream::myProcNo(UPstream::worldComm)
        );

        Pair<label> worldVal = val;
        Pstream::combineReduce
        (
            worldVal,
            minFirstEqOp<label>(),
            Pstream::msgType(),
            UPstream::worldComm
        );

        Pair<label> selfVal = val;
        Pstream::combineReduce
        (
            selfVal,
            minFirstEqOp<label>(),
            Pstream::msgType(),
            UPstream::selfComm
        );

        Pout<< "value " << val
            << " (world) reduced " << worldVal
            << " (self) reduced " << selfVal << nl;
    }
Pout<< "\nEnd\n" << endl;
return 0;
}
// ************************************************************************* //
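
Once compiled with wmake, a typical invocation of this test (a hypothetical
example, not taken from the commit) would be:

    mpirun -np 4 Test-parallel-comm0 -parallel -verbose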


@@ -0,0 +1,3 @@
Test-parallel-comm1.C
EXE = $(FOAM_USER_APPBIN)/Test-parallel-comm1


@@ -0,0 +1,2 @@
/* EXE_INC = */
/* EXE_LIBS = */


@@ -1,3 +0,0 @@
Test-parallel-communicators.C
EXE = $(FOAM_USER_APPBIN)/Test-parallel-communicators


@@ -5,7 +5,7 @@
     \\  /    A nd           | www.openfoam.com
      \\/     M anipulation  |
 -------------------------------------------------------------------------------
-    Copyright (C) 2019 OpenCFD Ltd.
+    Copyright (C) 2019-2022 OpenCFD Ltd.
 -------------------------------------------------------------------------------
 License
     This file is part of OpenFOAM.
@@ -47,16 +47,88 @@ using namespace Foam;
 bool startMPI()
 {
-    int nprocs = 0, rank = 0;
+    enum whichComm : int { worldComm = 0, selfComm, nullComm };
+
+    int nprocs[3];
+    int rank[3];
+    int group_nprocs[3];
+    int group_rank[3];
+    MPI_Group mpiGroup;
 
     MPI_Init(nullptr, nullptr);
-    MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
-    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+    MPI_Comm_size(MPI_COMM_WORLD, &nprocs[worldComm]);
+    MPI_Comm_rank(MPI_COMM_WORLD, &rank[worldComm]);
 
-    if (nprocs && rank == 0)
+    const bool isMaster = (rank[worldComm] == 0);
+    const string prefix = '[' + Foam::name(rank[worldComm]) + "] ";
+
+    MPI_Comm_group(MPI_COMM_WORLD, &mpiGroup);
+    MPI_Group_size(mpiGroup, &group_nprocs[worldComm]);
+    MPI_Group_rank(mpiGroup, &group_rank[worldComm]);
+
+    if (isMaster && nprocs[worldComm])
     {
-        std::cout<< nl << "Using MPI with " << nprocs << " procs" << nl << nl;
+        std::cout
+            << nl << "Using MPI with " << nprocs[worldComm]
+            << " procs, group:"
+            << group_nprocs[worldComm] << nl
+            << "World group: " << Foam::name(mpiGroup) << nl
+            << nl;
     }
+
+    MPI_Comm worldMpiComm;
+    MPI_Comm_dup(MPI_COMM_WORLD, &worldMpiComm);
+    MPI_Comm_group(MPI_COMM_WORLD, &mpiGroup);
+
+    if (isMaster && nprocs[worldComm])
+    {
+        std::cout
+            << "dup comm group: " << Foam::name(mpiGroup) << nl;
+    }
+
+    MPI_Comm_free(&worldMpiComm);
+
+    // May be a bad idea
+    MPI_Group_free(&mpiGroup);
+
+    MPI_Comm_size(MPI_COMM_SELF, &nprocs[selfComm]);
+    MPI_Comm_rank(MPI_COMM_SELF, &rank[selfComm]);
+
+    MPI_Comm_group(MPI_COMM_SELF, &mpiGroup);
+    MPI_Group_size(mpiGroup, &group_nprocs[selfComm]);
+    MPI_Group_rank(mpiGroup, &group_rank[selfComm]);
+
+    if (isMaster && nprocs[worldComm])
+    {
+        std::cout
+            << nl
+            << "Self group: " << Foam::name(mpiGroup) << nl;
+    }
+
+    // Should be a bad idea
+    MPI_Group_free(&mpiGroup);
+
+    // if (nprocs && isMaster)
+    {
+        std::cout
+            << prefix
+            << "Self: " << rank[selfComm] << " from " << nprocs[selfComm]
+            << " procs, group:"
+            << group_nprocs[selfComm] << nl;
+    }
+
+    if (isMaster)
+    {
+        std::cout
+            << "MPI_COMM_NULL: " << MPI_COMM_NULL << nl
+            << "MPI_COMM_SELF: " << MPI_COMM_SELF << nl
+            << "MPI_COMM_WORLD: " << MPI_COMM_WORLD << nl;
+    }
 
     return true;
@@ -77,8 +149,8 @@ string message()
 {
     return
     (
-        "rank " + name(Pstream::myProcNo())
-        + " / " + name(Pstream::nProcs()) + "\n"
+        "rank " + Foam::name(Pstream::myProcNo())
+        + " / " + Foam::name(Pstream::nProcs()) + "\n"
     );
 }
@@ -87,9 +159,21 @@ string message()
 int main(int argc, char *argv[])
 {
     argList::noBanner();
+    argList::noCheckProcessorDirectories();
+    argList::addBoolOption("verbose", "Set debug level");
 
-    UPstream::debug = 1;
+    // Need to capture manually, since we need values before proper startup
+    int nVerbose = 0;
+    for (int argi = 1; argi < argc; ++argi)
+    {
+        if (strcmp(argv[argi], "-verbose") == 0)
+        {
+            ++nVerbose;
+        }
+    }
+    UPstream::debug = nVerbose;
 
     startMPI();