ENH: communicators: initial version - extended Pstream API
@@ -37,6 +37,7 @@ Foam::UIPstream::UIPstream
     DynamicList<char>& externalBuf,
     label& externalBufPosition,
     const int tag,
+    const label comm,
     const bool clearAtEnd,
     streamFormat format,
     versionNumber version
@@ -48,6 +49,7 @@ Foam::UIPstream::UIPstream
     externalBuf_(externalBuf),
     externalBufPosition_(externalBufPosition),
     tag_(tag),
+    comm_(comm),
     clearAtEnd_(clearAtEnd),
     messageSize_(0)
 {
@@ -60,6 +62,7 @@ Foam::UIPstream::UIPstream
         "DynamicList<char>&,\n"
         "label&,\n"
         "const int,\n"
+        "const label,\n"
         "const bool,\n"
         "streamFormat,\n"
         "versionNumber\n"
@@ -68,11 +71,7 @@ Foam::UIPstream::UIPstream
 }
 
 
-Foam::UIPstream::UIPstream
-(
-    const int fromProcNo,
-    PstreamBuffers& buffers
-)
+Foam::UIPstream::UIPstream(const int fromProcNo, PstreamBuffers& buffers)
 :
     UPstream(buffers.commsType_),
     Istream(buffers.format_, buffers.version_),
@@ -80,6 +79,7 @@ Foam::UIPstream::UIPstream
     externalBuf_(buffers.recvBuf_[fromProcNo]),
     externalBufPosition_(buffers.recvBufPos_[fromProcNo]),
     tag_(buffers.tag_),
+    comm_(buffers.comm_),
     clearAtEnd_(true),
     messageSize_(0)
 {
@@ -102,7 +102,8 @@ Foam::label Foam::UIPstream::read
     const int fromProcNo,
     char* buf,
     const std::streamsize bufSize,
-    const int tag
+    const int tag,
+    const label communicator
 )
 {
     notImplemented
@@ -113,7 +114,8 @@ Foam::label Foam::UIPstream::read
         "const int fromProcNo,"
         "char* buf,"
         "const label bufSize,"
-        "const int tag"
+        "const int tag,"
+        "const label communicator"
         ")"
     );
 
@@ -36,7 +36,8 @@ bool Foam::UOPstream::write
     const int toProcNo,
     const char* buf,
     const std::streamsize bufSize,
-    const int tag
+    const int tag,
+    const label communicator
 )
 {
     notImplemented
@@ -47,7 +48,8 @@ bool Foam::UOPstream::write
         "const int fromProcNo,"
         "char* buf,"
         "const label bufSize,"
-        "const int tag"
+        "const int tag,"
+        "const label communicator"
         ")"
     );
 
@@ -55,28 +55,41 @@ void Foam::UPstream::abort()
 }
 
 
-void Foam::reduce(scalar&, const sumOp<scalar>&, const int)
+void Foam::reduce(scalar&, const sumOp<scalar>&, const int, const label)
 {}
 
 
-void Foam::reduce(scalar&, const minOp<scalar>&, const int)
+void Foam::reduce(scalar&, const minOp<scalar>&, const int, const label)
 {}
 
 
-void Foam::reduce(vector2D&, const sumOp<vector2D>&, const int)
+void Foam::reduce(vector2D&, const sumOp<vector2D>&, const int, const label)
 {}
 
 
 void Foam::sumReduce
 (
-    scalar& Value,
-    label& Count,
-    const int tag
+    scalar&,
+    label&,
+    const int,
+    const label
 )
 {}
 
 
-void Foam::reduce(scalar&, const sumOp<scalar>&, const int, label&)
+void Foam::reduce(scalar&, const sumOp<scalar>&, const int, const label, label&)
 {}
+
+
+void Foam::UPstream::allocatePstreamCommunicator
+(
+    const label,
+    const label
+)
+{}
+
+
+void Foam::UPstream::freePstreamCommunicator(const label)
+{}
 
 
@@ -2,7 +2,7 @@
   =========                 |
   \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
    \\    /   O peration     |
-    \\  /    A nd           | Copyright (C) 2011 OpenFOAM Foundation
+    \\  /    A nd           | Copyright (C) 2013 OpenFOAM Foundation
     \\/     M anipulation  |
 -------------------------------------------------------------------------------
 License
@@ -37,6 +37,35 @@ namespace Foam
 DynamicList<MPI_Request> PstreamGlobals::outstandingRequests_;
 //! \endcond
 
+// Allocated communicators.
+//! \cond fileScope
+DynamicList<MPI_Comm> PstreamGlobals::MPICommunicators_;
+DynamicList<MPI_Group> PstreamGlobals::MPIGroups_;
+//! \endcond
+
+void PstreamGlobals::checkCommunicator
+(
+    const label comm,
+    const label otherProcNo
+)
+{
+    if
+    (
+        comm < 0
+     || comm >= PstreamGlobals::MPICommunicators_.size()
+    )
+    {
+        FatalErrorIn
+        (
+            "PstreamGlobals::checkCommunicator(const label, const label)"
+        )   << "otherProcNo:" << otherProcNo << " : illegal communicator "
+            << comm << endl
+            << "Communicator should be within range 0.."
+            << PstreamGlobals::MPICommunicators_.size()-1 << abort(FatalError);
+    }
+}
+
+
 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
 
 } // End namespace Foam
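
The PstreamGlobals additions above keep the MPI handles in flat lists indexed by a small integer label (index 0 being the world communicator once allocated), with checkCommunicator guarding every lookup. Below is a minimal standalone sketch of that registry-and-bounds-check pattern in plain MPI C++; the names and error text here are illustrative, not the OpenFOAM API.

// Illustrative sketch only: label-indexed communicator registry with a
// bounds check, mirroring PstreamGlobals above.
#include <mpi.h>
#include <cstdio>
#include <cstdlib>
#include <vector>

static std::vector<MPI_Comm> communicators;  // index 0: MPI_COMM_WORLD

static void checkCommunicator(int comm, int otherProcNo)
{
    // Same guard as PstreamGlobals::checkCommunicator in the hunk above
    if (comm < 0 || comm >= static_cast<int>(communicators.size()))
    {
        std::fprintf(stderr,
            "otherProcNo:%d : illegal communicator %d (valid range 0..%d)\n",
            otherProcNo, comm, static_cast<int>(communicators.size()) - 1);
        MPI_Abort(MPI_COMM_WORLD, 1);
    }
}

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);
    communicators.push_back(MPI_COMM_WORLD);  // slot 0: the world
    checkCommunicator(0, /*otherProcNo=*/1);  // fine
    // checkCommunicator(5, 1);               // would abort: out of range
    MPI_Finalize();
}
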
@@ -2,7 +2,7 @@
   =========                 |
   \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
    \\    /   O peration     |
-    \\  /    A nd           | Copyright (C) 2011 OpenFOAM Foundation
+    \\  /    A nd           | Copyright (C) 2013 OpenFOAM Foundation
     \\/     M anipulation  |
 -------------------------------------------------------------------------------
 License
@@ -26,7 +26,7 @@ Namespace
 
 Description
     Global functions and variables for working with parallel streams,
-    but principally for gamma/mpi
+    but principally for mpi
 
 SourceFiles
     PstreamGlobals.C
@@ -54,6 +54,13 @@ namespace PstreamGlobals
 
 extern DynamicList<MPI_Request> outstandingRequests_;
 
+
+// Current communicators. First element will be MPI_COMM_WORLD
+extern DynamicList<MPI_Comm> MPICommunicators_;
+extern DynamicList<MPI_Group> MPIGroups_;
+
+void checkCommunicator(const label, const label procNo);
+
 };
@@ -41,6 +41,7 @@ Foam::UIPstream::UIPstream
     DynamicList<char>& externalBuf,
     label& externalBufPosition,
     const int tag,
+    const label comm,
     const bool clearAtEnd,
     streamFormat format,
     versionNumber version
@@ -52,6 +53,7 @@ Foam::UIPstream::UIPstream
     externalBuf_(externalBuf),
     externalBufPosition_(externalBufPosition),
     tag_(tag),
+    comm_(comm),
     clearAtEnd_(clearAtEnd),
     messageSize_(0)
 {
@@ -80,7 +82,7 @@ Foam::UIPstream::UIPstream
         // and set it
         if (!wantedSize)
         {
-            MPI_Probe(procID(fromProcNo_), tag_, MPI_COMM_WORLD, &status);
+            MPI_Probe(fromProcNo_, tag_, MPI_COMM_WORLD, &status);
             MPI_Get_count(&status, MPI_BYTE, &messageSize_);
 
             externalBuf_.setCapacity(messageSize_);
@@ -99,7 +101,8 @@ Foam::UIPstream::UIPstream
             fromProcNo_,
             externalBuf_.begin(),
             wantedSize,
-            tag_
+            tag_,
+            comm_
         );
 
         // Set addressed size. Leave actual allocated memory intact.
@@ -121,6 +124,7 @@ Foam::UIPstream::UIPstream(const int fromProcNo, PstreamBuffers& buffers)
     externalBuf_(buffers.recvBuf_[fromProcNo]),
     externalBufPosition_(buffers.recvBufPos_[fromProcNo]),
     tag_(buffers.tag_),
+    comm_(buffers.comm_),
     clearAtEnd_(true),
     messageSize_(0)
 {
@@ -167,7 +171,7 @@ Foam::UIPstream::UIPstream(const int fromProcNo, PstreamBuffers& buffers)
         // and set it
        if (!wantedSize)
         {
-            MPI_Probe(procID(fromProcNo_), tag_, MPI_COMM_WORLD, &status);
+            MPI_Probe(fromProcNo_, tag_, MPI_COMM_WORLD, &status);
             MPI_Get_count(&status, MPI_BYTE, &messageSize_);
 
             externalBuf_.setCapacity(messageSize_);
@@ -186,7 +190,8 @@ Foam::UIPstream::UIPstream(const int fromProcNo, PstreamBuffers& buffers)
             fromProcNo_,
             externalBuf_.begin(),
             wantedSize,
-            tag_
+            tag_,
+            comm_
         );
 
         // Set addressed size. Leave actual allocated memory intact.
@@ -208,13 +213,15 @@ Foam::label Foam::UIPstream::read
     const int fromProcNo,
     char* buf,
     const std::streamsize bufSize,
-    const int tag
+    const int tag,
+    const label communicator
 )
 {
     if (debug)
     {
         Pout<< "UIPstream::read : starting read from:" << fromProcNo
-            << " tag:" << tag << " wanted size:" << label(bufSize)
+            << " tag:" << tag << " comm:" << communicator
+            << " wanted size:" << label(bufSize)
             << " commsType:" << UPstream::commsTypeNames[commsType]
             << Foam::endl;
     }
@@ -230,9 +237,9 @@ Foam::label Foam::UIPstream::read
             buf,
             bufSize,
             MPI_PACKED,
-            procID(fromProcNo),
+            fromProcNo,
             tag,
-            MPI_COMM_WORLD,
+            PstreamGlobals::MPICommunicators_[communicator],
             &status
         )
     )
@@ -286,9 +293,9 @@ Foam::label Foam::UIPstream::read
             buf,
             bufSize,
             MPI_PACKED,
-            procID(fromProcNo),
+            fromProcNo,
             tag,
-            MPI_COMM_WORLD,
+            PstreamGlobals::MPICommunicators_[communicator],
             &request
        )
    )
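
In both constructors above, when wantedSize is zero the code probes for the pending message and sizes the receive buffer from the probe status before receiving. A self-contained sketch of that probe-then-receive pattern in plain MPI C++ (the message text and use of std::vector are illustrative); run with mpirun -np 2:

// Illustrative sketch: receive a message of unknown size by probing first,
// as the UIPstream constructors above do.
#include <mpi.h>
#include <cstdio>
#include <vector>

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);

    int rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    const int tag = 1;

    if (rank == 0)
    {
        const char msg[] = "hello from rank 0";
        MPI_Send(msg, sizeof(msg), MPI_BYTE, 1, tag, MPI_COMM_WORLD);
    }
    else if (rank == 1)
    {
        // Block until a matching message is available, query its size,
        // size the buffer, then receive
        MPI_Status status;
        int messageSize = 0;
        MPI_Probe(0, tag, MPI_COMM_WORLD, &status);
        MPI_Get_count(&status, MPI_BYTE, &messageSize);

        std::vector<char> buf(messageSize);
        MPI_Recv(buf.data(), messageSize, MPI_BYTE, 0, tag, MPI_COMM_WORLD,
                 MPI_STATUS_IGNORE);
        std::printf("received %d bytes: %s\n", messageSize, buf.data());
    }

    MPI_Finalize();
}
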
@@ -39,17 +39,22 @@ bool Foam::UOPstream::write
     const int toProcNo,
     const char* buf,
     const std::streamsize bufSize,
-    const int tag
+    const int tag,
+    const label communicator
 )
 {
     if (debug)
     {
         Pout<< "UOPstream::write : starting write to:" << toProcNo
-            << " tag:" << tag << " size:" << label(bufSize)
+            << " tag:" << tag
+            << " comm:" << communicator << " size:" << label(bufSize)
             << " commsType:" << UPstream::commsTypeNames[commsType]
             << Foam::endl;
     }
 
+    PstreamGlobals::checkCommunicator(communicator, toProcNo);
+
+
     bool transferFailed = true;
 
     if (commsType == blocking)
@@ -59,9 +64,9 @@ bool Foam::UOPstream::write
             const_cast<char*>(buf),
             bufSize,
             MPI_PACKED,
-            procID(toProcNo),
+            toProcNo,   //procID(toProcNo),
             tag,
-            MPI_COMM_WORLD
+            PstreamGlobals::MPICommunicators_[communicator] //MPI_COMM_WORLD
         );
 
         if (debug)
@@ -79,9 +84,9 @@ bool Foam::UOPstream::write
             const_cast<char*>(buf),
             bufSize,
             MPI_PACKED,
-            procID(toProcNo),
+            toProcNo,   //procID(toProcNo),
             tag,
-            MPI_COMM_WORLD
+            PstreamGlobals::MPICommunicators_[communicator] //MPI_COMM_WORLD
         );
 
         if (debug)
@@ -101,9 +106,9 @@ bool Foam::UOPstream::write
             const_cast<char*>(buf),
             bufSize,
             MPI_PACKED,
-            procID(toProcNo),
+            toProcNo,   //procID(toProcNo),
             tag,
-            MPI_COMM_WORLD,
+            PstreamGlobals::MPICommunicators_[communicator],//MPI_COMM_WORLD,
             &request
         );
 
@@ -2,7 +2,7 @@
   =========                 |
   \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
    \\    /   O peration     |
-    \\  /    A nd           | Copyright (C) 2011-2012 OpenFOAM Foundation
+    \\  /    A nd           | Copyright (C) 2011-2013 OpenFOAM Foundation
     \\/     M anipulation  |
 -------------------------------------------------------------------------------
 License
@@ -55,7 +55,6 @@ void Foam::UPstream::addValidParOptions(HashTable<string>& validParOptions)
     validParOptions.insert("p4wd", "directory");
     validParOptions.insert("p4amslave", "");
     validParOptions.insert("p4yourname", "hostname");
-    validParOptions.insert("GAMMANP", "number of instances");
     validParOptions.insert("machinefile", "machine file");
 }
 
@@ -66,12 +65,13 @@ bool Foam::UPstream::init(int& argc, char**& argv)
 
     int numprocs;
     MPI_Comm_size(MPI_COMM_WORLD, &numprocs);
-    MPI_Comm_rank(MPI_COMM_WORLD, &myProcNo_);
+    int myRank;
+    MPI_Comm_rank(MPI_COMM_WORLD, &myRank);
 
     if (debug)
     {
         Pout<< "UPstream::init : initialised with numProcs:" << numprocs
-            << " myProcNo:" << myProcNo_ << endl;
+            << " myRank:" << myRank << endl;
     }
 
     if (numprocs <= 1)
@@ -82,14 +82,9 @@ bool Foam::UPstream::init(int& argc, char**& argv)
             << Foam::abort(FatalError);
     }
 
-    procIDs_.setSize(numprocs);
-
-    forAll(procIDs_, procNo)
-    {
-        procIDs_[procNo] = procNo;
-    }
-
-    setParRun();
+    // Initialise parallel structure
+    setParRun(numprocs);
 
 #   ifndef SGIMPI
     string bufferSizeName = getEnv("MPI_BUFFER_SIZE");
@@ -116,11 +111,9 @@ bool Foam::UPstream::init(int& argc, char**& argv)
     char processorName[MPI_MAX_PROCESSOR_NAME];
 
     MPI_Get_processor_name(processorName, &processorNameLen);
     processorName[processorNameLen] = '\0';
 
     //signal(SIGABRT, stop);
-
-    // Now that nprocs is known construct communication tables.
-    initCommunicationSchedule();
+    Pout<< "Processor name:" << processorName << endl;
 
     return true;
 }
@@ -153,6 +146,15 @@ void Foam::UPstream::exit(int errnum)
             << endl;
     }
 
+    // Clean mpi communicators
+    forAll(myProcNo_, communicator)
+    {
+        if (myProcNo_[communicator] != -1)
+        {
+            freePstreamCommunicator(communicator);
+        }
+    }
+
     if (errnum == 0)
     {
         MPI_Finalize();
@@ -171,21 +173,39 @@ void Foam::UPstream::abort()
 }
 
 
-void Foam::reduce(scalar& Value, const sumOp<scalar>& bop, const int tag)
+void Foam::reduce
+(
+    scalar& Value,
+    const sumOp<scalar>& bop,
+    const int tag,
+    const label communicator
+)
 {
-    allReduce(Value, 1, MPI_SCALAR, MPI_SUM, bop, tag);
+    allReduce(Value, 1, MPI_SCALAR, MPI_SUM, bop, tag, communicator);
 }
 
 
-void Foam::reduce(scalar& Value, const minOp<scalar>& bop, const int tag)
+void Foam::reduce
+(
+    scalar& Value,
+    const minOp<scalar>& bop,
+    const int tag,
+    const label communicator
+)
 {
-    allReduce(Value, 1, MPI_SCALAR, MPI_MIN, bop, tag);
+    allReduce(Value, 1, MPI_SCALAR, MPI_MIN, bop, tag, communicator);
 }
 
 
-void Foam::reduce(vector2D& Value, const sumOp<vector2D>& bop, const int tag)
+void Foam::reduce
+(
+    vector2D& Value,
+    const sumOp<vector2D>& bop,
+    const int tag,
+    const label communicator
+)
 {
-    allReduce(Value, 2, MPI_SCALAR, MPI_SUM, bop, tag);
+    allReduce(Value, 2, MPI_SCALAR, MPI_SUM, bop, tag, communicator);
 }
 
 
@@ -193,11 +213,12 @@ void Foam::sumReduce
 (
     scalar& Value,
     label& Count,
-    const int tag
+    const int tag,
+    const label communicator
 )
 {
     vector2D twoScalars(Value, scalar(Count));
-    reduce(twoScalars, sumOp<vector2D>());
+    reduce(twoScalars, sumOp<vector2D>(), tag, communicator);
 
     Value = twoScalars.x();
     Count = twoScalars.y();
@@ -209,6 +230,7 @@ void Foam::reduce
     scalar& Value,
     const sumOp<scalar>& bop,
     const int tag,
+    const label communicator,
     label& requestID
 )
 {
@@ -225,7 +247,7 @@ void Foam::reduce
             MPI_SCALAR,
             MPI_SUM,
             0,              //root
-            MPI_COMM_WORLD,
+            PstreamGlobals::MPICommunicators_[communicator],
             &request
         );
 
@@ -233,12 +255,141 @@ void Foam::reduce
     PstreamGlobals::outstandingRequests_.append(request);
 #else
     // Non-blocking not yet implemented in mpi
-    reduce(Value, bop, tag);
+    reduce(Value, bop, tag, communicator);
     requestID = -1;
 #endif
 }
 
 
+void Foam::UPstream::allocatePstreamCommunicator
+(
+    const label parentIndex,
+    const label index
+)
+{
+    if (index == PstreamGlobals::MPIGroups_.size())
+    {
+        // Extend storage with dummy values
+        MPI_Group newGroup;
+        PstreamGlobals::MPIGroups_.append(newGroup);
+        MPI_Comm newComm;
+        PstreamGlobals::MPICommunicators_.append(newComm);
+    }
+    else if (index > PstreamGlobals::MPIGroups_.size())
+    {
+        FatalErrorIn
+        (
+            "UPstream::allocatePstreamCommunicator\n"
+            "(\n"
+            "    const label parentIndex,\n"
+            "    const labelList& subRanks\n"
+            ")\n"
+        )   << "PstreamGlobals out of sync with UPstream data. Problem."
+            << Foam::exit(FatalError);
+    }
+
+
+    if (parentIndex == -1)
+    {
+        // Allocate world communicator
+
+        //std::cout
+        //    << "MPI : Allocating world communicator at index " << index
+        //    << std::endl;
+
+        if (index != UPstream::worldComm)
+        {
+            FatalErrorIn
+            (
+                "UPstream::allocateCommunicator\n"
+                "(\n"
+                "    const label parentIndex,\n"
+                "    const labelList& subRanks\n"
+                ")\n"
+            )   << "world communicator should always be index "
+                << UPstream::worldComm << Foam::exit(FatalError);
+        }
+
+        PstreamGlobals::MPICommunicators_[index] = MPI_COMM_WORLD;
+        MPI_Comm_group(MPI_COMM_WORLD, &PstreamGlobals::MPIGroups_[index]);
+        MPI_Comm_rank
+        (
+            PstreamGlobals::MPICommunicators_[index],
+            &myProcNo_[index]
+        );
+
+        // Set the number of processes to the actual number
+        int numProcs;
+        MPI_Comm_size(PstreamGlobals::MPICommunicators_[index], &numProcs);
+        procIDs_[index] = identity(numProcs);
+    }
+    else
+    {
+        //std::cout
+        //    << "MPI : Allocating new communicator at index " << index
+        //    << " from parent " << parentIndex
+        //    << std::endl;
+
+        // Create new group
+        MPI_Group_incl
+        (
+            PstreamGlobals::MPIGroups_[parentIndex],
+            procIDs_[index].size(),
+            procIDs_[index].begin(),
+            &PstreamGlobals::MPIGroups_[index]
+        );
+
+        //std::cout
+        //    << "MPI : New group " << long(PstreamGlobals::MPIGroups_[index])
+        //    << std::endl;
+
+
+        // Create new communicator
+        MPI_Comm_create
+        (
+            PstreamGlobals::MPICommunicators_[parentIndex],
+            PstreamGlobals::MPIGroups_[index],
+            &PstreamGlobals::MPICommunicators_[index]
+        );
+
+        if (PstreamGlobals::MPICommunicators_[index] == MPI_COMM_NULL)
+        {
+            //std::cout
+            //    << "MPI : NULL : not in group"
+            //    << std::endl;
+            myProcNo_[index] = -1;
+        }
+        else
+        {
+            //std::cout
+            //    << "MPI : New comm "
+            //    << long(PstreamGlobals::MPICommunicators_[index])
+            //    << std::endl;
+            MPI_Comm_rank
+            (
+                PstreamGlobals::MPICommunicators_[index],
+                &myProcNo_[index]
+            );
+        }
+    }
+
+    //std::cout<< "MPI : I am rank " << myProcNo_[index] << std::endl;
+}
+
+
+void Foam::UPstream::freePstreamCommunicator(const label communicator)
+{
+    if (communicator != UPstream::worldComm)
+    {
+        if (PstreamGlobals::MPICommunicators_[communicator] != MPI_COMM_NULL)
+        {
+            MPI_Comm_free(&PstreamGlobals::MPICommunicators_[communicator]);
+        }
+        MPI_Group_free(&PstreamGlobals::MPIGroups_[communicator]);
+    }
+}
+
+
 Foam::label Foam::UPstream::nRequests()
 {
     return PstreamGlobals::outstandingRequests_.size();
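
allocatePstreamCommunicator above builds a child communicator by extracting the parent communicator's group, selecting the wanted sub-ranks with MPI_Group_incl, and calling MPI_Comm_create; ranks outside the group receive MPI_COMM_NULL and are recorded with a process number of -1. A standalone sketch of the same MPI sequence (the {0, 1} sub-rank set is a hypothetical example; run with at least two processes):

// Illustrative sketch: derive a sub-communicator over a subset of the
// parent's ranks, mirroring allocatePstreamCommunicator above.
#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);

    MPI_Group worldGroup;
    MPI_Comm_group(MPI_COMM_WORLD, &worldGroup);

    // Sub-communicator over ranks 0 and 1 only (hypothetical subset)
    const int subRanks[] = {0, 1};
    MPI_Group subGroup;
    MPI_Group_incl(worldGroup, 2, subRanks, &subGroup);

    // Collective over the parent: every rank calls it, members get the comm
    MPI_Comm subComm;
    MPI_Comm_create(MPI_COMM_WORLD, subGroup, &subComm);

    if (subComm == MPI_COMM_NULL)
    {
        // This rank is not in the group, mirroring myProcNo_[index] = -1
        std::printf("not in sub-communicator\n");
    }
    else
    {
        int subRank;
        MPI_Comm_rank(subComm, &subRank);
        std::printf("rank in sub-communicator: %d\n", subRank);
        MPI_Comm_free(&subComm);
    }

    MPI_Group_free(&subGroup);
    MPI_Group_free(&worldGroup);

    MPI_Finalize();
}
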
@@ -52,7 +52,8 @@ void allReduce
     MPI_Datatype MPIType,
     MPI_Op op,
     const BinaryOp& bop,
-    const int tag
+    const int tag,
+    const label communicator
 );
 
 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
 
@@ -35,7 +35,8 @@ void Foam::allReduce
     MPI_Datatype MPIType,
     MPI_Op MPIOp,
     const BinaryOp& bop,
-    const int tag
+    const int tag,
+    const label communicator
 )
 {
     if (!UPstream::parRun())
@@ -43,14 +44,14 @@ void Foam::allReduce
         return;
     }
 
-    if (UPstream::nProcs() <= UPstream::nProcsSimpleSum)
+    if (UPstream::nProcs(communicator) <= UPstream::nProcsSimpleSum)
     {
-        if (UPstream::master())
+        if (UPstream::master(communicator))
         {
             for
             (
                 int slave=UPstream::firstSlave();
-                slave<=UPstream::lastSlave();
+                slave<=UPstream::lastSlave(communicator);
                 slave++
             )
             {
@@ -63,9 +64,9 @@ void Foam::allReduce
                         &value,
                         MPICount,
                         MPIType,
-                        UPstream::procID(slave),
+                        slave,  //UPstream::procID(slave),
                         tag,
-                        MPI_COMM_WORLD,
+                        PstreamGlobals::MPICommunicators_[communicator],
                         MPI_STATUS_IGNORE
                     )
                 )
@@ -97,9 +98,9 @@ void Foam::allReduce
                     &Value,
                     MPICount,
                     MPIType,
-                    UPstream::procID(UPstream::masterNo()),
+                    UPstream::masterNo(),//UPstream::procID(masterNo()),
                     tag,
-                    MPI_COMM_WORLD
+                    PstreamGlobals::MPICommunicators_[communicator]
                 )
             )
            {
@@ -120,12 +121,12 @@ void Foam::allReduce
         }
 
 
-        if (UPstream::master())
+        if (UPstream::master(communicator))
        {
             for
             (
                 int slave=UPstream::firstSlave();
-                slave<=UPstream::lastSlave();
+                slave<=UPstream::lastSlave(communicator);
                 slave++
             )
             {
@@ -136,9 +137,9 @@ void Foam::allReduce
                         &Value,
                         MPICount,
                         MPIType,
-                        UPstream::procID(slave),
+                        slave,      //UPstream::procID(slave),
                         tag,
-                        MPI_COMM_WORLD
+                        PstreamGlobals::MPICommunicators_[communicator]
                     )
                 )
                 {
@@ -167,9 +168,9 @@ void Foam::allReduce
                    &Value,
                     MPICount,
                     MPIType,
-                    UPstream::procID(UPstream::masterNo()),
+                    UPstream::masterNo(),//UPstream::procID(masterNo()),
                     tag,
-                    MPI_COMM_WORLD,
+                    PstreamGlobals::MPICommunicators_[communicator],
                     MPI_STATUS_IGNORE
                 )
             )
@@ -193,7 +194,15 @@ void Foam::allReduce
     else
     {
         Type sum;
-        MPI_Allreduce(&Value, &sum, MPICount, MPIType, MPIOp, MPI_COMM_WORLD);
+        MPI_Allreduce
+        (
+            &Value,
+            &sum,
+            MPICount,
+            MPIType,
+            MPIOp,
+            PstreamGlobals::MPICommunicators_[communicator]
+        );
         Value = sum;
     }
 }
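
allReduce above picks its strategy from the communicator size: at or below UPstream::nProcsSimpleSum it gathers values onto the master with point-to-point messages and returns the result to the slaves, otherwise it delegates to MPI_Allreduce. A condensed sketch of that control flow for a scalar sum in plain MPI C++ (MPI_Bcast stands in for the per-slave return sends the code uses; names are illustrative except nProcsSimpleSum):

// Illustrative sketch of the size-switched reduction in Foam::allReduce.
#include <mpi.h>
#include <cstdio>

double allReduceSum(double value, MPI_Comm comm, int tag, int nProcsSimpleSum)
{
    int nProcs, myRank;
    MPI_Comm_size(comm, &nProcs);
    MPI_Comm_rank(comm, &myRank);

    if (nProcs <= nProcsSimpleSum)
    {
        if (myRank == 0)
        {
            // Master accumulates one value from every slave...
            for (int slave = 1; slave < nProcs; slave++)
            {
                double v;
                MPI_Recv(&v, 1, MPI_DOUBLE, slave, tag, comm,
                         MPI_STATUS_IGNORE);
                value += v;
            }
        }
        else
        {
            MPI_Send(&value, 1, MPI_DOUBLE, 0, tag, comm);
        }
        // ...then the result goes back to every process
        MPI_Bcast(&value, 1, MPI_DOUBLE, 0, comm);
        return value;
    }

    // Large communicator: let the MPI library do the whole reduction
    double sum;
    MPI_Allreduce(&value, &sum, 1, MPI_DOUBLE, MPI_SUM, comm);
    return sum;
}

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);
    const double result = allReduceSum(1.0, MPI_COMM_WORLD, /*tag=*/1,
                                       /*nProcsSimpleSum=*/16);
    int rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    if (rank == 0) std::printf("sum = %g\n", result);
    MPI_Finalize();
}
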