Merge remote-tracking branch 'origin/feature/procAgglom'

Conflicts:
	src/OpenFOAM/meshes/lduMesh/lduMesh.H
Author: mattijs
Date: 2013-05-02 14:44:59 +01:00
185 changed files with 10067 additions and 1238 deletions

src/Pstream/dummy/UIPread.C

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
-\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+\\ / A nd | Copyright (C) 2011-2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -37,6 +37,7 @@ Foam::UIPstream::UIPstream
DynamicList<char>& externalBuf,
label& externalBufPosition,
const int tag,
+const label comm,
const bool clearAtEnd,
streamFormat format,
versionNumber version
@@ -48,6 +49,7 @@ Foam::UIPstream::UIPstream
externalBuf_(externalBuf),
externalBufPosition_(externalBufPosition),
tag_(tag),
+comm_(comm),
clearAtEnd_(clearAtEnd),
messageSize_(0)
{
@@ -60,6 +62,7 @@ Foam::UIPstream::UIPstream
"DynamicList<char>&,\n"
"label&,\n"
"const int,\n"
+"const label,\n"
"const bool,\n"
"streamFormat,\n"
"versionNumber\n"
@@ -68,11 +71,7 @@ Foam::UIPstream::UIPstream
}
-Foam::UIPstream::UIPstream
-(
-const int fromProcNo,
-PstreamBuffers& buffers
-)
+Foam::UIPstream::UIPstream(const int fromProcNo, PstreamBuffers& buffers)
:
UPstream(buffers.commsType_),
Istream(buffers.format_, buffers.version_),
@@ -80,6 +79,7 @@ Foam::UIPstream::UIPstream
externalBuf_(buffers.recvBuf_[fromProcNo]),
externalBufPosition_(buffers.recvBufPos_[fromProcNo]),
tag_(buffers.tag_),
+comm_(buffers.comm_),
clearAtEnd_(true),
messageSize_(0)
{
@@ -102,7 +102,8 @@ Foam::label Foam::UIPstream::read
const int fromProcNo,
char* buf,
const std::streamsize bufSize,
-const int tag
+const int tag,
+const label communicator
)
{
notImplemented
@@ -113,7 +114,8 @@ Foam::label Foam::UIPstream::read
"const int fromProcNo,"
"char* buf,"
"const label bufSize,"
-"const int tag"
+"const int tag,"
+"const label communicator"
")"
);

src/Pstream/dummy/UOPwrite.C

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
-\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+\\ / A nd | Copyright (C) 2011-2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -36,7 +36,8 @@ bool Foam::UOPstream::write
const int toProcNo,
const char* buf,
const std::streamsize bufSize,
-const int tag
+const int tag,
+const label communicator
)
{
notImplemented
@@ -47,7 +48,8 @@ bool Foam::UOPstream::write
"const int fromProcNo,"
"char* buf,"
"const label bufSize,"
-"const int tag"
+"const int tag,"
+"const label communicator"
")"
);

src/Pstream/dummy/UPstream.C

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
-\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
+\\ / A nd | Copyright (C) 2011-2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -55,28 +55,41 @@ void Foam::UPstream::abort()
}
-void Foam::reduce(scalar&, const sumOp<scalar>&, const int)
+void Foam::reduce(scalar&, const sumOp<scalar>&, const int, const label)
{}
-void Foam::reduce(scalar&, const minOp<scalar>&, const int)
+void Foam::reduce(scalar&, const minOp<scalar>&, const int, const label)
{}
-void Foam::reduce(vector2D&, const sumOp<vector2D>&, const int)
+void Foam::reduce(vector2D&, const sumOp<vector2D>&, const int, const label)
{}
void Foam::sumReduce
(
-scalar& Value,
-label& Count,
-const int tag
+scalar&,
+label&,
+const int,
+const label
)
{}
-void Foam::reduce(scalar&, const sumOp<scalar>&, const int, label&)
+void Foam::reduce(scalar&, const sumOp<scalar>&, const int, const label, label&)
{}
+void Foam::UPstream::allocatePstreamCommunicator
+(
+const label,
+const label
+)
+{}
+void Foam::UPstream::freePstreamCommunicator(const label)
+{}
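
Every reduction entry point now carries a trailing communicator label, as the dummy stubs above show. A minimal caller-side sketch of what changes (hedged: the defaulted trailing arguments live in the Pstream headers, which are not part of this diff, and computeLocalResidual is a hypothetical helper):

    // Before this commit: reductions implicitly ran on MPI_COMM_WORLD.
    scalar residual = computeLocalResidual();   // hypothetical helper
    reduce(residual, sumOp<scalar>());

    // After: a communicator label can be passed explicitly; defaulted
    // trailing arguments keep existing call sites compiling unchanged.
    reduce(residual, sumOp<scalar>(), UPstream::msgType(), UPstream::worldComm);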

src/Pstream/mpi/PstreamGlobals.C

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
-\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+\\ / A nd | Copyright (C) 2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -37,6 +37,56 @@ namespace Foam
DynamicList<MPI_Request> PstreamGlobals::outstandingRequests_;
//! \endcond
+//// Max outstanding non-blocking operations.
+////! \cond fileScope
+//int PstreamGlobals::nRequests_ = 0;
+////! \endcond
+// Free'd non-blocking operations.
+//! \cond fileScope
+//DynamicList<label> PstreamGlobals::freedRequests_;
+//! \endcond
+// Max outstanding message tag operations.
+//! \cond fileScope
+int PstreamGlobals::nTags_ = 0;
+//! \endcond
+// Free'd message tags
+//! \cond fileScope
+DynamicList<int> PstreamGlobals::freedTags_;
+//! \endcond
+// Allocated communicators.
+//! \cond fileScope
+DynamicList<MPI_Comm> PstreamGlobals::MPICommunicators_;
+DynamicList<MPI_Group> PstreamGlobals::MPIGroups_;
+//! \endcond
+void PstreamGlobals::checkCommunicator
+(
+const label comm,
+const label otherProcNo
+)
+{
+if
+(
+comm < 0
+|| comm >= PstreamGlobals::MPICommunicators_.size()
+)
+{
+FatalErrorIn
+(
+"PstreamGlobals::checkCommunicator(const label, const label)"
+) << "otherProcNo:" << otherProcNo << " : illegal communicator "
+<< comm << endl
+<< "Communicator should be within range 0.."
+<< PstreamGlobals::MPICommunicators_.size()-1 << abort(FatalError);
+}
+}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace Foam
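
PstreamGlobals turns MPI communicators into index-based handles: user-level code passes around a small label, and only this layer dereferences it into an MPI_Comm (element 0 being MPI_COMM_WORLD). A standalone sketch of that pattern, with std::vector standing in for Foam::DynamicList and names of my own choosing:

    #include <mpi.h>
    #include <cstdio>
    #include <cstdlib>
    #include <vector>

    // Index-based communicator handles: callers hold an int label, never
    // a raw MPI_Comm. Element 0 is expected to be MPI_COMM_WORLD.
    static std::vector<MPI_Comm> communicators;

    static void checkCommunicator(int comm, int otherProcNo)
    {
        // Same guard as PstreamGlobals::checkCommunicator: the label must
        // index an allocated communicator.
        if (comm < 0 || comm >= int(communicators.size()))
        {
            std::fprintf(stderr, "proc %d: illegal communicator %d\n",
                otherProcNo, comm);
            std::abort();
        }
    }

    static int rankIn(int comm, int otherProcNo)
    {
        checkCommunicator(comm, otherProcNo);
        int rank = -1;
        MPI_Comm_rank(communicators[comm], &rank);
        return rank;
    }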

src/Pstream/mpi/PstreamGlobals.H

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
-\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+\\ / A nd | Copyright (C) 2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -26,7 +26,7 @@ Namespace
Description
Global functions and variables for working with parallel streams,
-but principally for gamma/mpi
+but principally for mpi
SourceFiles
PstreamGlobals.C
@@ -54,6 +54,20 @@ namespace PstreamGlobals
extern DynamicList<MPI_Request> outstandingRequests_;
+//extern int nRequests_;
+//extern DynamicList<label> freedRequests_;
+extern int nTags_;
+extern DynamicList<int> freedTags_;
+// Current communicators. First element will be MPI_COMM_WORLD
+extern DynamicList<MPI_Comm> MPICommunicators_;
+extern DynamicList<MPI_Group> MPIGroups_;
+void checkCommunicator(const label, const label procNo);
};

src/Pstream/mpi/UIPread.C

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
-\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
+\\ / A nd | Copyright (C) 2011-2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -41,6 +41,7 @@ Foam::UIPstream::UIPstream
DynamicList<char>& externalBuf,
label& externalBufPosition,
const int tag,
+const label comm,
const bool clearAtEnd,
streamFormat format,
versionNumber version
@@ -52,6 +53,7 @@ Foam::UIPstream::UIPstream
externalBuf_(externalBuf),
externalBufPosition_(externalBufPosition),
tag_(tag),
+comm_(comm),
clearAtEnd_(clearAtEnd),
messageSize_(0)
{
@@ -71,7 +73,8 @@ Foam::UIPstream::UIPstream
if (debug)
{
Pout<< "UIPstream::UIPstream : read from:" << fromProcNo
-<< " tag:" << tag << " wanted size:" << wantedSize
+<< " tag:" << tag << " comm:" << comm_
+<< " wanted size:" << wantedSize
<< Foam::endl;
}
@@ -80,7 +83,13 @@ Foam::UIPstream::UIPstream
// and set it
if (!wantedSize)
{
-MPI_Probe(procID(fromProcNo_), tag_, MPI_COMM_WORLD, &status);
+MPI_Probe
+(
+fromProcNo_,
+tag_,
+PstreamGlobals::MPICommunicators_[comm_],
+&status
+);
MPI_Get_count(&status, MPI_BYTE, &messageSize_);
externalBuf_.setCapacity(messageSize_);
@@ -99,7 +108,8 @@ Foam::UIPstream::UIPstream
fromProcNo_,
externalBuf_.begin(),
wantedSize,
-tag_
+tag_,
+comm_
);
// Set addressed size. Leave actual allocated memory intact.
@@ -121,6 +131,7 @@ Foam::UIPstream::UIPstream(const int fromProcNo, PstreamBuffers& buffers)
externalBuf_(buffers.recvBuf_[fromProcNo]),
externalBufPosition_(buffers.recvBufPos_[fromProcNo]),
tag_(buffers.tag_),
+comm_(buffers.comm_),
clearAtEnd_(true),
messageSize_(0)
{
@@ -145,7 +156,8 @@ Foam::UIPstream::UIPstream(const int fromProcNo, PstreamBuffers& buffers)
{
Pout<< "UIPstream::UIPstream PstreamBuffers :"
<< " fromProcNo:" << fromProcNo
-<< " tag:" << tag_ << " receive buffer size:" << messageSize_
+<< " tag:" << tag_ << " comm:" << comm_
+<< " receive buffer size:" << messageSize_
<< Foam::endl;
}
}
@@ -159,7 +171,8 @@ Foam::UIPstream::UIPstream(const int fromProcNo, PstreamBuffers& buffers)
{
Pout<< "UIPstream::UIPstream PstreamBuffers :"
<< " read from:" << fromProcNo
-<< " tag:" << tag_ << " wanted size:" << wantedSize
+<< " tag:" << tag_ << " comm:" << comm_
+<< " wanted size:" << wantedSize
<< Foam::endl;
}
@@ -167,7 +180,13 @@ Foam::UIPstream::UIPstream(const int fromProcNo, PstreamBuffers& buffers)
// and set it
if (!wantedSize)
{
-MPI_Probe(procID(fromProcNo_), tag_, MPI_COMM_WORLD, &status);
+MPI_Probe
+(
+fromProcNo_,
+tag_,
+PstreamGlobals::MPICommunicators_[comm_],
+&status
+);
MPI_Get_count(&status, MPI_BYTE, &messageSize_);
externalBuf_.setCapacity(messageSize_);
@@ -186,7 +205,8 @@ Foam::UIPstream::UIPstream(const int fromProcNo, PstreamBuffers& buffers)
fromProcNo_,
externalBuf_.begin(),
wantedSize,
-tag_
+tag_,
+comm_
);
// Set addressed size. Leave actual allocated memory intact.
@@ -208,16 +228,28 @@ Foam::label Foam::UIPstream::read
const int fromProcNo,
char* buf,
const std::streamsize bufSize,
-const int tag
+const int tag,
+const label communicator
)
{
if (debug)
{
Pout<< "UIPstream::read : starting read from:" << fromProcNo
-<< " tag:" << tag << " wanted size:" << label(bufSize)
+<< " tag:" << tag << " comm:" << communicator
+<< " wanted size:" << label(bufSize)
<< " commsType:" << UPstream::commsTypeNames[commsType]
<< Foam::endl;
}
+if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
+{
+Pout<< "UIPstream::read : starting read from:" << fromProcNo
+<< " tag:" << tag << " comm:" << communicator
+<< " wanted size:" << label(bufSize)
+<< " commsType:" << UPstream::commsTypeNames[commsType]
+<< " warnComm:" << UPstream::warnComm
+<< Foam::endl;
+error::printStack(Pout);
+}
if (commsType == blocking || commsType == scheduled)
{
@@ -230,9 +262,9 @@ Foam::label Foam::UIPstream::read
buf,
bufSize,
MPI_PACKED,
-procID(fromProcNo),
+fromProcNo,
tag,
-MPI_COMM_WORLD,
+PstreamGlobals::MPICommunicators_[communicator],
&status
)
)
@@ -286,9 +318,9 @@ Foam::label Foam::UIPstream::read
buf,
bufSize,
MPI_PACKED,
-procID(fromProcNo),
+fromProcNo,
tag,
-MPI_COMM_WORLD,
+PstreamGlobals::MPICommunicators_[communicator],
&request
)
)
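
When the caller does not yet know the incoming size (a wantedSize of zero), the constructors above probe first, size the receive buffer from the probe, and only then receive, all on the selected communicator. A self-contained sketch of that probe-resize-receive pattern in plain MPI (MPI_BYTE here for simplicity; the Pstream layer itself transfers MPI_PACKED data):

    #include <mpi.h>
    #include <vector>

    // Probe-then-receive: MPI_Probe on the same (source, tag, comm) triple
    // reports the pending message size before any buffer is committed.
    std::vector<char> receiveAnySize(int fromProcNo, int tag, MPI_Comm comm)
    {
        MPI_Status status;
        MPI_Probe(fromProcNo, tag, comm, &status);

        int messageSize = 0;
        MPI_Get_count(&status, MPI_BYTE, &messageSize);

        std::vector<char> buf(messageSize);
        MPI_Recv(buf.data(), messageSize, MPI_BYTE, fromProcNo, tag, comm,
            MPI_STATUS_IGNORE);
        return buf;
    }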

src/Pstream/mpi/UOPwrite.C

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
-\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+\\ / A nd | Copyright (C) 2011-2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -39,16 +39,32 @@ bool Foam::UOPstream::write
const int toProcNo,
const char* buf,
const std::streamsize bufSize,
-const int tag
+const int tag,
+const label communicator
)
{
if (debug)
{
Pout<< "UOPstream::write : starting write to:" << toProcNo
-<< " tag:" << tag << " size:" << label(bufSize)
+<< " tag:" << tag
+<< " comm:" << communicator << " size:" << label(bufSize)
<< " commsType:" << UPstream::commsTypeNames[commsType]
<< Foam::endl;
}
+if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
+{
+Pout<< "UOPstream::write : starting write to:" << toProcNo
+<< " tag:" << tag
+<< " comm:" << communicator << " size:" << label(bufSize)
+<< " commsType:" << UPstream::commsTypeNames[commsType]
+<< " warnComm:" << UPstream::warnComm
+<< Foam::endl;
+error::printStack(Pout);
+}
+PstreamGlobals::checkCommunicator(communicator, toProcNo);
bool transferFailed = true;
@@ -59,9 +75,9 @@ bool Foam::UOPstream::write
const_cast<char*>(buf),
bufSize,
MPI_PACKED,
-procID(toProcNo),
+toProcNo, //procID(toProcNo),
tag,
-MPI_COMM_WORLD
+PstreamGlobals::MPICommunicators_[communicator] //MPI_COMM_WORLD
);
if (debug)
@@ -79,9 +95,9 @@ bool Foam::UOPstream::write
const_cast<char*>(buf),
bufSize,
MPI_PACKED,
-procID(toProcNo),
+toProcNo, //procID(toProcNo),
tag,
-MPI_COMM_WORLD
+PstreamGlobals::MPICommunicators_[communicator] //MPI_COMM_WORLD
);
if (debug)
@@ -101,9 +117,9 @@ bool Foam::UOPstream::write
const_cast<char*>(buf),
bufSize,
MPI_PACKED,
-procID(toProcNo),
+toProcNo, //procID(toProcNo),
tag,
-MPI_COMM_WORLD,
+PstreamGlobals::MPICommunicators_[communicator],//MPI_COMM_WORLD,
&request
);
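
The repeated UPstream::warnComm blocks in these read/write paths are a debugging trip-wire rather than part of the data path: set warnComm to the communicator label you expect, and any send, receive or reduction on a different communicator prints its parameters and a stack trace. A sketch of the idiom outside OpenFOAM (the fprintf stands in for Pout, and the backtrace comment for error::printStack):

    #include <cstdio>

    // warnComm == -1 disables the check; otherwise any traffic on another
    // communicator label is reported loudly so its call site can be found.
    static int warnComm = -1;

    static void warnIfUnexpected(int communicator, const char* where)
    {
        if (warnComm != -1 && communicator != warnComm)
        {
            std::fprintf(stderr,
                "%s: traffic on comm %d while warnComm is %d\n",
                where, communicator, warnComm);
            // error::printStack(Pout) in OpenFOAM; emit a backtrace here.
        }
    }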

src/Pstream/mpi/UPstream.C

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
-\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
+\\ / A nd | Copyright (C) 2011-2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -55,7 +55,6 @@ void Foam::UPstream::addValidParOptions(HashTable<string>& validParOptions)
validParOptions.insert("p4wd", "directory");
validParOptions.insert("p4amslave", "");
validParOptions.insert("p4yourname", "hostname");
-validParOptions.insert("GAMMANP", "number of instances");
validParOptions.insert("machinefile", "machine file");
}
@@ -66,12 +65,13 @@ bool Foam::UPstream::init(int& argc, char**& argv)
int numprocs;
MPI_Comm_size(MPI_COMM_WORLD, &numprocs);
-MPI_Comm_rank(MPI_COMM_WORLD, &myProcNo_);
+int myRank;
+MPI_Comm_rank(MPI_COMM_WORLD, &myRank);
if (debug)
{
Pout<< "UPstream::init : initialised with numProcs:" << numprocs
-<< " myProcNo:" << myProcNo_ << endl;
+<< " myRank:" << myRank << endl;
}
if (numprocs <= 1)
@@ -82,14 +82,9 @@ bool Foam::UPstream::init(int& argc, char**& argv)
<< Foam::abort(FatalError);
}
-procIDs_.setSize(numprocs);
-forAll(procIDs_, procNo)
-{
-procIDs_[procNo] = procNo;
-}
-setParRun();
+// Initialise parallel structure
+setParRun(numprocs);
# ifndef SGIMPI
string bufferSizeName = getEnv("MPI_BUFFER_SIZE");
@@ -112,15 +107,12 @@ bool Foam::UPstream::init(int& argc, char**& argv)
}
# endif
-int processorNameLen;
-char processorName[MPI_MAX_PROCESSOR_NAME];
-MPI_Get_processor_name(processorName, &processorNameLen);
-//signal(SIGABRT, stop);
-// Now that nprocs is known construct communication tables.
-initCommunicationSchedule();
+//int processorNameLen;
+//char processorName[MPI_MAX_PROCESSOR_NAME];
+//
+//MPI_Get_processor_name(processorName, &processorNameLen);
+//processorName[processorNameLen] = '\0';
+//Pout<< "Processor name:" << processorName << endl;
return true;
}
@@ -153,6 +145,15 @@ void Foam::UPstream::exit(int errnum)
<< endl;
}
+// Clean mpi communicators
+forAll(myProcNo_, communicator)
+{
+if (myProcNo_[communicator] != -1)
+{
+freePstreamCommunicator(communicator);
+}
+}
if (errnum == 0)
{
MPI_Finalize();
@@ -171,21 +172,60 @@ void Foam::UPstream::abort()
}
-void Foam::reduce(scalar& Value, const sumOp<scalar>& bop, const int tag)
+void Foam::reduce
+(
+scalar& Value,
+const sumOp<scalar>& bop,
+const int tag,
+const label communicator
+)
{
-allReduce(Value, 1, MPI_SCALAR, MPI_SUM, bop, tag);
+if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
+{
+Pout<< "** reducing:" << Value << " with comm:" << communicator
+<< " warnComm:" << UPstream::warnComm
+<< endl;
+error::printStack(Pout);
+}
+allReduce(Value, 1, MPI_SCALAR, MPI_SUM, bop, tag, communicator);
}
-void Foam::reduce(scalar& Value, const minOp<scalar>& bop, const int tag)
+void Foam::reduce
+(
+scalar& Value,
+const minOp<scalar>& bop,
+const int tag,
+const label communicator
+)
{
-allReduce(Value, 1, MPI_SCALAR, MPI_MIN, bop, tag);
+if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
+{
+Pout<< "** reducing:" << Value << " with comm:" << communicator
+<< " warnComm:" << UPstream::warnComm
+<< endl;
+error::printStack(Pout);
+}
+allReduce(Value, 1, MPI_SCALAR, MPI_MIN, bop, tag, communicator);
}
-void Foam::reduce(vector2D& Value, const sumOp<vector2D>& bop, const int tag)
+void Foam::reduce
+(
+vector2D& Value,
+const sumOp<vector2D>& bop,
+const int tag,
+const label communicator
+)
{
-allReduce(Value, 2, MPI_SCALAR, MPI_SUM, bop, tag);
+if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
+{
+Pout<< "** reducing:" << Value << " with comm:" << communicator
+<< " warnComm:" << UPstream::warnComm
+<< endl;
+error::printStack(Pout);
+}
+allReduce(Value, 2, MPI_SCALAR, MPI_SUM, bop, tag, communicator);
}
@@ -193,11 +233,19 @@ void Foam::sumReduce
(
scalar& Value,
label& Count,
-const int tag
+const int tag,
+const label communicator
)
{
+if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
+{
+Pout<< "** reducing:" << Value << " with comm:" << communicator
+<< " warnComm:" << UPstream::warnComm
+<< endl;
+error::printStack(Pout);
+}
vector2D twoScalars(Value, scalar(Count));
-reduce(twoScalars, sumOp<vector2D>());
+reduce(twoScalars, sumOp<vector2D>(), tag, communicator);
Value = twoScalars.x();
Count = twoScalars.y();
@@ -209,6 +257,7 @@ void Foam::reduce
scalar& Value,
const sumOp<scalar>& bop,
const int tag,
+const label communicator,
label& requestID
)
{
@@ -225,20 +274,156 @@ void Foam::reduce
MPI_SCALAR,
MPI_SUM,
0, //root
-MPI_COMM_WORLD,
+PstreamGlobals::MPICommunicators_[communicator],
&request
);
requestID = PstreamGlobals::outstandingRequests_.size();
PstreamGlobals::outstandingRequests_.append(request);
+if (debug)
+{
+Pout<< "UPstream::allocateRequest for non-blocking reduce"
+<< " : request:" << requestID
+<< endl;
+}
#else
// Non-blocking not yet implemented in mpi
-reduce(Value, bop, tag);
+reduce(Value, bop, tag, communicator);
requestID = -1;
#endif
}
+void Foam::UPstream::allocatePstreamCommunicator
+(
+const label parentIndex,
+const label index
+)
+{
+if (index == PstreamGlobals::MPIGroups_.size())
+{
+// Extend storage with dummy values
+MPI_Group newGroup;
+PstreamGlobals::MPIGroups_.append(newGroup);
+MPI_Comm newComm;
+PstreamGlobals::MPICommunicators_.append(newComm);
+}
+else if (index > PstreamGlobals::MPIGroups_.size())
+{
+FatalErrorIn
+(
+"UPstream::allocatePstreamCommunicator\n"
+"(\n"
+" const label parentIndex,\n"
+" const labelList& subRanks\n"
+")\n"
+) << "PstreamGlobals out of sync with UPstream data. Problem."
+<< Foam::exit(FatalError);
+}
+if (parentIndex == -1)
+{
+// Allocate world communicator
+if (index != UPstream::worldComm)
+{
+FatalErrorIn
+(
+"UPstream::allocateCommunicator\n"
+"(\n"
+" const label parentIndex,\n"
+" const labelList& subRanks\n"
+")\n"
+) << "world communicator should always be index "
+<< UPstream::worldComm << Foam::exit(FatalError);
+}
+PstreamGlobals::MPICommunicators_[index] = MPI_COMM_WORLD;
+MPI_Comm_group(MPI_COMM_WORLD, &PstreamGlobals::MPIGroups_[index]);
+MPI_Comm_rank
+(
+PstreamGlobals::MPICommunicators_[index],
+&myProcNo_[index]
+);
+// Set the number of processes to the actual number
+int numProcs;
+MPI_Comm_size(PstreamGlobals::MPICommunicators_[index], &numProcs);
+procIDs_[index] = identity(numProcs);
+}
+else
+{
+// Create new group
+MPI_Group_incl
+(
+PstreamGlobals::MPIGroups_[parentIndex],
+procIDs_[index].size(),
+procIDs_[index].begin(),
+&PstreamGlobals::MPIGroups_[index]
+);
+// Create new communicator
+MPI_Comm_create
+(
+PstreamGlobals::MPICommunicators_[parentIndex],
+PstreamGlobals::MPIGroups_[index],
+&PstreamGlobals::MPICommunicators_[index]
+);
+if (PstreamGlobals::MPICommunicators_[index] == MPI_COMM_NULL)
+{
+myProcNo_[index] = -1;
+}
+else
+{
+if
+(
+MPI_Comm_rank
+(
+PstreamGlobals::MPICommunicators_[index],
+&myProcNo_[index]
+)
+)
+{
+FatalErrorIn
+(
+"UPstream::allocatePstreamCommunicator\n"
+"(\n"
+" const label,\n"
+" const labelList&\n"
+")\n"
+) << "Problem :"
+<< " when allocating communicator at " << index
+<< " from ranks " << procIDs_[index]
+<< " of parent " << parentIndex
+<< " cannot find my own rank"
+<< Foam::exit(FatalError);
+}
+}
+}
+}
+void Foam::UPstream::freePstreamCommunicator(const label communicator)
+{
+if (communicator != UPstream::worldComm)
+{
+if (PstreamGlobals::MPICommunicators_[communicator] != MPI_COMM_NULL)
+{
+// Free communicator. Sets communicator to MPI_COMM_NULL
+MPI_Comm_free(&PstreamGlobals::MPICommunicators_[communicator]);
+}
+if (PstreamGlobals::MPIGroups_[communicator] != MPI_GROUP_NULL)
+{
+// Free group. Sets group to MPI_GROUP_NULL
+MPI_Group_free(&PstreamGlobals::MPIGroups_[communicator]);
+}
+}
+}
Foam::label Foam::UPstream::nRequests()
{
return PstreamGlobals::outstandingRequests_.size();
@@ -345,7 +530,7 @@ bool Foam::UPstream::finishedRequest(const label i)
{
if (debug)
{
-Pout<< "UPstream::waitRequests : checking finishedRequest:" << i
+Pout<< "UPstream::finishedRequest : checking request:" << i
<< endl;
}
@@ -371,7 +556,7 @@ bool Foam::UPstream::finishedRequest(const label i)
if (debug)
{
-Pout<< "UPstream::waitRequests : finished finishedRequest:" << i
+Pout<< "UPstream::finishedRequest : finished request:" << i
<< endl;
}
@@ -379,4 +564,94 @@ bool Foam::UPstream::finishedRequest(const label i)
}
+int Foam::UPstream::allocateTag(const char* s)
+{
+int tag;
+if (PstreamGlobals::freedTags_.size())
+{
+tag = PstreamGlobals::freedTags_.remove();
+}
+else
+{
+tag = PstreamGlobals::nTags_++;
+}
+if (debug)
+{
+//if (UPstream::lateBlocking > 0)
+//{
+// string& poutp = Pout.prefix();
+// poutp[poutp.size()-2*(UPstream::lateBlocking+2)+tag] = 'X';
+// Perr.prefix() = Pout.prefix();
+//}
+Pout<< "UPstream::allocateTag " << s
+<< " : tag:" << tag
+<< endl;
+}
+return tag;
+}
+int Foam::UPstream::allocateTag(const word& s)
+{
+int tag;
+if (PstreamGlobals::freedTags_.size())
+{
+tag = PstreamGlobals::freedTags_.remove();
+}
+else
+{
+tag = PstreamGlobals::nTags_++;
+}
+if (debug)
+{
+//if (UPstream::lateBlocking > 0)
+//{
+// string& poutp = Pout.prefix();
+// poutp[poutp.size()-2*(UPstream::lateBlocking+2)+tag] = 'X';
+// Perr.prefix() = Pout.prefix();
+//}
+Pout<< "UPstream::allocateTag " << s
+<< " : tag:" << tag
+<< endl;
+}
+return tag;
+}
+void Foam::UPstream::freeTag(const char* s, const int tag)
+{
+if (debug)
+{
+//if (UPstream::lateBlocking > 0)
+//{
+// string& poutp = Pout.prefix();
+// poutp[poutp.size()-2*(UPstream::lateBlocking+2)+tag] = ' ';
+// Perr.prefix() = Pout.prefix();
+//}
+Pout<< "UPstream::freeTag " << s << " tag:" << tag << endl;
+}
+PstreamGlobals::freedTags_.append(tag);
+}
+void Foam::UPstream::freeTag(const word& s, const int tag)
+{
+if (debug)
+{
+//if (UPstream::lateBlocking > 0)
+//{
+// string& poutp = Pout.prefix();
+// poutp[poutp.size()-2*(UPstream::lateBlocking+2)+tag] = ' ';
+// Perr.prefix() = Pout.prefix();
+//}
+Pout<< "UPstream::freeTag " << s << " tag:" << tag << endl;
+}
+PstreamGlobals::freedTags_.append(tag);
+}
// ************************************************************************* //
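
Sub-communicator allocation above follows the standard MPI group route: take the parent's group, include the requested ranks, create the communicator collectively, and record a rank of -1 on processes left out. A compilable sketch of the same sequence (the choice of ranks is up to the caller, for example a processor-agglomeration subset; names here are my own):

    #include <mpi.h>
    #include <vector>

    // Mirror of allocatePstreamCommunicator's non-world branch:
    // MPI_Group_incl + MPI_Comm_create, MPI_COMM_NULL on excluded ranks.
    MPI_Comm makeSubCommunicator(MPI_Comm parent, const std::vector<int>& subRanks)
    {
        MPI_Group parentGroup, subGroup;
        MPI_Comm_group(parent, &parentGroup);
        MPI_Group_incl(parentGroup, int(subRanks.size()), subRanks.data(),
            &subGroup);

        // Collective over all of parent; ranks not listed in subRanks
        // receive MPI_COMM_NULL (the myProcNo_[index] = -1 case above).
        MPI_Comm sub;
        MPI_Comm_create(parent, subGroup, &sub);

        int myProcNo = -1;
        if (sub != MPI_COMM_NULL)
        {
            MPI_Comm_rank(sub, &myProcNo);
        }
        (void)myProcNo;  // excluded ranks keep -1, as in the code above

        MPI_Group_free(&parentGroup);
        MPI_Group_free(&subGroup);
        return sub;
    }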

src/Pstream/mpi/allReduce.H

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
-\\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
+\\ / A nd | Copyright (C) 2012-2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -52,7 +52,8 @@ void allReduce
MPI_Datatype MPIType,
MPI_Op op,
const BinaryOp& bop,
-const int tag
+const int tag,
+const int communicator
);
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

src/Pstream/mpi/allReduceTemplates.C

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
-\\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
+\\ / A nd | Copyright (C) 2012-2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -35,7 +35,8 @@ void Foam::allReduce
MPI_Datatype MPIType,
MPI_Op MPIOp,
const BinaryOp& bop,
-const int tag
+const int tag,
+const label communicator
)
{
if (!UPstream::parRun())
@@ -43,14 +44,14 @@ void Foam::allReduce
return;
}
-if (UPstream::nProcs() <= UPstream::nProcsSimpleSum)
+if (UPstream::nProcs(communicator) <= UPstream::nProcsSimpleSum)
{
-if (UPstream::master())
+if (UPstream::master(communicator))
{
for
(
int slave=UPstream::firstSlave();
-slave<=UPstream::lastSlave();
+slave<=UPstream::lastSlave(communicator);
slave++
)
{
@@ -63,9 +64,9 @@ void Foam::allReduce
&value,
MPICount,
MPIType,
-UPstream::procID(slave),
+slave, //UPstream::procID(slave),
tag,
-MPI_COMM_WORLD,
+PstreamGlobals::MPICommunicators_[communicator],
MPI_STATUS_IGNORE
)
)
@@ -97,9 +98,9 @@ void Foam::allReduce
&Value,
MPICount,
MPIType,
-UPstream::procID(UPstream::masterNo()),
+UPstream::masterNo(),//UPstream::procID(masterNo()),
tag,
-MPI_COMM_WORLD
+PstreamGlobals::MPICommunicators_[communicator]
)
)
{
@@ -120,12 +121,12 @@ void Foam::allReduce
}
-if (UPstream::master())
+if (UPstream::master(communicator))
{
for
(
int slave=UPstream::firstSlave();
-slave<=UPstream::lastSlave();
+slave<=UPstream::lastSlave(communicator);
slave++
)
{
@@ -136,9 +137,9 @@ void Foam::allReduce
&Value,
MPICount,
MPIType,
-UPstream::procID(slave),
+slave, //UPstream::procID(slave),
tag,
-MPI_COMM_WORLD
+PstreamGlobals::MPICommunicators_[communicator]
)
)
{
@@ -167,9 +168,9 @@ void Foam::allReduce
&Value,
MPICount,
MPIType,
-UPstream::procID(UPstream::masterNo()),
+UPstream::masterNo(),//UPstream::procID(masterNo()),
tag,
-MPI_COMM_WORLD,
+PstreamGlobals::MPICommunicators_[communicator],
MPI_STATUS_IGNORE
)
)
@@ -193,7 +194,15 @@ void Foam::allReduce
else
{
Type sum;
-MPI_Allreduce(&Value, &sum, MPICount, MPIType, MPIOp, MPI_COMM_WORLD);
+MPI_Allreduce
+(
+&Value,
+&sum,
+MPICount,
+MPIType,
+MPIOp,
+PstreamGlobals::MPICommunicators_[communicator]
+);
Value = sum;
}
}
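
allReduce keeps two strategies: up to UPstream::nProcsSimpleSum processes it reduces by explicit point-to-point traffic (the master receives each slave's value, applies the Foam binary operator, then sends the result back), and beyond that it hands the whole job to MPI_Allreduce on the selected communicator. A minimal sketch of that switch for a summed double (the threshold argument and function name are illustrative):

    #include <mpi.h>

    // Two-strategy allreduce, as in Foam::allReduce: explicit gather and
    // scatter on the master for small process counts, MPI_Allreduce else.
    double allReduceSum(double value, MPI_Comm comm, int tag, int simpleSumLimit)
    {
        int nProcs, myRank;
        MPI_Comm_size(comm, &nProcs);
        MPI_Comm_rank(comm, &myRank);

        if (nProcs <= simpleSumLimit)
        {
            if (myRank == 0)
            {
                // Master: combine every slave's value, then broadcast back.
                for (int slave = 1; slave < nProcs; slave++)
                {
                    double v;
                    MPI_Recv(&v, 1, MPI_DOUBLE, slave, tag, comm,
                        MPI_STATUS_IGNORE);
                    value += v;   // bop(value, v) in the template
                }
                for (int slave = 1; slave < nProcs; slave++)
                {
                    MPI_Send(&value, 1, MPI_DOUBLE, slave, tag, comm);
                }
            }
            else
            {
                MPI_Send(&value, 1, MPI_DOUBLE, 0, tag, comm);
                MPI_Recv(&value, 1, MPI_DOUBLE, 0, tag, comm,
                    MPI_STATUS_IGNORE);
            }
            return value;
        }

        double sum;
        MPI_Allreduce(&value, &sum, 1, MPI_DOUBLE, MPI_SUM, comm);
        return sum;
    }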