ENH: mpi/Pstream: encapsulate mpi_reduce calls

Author: mattijs
Date:   2012-06-07 17:27:02 +01:00
Parent: c442bf277c
Commit: 927fc0a313

5 changed files with 293 additions and 271 deletions
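For context, the commit leaves caller-side code untouched: global reductions keep the same free-function interface, and only the MPI plumbing behind the scalar specialisations is consolidated into one templated wrapper. A hedged usage sketch (the values and the helper function are made up for illustration; the header name is the one that declares these overloads in this tree):

#include "PstreamReduceOps.H"

void exampleUsage()
{
    // Per-processor contributions; values are illustrative only.
    Foam::scalar localSum = 1.5;
    Foam::scalar localMin = 42.0;

    Foam::reduce(localSum, Foam::sumOp<Foam::scalar>());  // global sum over all processors
    Foam::reduce(localMin, Foam::minOp<Foam::scalar>());  // global min, now a dedicated MPI specialisation
}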


@@ -124,7 +124,7 @@ void reduce
}

-// Insist there is a specialisation for the sum reduction of scalar(s)
+// Insist there are specialisations for the common reductions of scalar(s)
void reduce
(
    scalar& Value,
@@ -132,6 +132,13 @@
    const int tag = Pstream::msgType()
);

+void reduce
+(
+    scalar& Value,
+    const minOp<scalar>& bop,
+    const int tag = Pstream::msgType()
+);
+
void reduce
(
    vector2D& Value,
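The reworded comment states the contract: each commonly used reduction on scalar(s) gets a dedicated non-template overload so the MPI layer can back it directly. As a sketch of how the pattern would extend (hypothetical, not part of this commit), a max reduction would be declared the same way:

// Hypothetical further specialisation, following the pattern above; the MPI
// layer would back it with MPI_MAX.
void reduce
(
    scalar& Value,
    const maxOp<scalar>& bop,
    const int tag = Pstream::msgType()
);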


@@ -59,6 +59,10 @@
void Foam::reduce(scalar&, const sumOp<scalar>&, const int)
{}

+void Foam::reduce(scalar&, const minOp<scalar>&, const int)
+{}
+
void Foam::reduce(vector2D&, const sumOp<vector2D>&, const int)
{}
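The dummy Pstream library serves non-parallel builds, so the new specialisation only needs an empty body there: with a single process the local value already is the reduced value. A small illustrative sketch:

// With the dummy (non-parallel) Pstream library the call is a no-op.
Foam::scalar v = 5.0;
Foam::reduce(v, Foam::minOp<Foam::scalar>());   // v remains 5.0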


@@ -30,6 +30,7 @@ License
#include "OSspecific.H"
#include "PstreamGlobals.H"
#include "SubList.H"
+#include "allReduce.H"

#include <cstring>
#include <cstdlib>
@@ -172,283 +173,19 @@ void Foam::UPstream::abort()

void Foam::reduce(scalar& Value, const sumOp<scalar>& bop, const int tag)
{
+    allReduce(Value, 1, MPI_SCALAR, MPI_SUM, bop, tag);
+}
-    if (Pstream::debug)
{
Pout<< "Foam::reduce : value:" << Value << endl;
}
if (!UPstream::parRun())
{
return;
}
if (UPstream::nProcs() <= UPstream::nProcsSimpleSum)
{
if (UPstream::master())
{
for
(
int slave=UPstream::firstSlave();
slave<=UPstream::lastSlave();
slave++
)
{
scalar value;
if
(
MPI_Recv
(
&value,
1,
MPI_SCALAR,
UPstream::procID(slave),
tag,
MPI_COMM_WORLD,
MPI_STATUS_IGNORE
)
)
{
FatalErrorIn
(
"reduce(scalar& Value, const sumOp<scalar>& sumOp)"
) << "MPI_Recv failed"
<< Foam::abort(FatalError);
}
Value = bop(Value, value);
}
}
else
{
if
(
MPI_Send
(
&Value,
1,
MPI_SCALAR,
UPstream::procID(UPstream::masterNo()),
tag,
MPI_COMM_WORLD
)
)
{
FatalErrorIn
(
"reduce(scalar& Value, const sumOp<scalar>& sumOp)"
) << "MPI_Send failed"
<< Foam::abort(FatalError);
}
-        }
+
+void Foam::reduce(scalar& Value, const minOp<scalar>& bop, const int tag)
+{
+    allReduce(Value, 1, MPI_SCALAR, MPI_MIN, bop, tag);
+}
-        if (UPstream::master())
-        {
-            for
(
int slave=UPstream::firstSlave();
slave<=UPstream::lastSlave();
slave++
)
{
if
(
MPI_Send
(
&Value,
1,
MPI_SCALAR,
UPstream::procID(slave),
tag,
MPI_COMM_WORLD
)
)
{
FatalErrorIn
(
"reduce(scalar& Value, const sumOp<scalar>& sumOp)"
) << "MPI_Send failed"
<< Foam::abort(FatalError);
}
}
}
else
{
if
(
MPI_Recv
(
&Value,
1,
MPI_SCALAR,
UPstream::procID(UPstream::masterNo()),
tag,
MPI_COMM_WORLD,
MPI_STATUS_IGNORE
)
)
{
FatalErrorIn
(
"reduce(scalar& Value, const sumOp<scalar>& sumOp)"
) << "MPI_Recv failed"
<< Foam::abort(FatalError);
}
}
}
else
{
scalar sum;
MPI_Allreduce(&Value, &sum, 1, MPI_SCALAR, MPI_SUM, MPI_COMM_WORLD);
Value = sum;
}
if (Pstream::debug)
{
Pout<< "Foam::reduce : reduced value:" << Value << endl;
}
-}

void Foam::reduce(vector2D& Value, const sumOp<vector2D>& bop, const int tag)
{
+    allReduce(Value, 2, MPI_SCALAR, MPI_SUM, bop, tag);
-    if (Pstream::debug)
{
Pout<< "Foam::reduce : value:" << Value << endl;
}
if (!UPstream::parRun())
{
return;
}
if (UPstream::nProcs() <= UPstream::nProcsSimpleSum)
{
if (UPstream::master())
{
for
(
int slave=UPstream::firstSlave();
slave<=UPstream::lastSlave();
slave++
)
{
vector2D value;
if
(
MPI_Recv
(
&value,
2,
MPI_SCALAR,
UPstream::procID(slave),
tag,
MPI_COMM_WORLD,
MPI_STATUS_IGNORE
)
)
{
FatalErrorIn
(
"reduce(vector2D& Value, const sumOp<vector2D>& sumOp)"
) << "MPI_Recv failed"
<< Foam::abort(FatalError);
}
Value = bop(Value, value);
}
}
else
{
if
(
MPI_Send
(
&Value,
2,
MPI_SCALAR,
UPstream::procID(UPstream::masterNo()),
tag,
MPI_COMM_WORLD
)
)
{
FatalErrorIn
(
"reduce(vector2D& Value, const sumOp<vector2D>& sumOp)"
) << "MPI_Send failed"
<< Foam::abort(FatalError);
}
}
if (UPstream::master())
{
for
(
int slave=UPstream::firstSlave();
slave<=UPstream::lastSlave();
slave++
)
{
if
(
MPI_Send
(
&Value,
2,
MPI_SCALAR,
UPstream::procID(slave),
tag,
MPI_COMM_WORLD
)
)
{
FatalErrorIn
(
"reduce(vector2D& Value, const sumOp<vector2D>& sumOp)"
) << "MPI_Send failed"
<< Foam::abort(FatalError);
}
}
}
else
{
if
(
MPI_Recv
(
&Value,
2,
MPI_SCALAR,
UPstream::procID(UPstream::masterNo()),
tag,
MPI_COMM_WORLD,
MPI_STATUS_IGNORE
)
)
{
FatalErrorIn
(
"reduce(vector2D& Value, const sumOp<vector2D>& sumOp)"
) << "MPI_Recv failed"
<< Foam::abort(FatalError);
}
}
}
else
{
vector2D sum;
MPI_Allreduce(&Value, &sum, 2, MPI_SCALAR, MPI_SUM, MPI_COMM_WORLD);
Value = sum;
}
if (Pstream::debug)
{
Pout<< "Foam::reduce : reduced value:" << Value << endl;
}
}
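The count argument passed to the wrapper is the number of scalar components behind each type: 1 for scalar, 2 for vector2D. MPI_SCALAR is the Pstream layer's datatype macro, which in this tree is expected to map to MPI_FLOAT or MPI_DOUBLE depending on the precision the library is built with. As a hypothetical illustration (not part of this commit), a three-component vector specialisation would follow the same one-line pattern:

// Hypothetical sketch only: shows how another specialisation would forward
// to the templated wrapper with its own component count.
void Foam::reduce(vector& Value, const sumOp<vector>& bop, const int tag)
{
    allReduce(Value, 3, MPI_SCALAR, MPI_SUM, bop, tag);
}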


@@ -0,0 +1,72 @@
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | Copyright (C) 2012 OpenFOAM Foundation
     \\/     M anipulation  |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
InNamespace
Foam
Description
Various functions to wrap MPI_Allreduce
SourceFiles
allReduceTemplates.C
\*---------------------------------------------------------------------------*/
#ifndef allReduce_H
#define allReduce_H
#include "mpi.h"
#include "UPstream.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
template<class Type, class BinaryOp>
void allReduce
(
Type& Value,
int count,
MPI_Datatype MPIType,
MPI_Op op,
const BinaryOp& bop,
const int tag
);
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace Foam
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#ifdef NoRepository
# include "allReduceTemplates.C"
#endif
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#endif
// ************************************************************************* //
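The #ifdef NoRepository block follows the usual OpenFOAM idiom of pulling the template definitions in from allReduceTemplates.C when templates are compiled in place rather than taken from a pre-instantiated repository. Normally only the Foam::reduce specialisations in the MPI UPstream.C call this wrapper; a direct call would look roughly as follows (hedged sketch for a double-precision build; the value is hypothetical):

// Illustrative only: combine the native MPI_MAX operation with the matching
// OpenFOAM functor, as the reduce() specialisations do for sum and min.
Foam::scalar x = 0.25;   // hypothetical per-processor value
Foam::allReduce
(
    x,
    1,
    MPI_DOUBLE,          // assuming scalar == double in this build
    MPI_MAX,
    Foam::maxOp<Foam::scalar>(),
    Foam::UPstream::msgType()
);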


@@ -0,0 +1,202 @@
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | Copyright (C) 2012 OpenFOAM Foundation
     \\/     M anipulation  |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
#include "allReduce.H"
// * * * * * * * * * * * * * * * Global Functions * * * * * * * * * * * * * //
template<class Type, class BinaryOp>
void Foam::allReduce
(
Type& Value,
int MPICount,
MPI_Datatype MPIType,
MPI_Op MPIOp,
const BinaryOp& bop,
const int tag
)
{
if (!UPstream::parRun())
{
return;
}
if (UPstream::nProcs() <= UPstream::nProcsSimpleSum)
{
if (UPstream::master())
{
for
(
int slave=UPstream::firstSlave();
slave<=UPstream::lastSlave();
slave++
)
{
Type value;
if
(
MPI_Recv
(
&value,
MPICount,
MPIType,
UPstream::procID(slave),
tag,
MPI_COMM_WORLD,
MPI_STATUS_IGNORE
)
)
{
FatalErrorIn
(
"void Foam::allReduce\n"
"(\n"
" Type&,\n"
" int,\n"
" MPI_Datatype,\n"
" MPI_Op,\n"
" const BinaryOp&,\n"
" const int\n"
")\n"
) << "MPI_Recv failed"
<< Foam::abort(FatalError);
}
Value = bop(Value, value);
}
}
else
{
if
(
MPI_Send
(
&Value,
MPICount,
MPIType,
UPstream::procID(UPstream::masterNo()),
tag,
MPI_COMM_WORLD
)
)
{
FatalErrorIn
(
"void Foam::allReduce\n"
"(\n"
" Type&,\n"
" int,\n"
" MPI_Datatype,\n"
" MPI_Op,\n"
" const BinaryOp&,\n"
" const int\n"
")\n"
) << "MPI_Send failed"
<< Foam::abort(FatalError);
}
}
if (UPstream::master())
{
for
(
int slave=UPstream::firstSlave();
slave<=UPstream::lastSlave();
slave++
)
{
if
(
MPI_Send
(
&Value,
MPICount,
MPIType,
UPstream::procID(slave),
tag,
MPI_COMM_WORLD
)
)
{
FatalErrorIn
(
"void Foam::allReduce\n"
"(\n"
" Type&,\n"
" int,\n"
" MPI_Datatype,\n"
" MPI_Op,\n"
" const BinaryOp&,\n"
" const int\n"
")\n"
) << "MPI_Send failed"
<< Foam::abort(FatalError);
}
}
}
else
{
if
(
MPI_Recv
(
&Value,
MPICount,
MPIType,
UPstream::procID(UPstream::masterNo()),
tag,
MPI_COMM_WORLD,
MPI_STATUS_IGNORE
)
)
{
FatalErrorIn
(
"void Foam::allReduce\n"
"(\n"
" Type&,\n"
" int,\n"
" MPI_Datatype,\n"
" MPI_Op,\n"
" const BinaryOp&,\n"
" const int\n"
")\n"
) << "MPI_Recv failed"
<< Foam::abort(FatalError);
}
}
}
else
{
Type sum;
MPI_Allreduce(&Value, &sum, MPICount, MPIType, MPIOp, MPI_COMM_WORLD);
Value = sum;
}
}
// ************************************************************************* //
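Both the OpenFOAM functor (bop) and the native MPI operation are passed because the two branches combine values differently: up to nProcsSimpleSum processes, the master receives each slave's value, combines it with bop, and then sends the result back to every slave; beyond that, a single MPI_Allreduce with the native MPI_Op is used instead. A compressed standalone sketch of the same strategy (plain MPI, sum only; the helper name and the nProcsSimpleSum default of 16 are assumptions, and MPI_Init is taken to have been called already):

#include <mpi.h>

double allReduceSum(double local, int nProcsSimpleSum = 16)
{
    int rank, nProcs;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &nProcs);

    if (nProcs <= nProcsSimpleSum)
    {
        if (rank == 0)
        {
            // Master: receive every slave's contribution and combine (bop).
            for (int slave = 1; slave < nProcs; ++slave)
            {
                double value;
                MPI_Recv(&value, 1, MPI_DOUBLE, slave, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
                local += value;
            }
            // Master: send the combined result back to every slave.
            for (int slave = 1; slave < nProcs; ++slave)
            {
                MPI_Send(&local, 1, MPI_DOUBLE, slave, 0, MPI_COMM_WORLD);
            }
        }
        else
        {
            // Slave: send the local value, then receive the combined result.
            MPI_Send(&local, 1, MPI_DOUBLE, 0, 0, MPI_COMM_WORLD);
            MPI_Recv(&local, 1, MPI_DOUBLE, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        }
        return local;
    }

    // Larger process counts: let the MPI library pick the reduction algorithm.
    double global;
    MPI_Allreduce(&local, &global, 1, MPI_DOUBLE, MPI_SUM, MPI_COMM_WORLD);
    return global;
}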