Mirror of https://develop.openfoam.com/Development/openfoam.git
ENH: decomposePar, reconstructPar: added -allRegions option
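In practice the new option lets the multi-region (chtMultiRegion*) tutorials replace their per-region decompose/reconstruct loops with a single call per utility. A minimal sketch of the workflow change, using the region names and helper functions from the tutorial Allrun scripts touched below (the solver invocation is left generic):

    # before: one pass per region
    for i in bottomAir topAir heater leftSolid rightSolid
    do
        decomposePar -region $i > log.decomposePar.$i 2>&1
    done

    # after: handle every region listed in regionProperties in one pass
    runApplication decomposePar -allRegions
    runParallel `getApplication` 4
    runApplication reconstructPar -allRegions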
@@ -3,7 +3,8 @@ EXE_INC = \
     -I$(LIB_SRC)/parallel/decompose/decompositionMethods/lnInclude \
     -I$(LIB_SRC)/finiteVolume/lnInclude \
     -I$(LIB_SRC)/lagrangian/basic/lnInclude \
-    -I$(LIB_SRC)/meshTools/lnInclude
+    -I$(LIB_SRC)/meshTools/lnInclude \
+    -I$(LIB_SRC)/regionModels/regionModel/lnInclude
 
 EXE_LIBS = \
     -lfiniteVolume \
@@ -11,4 +12,5 @@ EXE_LIBS = \
     -lgenericPatchFields \
     -ldecompositionMethods -L$(FOAM_LIBBIN)/dummy -lmetisDecomp -lscotchDecomp \
     -llagrangian \
-    -lmeshTools
+    -lmeshTools \
+    -lregionModels
@@ -39,6 +39,10 @@ Usage
     \param -region regionName \n
     Decompose named region. Does not check for existence of processor*.
 
+    \param -allRegions \n
+    Decompose all regions in regionProperties. Does not check for
+    existence of processor*.
+
     \param -copyUniform \n
     Copy any \a uniform directories too.
 
@@ -82,6 +86,7 @@ Usage
 #include "tensorIOField.H"
 #include "tensorFieldIOField.H"
 #include "pointFields.H"
+#include "regionProperties.H"
 
 #include "readFields.H"
 #include "dimFieldDecomposer.H"
@@ -101,6 +106,11 @@ int main(int argc, char *argv[])
     argList::noParallel();
 #include "addRegionOption.H"
+    argList::addBoolOption
+    (
+        "allRegions",
+        "operate on all regions in regionProperties"
+    );
     argList::addBoolOption
     (
         "cellDist",
         "write cell distribution as a labelList - for use with 'manual' "
@@ -132,15 +142,7 @@ int main(int argc, char *argv[])
 
 #include "setRootCase.H"
 
-    word regionName = fvMesh::defaultRegion;
-    word regionDir = word::null;
-
-    if (args.optionReadIfPresent("region", regionName))
-    {
-        regionDir = regionName;
-        Info<< "Decomposing mesh " << regionName << nl << endl;
-    }
-
+    bool allRegions = args.optionFound("allRegions");
     bool writeCellDist = args.optionFound("cellDist");
     bool copyUniform = args.optionFound("copyUniform");
     bool decomposeFieldsOnly = args.optionFound("fields");
@@ -152,6 +154,51 @@ int main(int argc, char *argv[])
     // Allow override of time
     instantList times = timeSelector::selectIfPresent(runTime, args);
 
+
+    wordList regionNames;
+    wordList regionDirs;
+    if (allRegions)
+    {
+        Info<< "Decomposing all regions in regionProperties" << nl << endl;
+        regionProperties rp(runTime);
+        forAllConstIter(HashTable<wordList>, rp, iter)
+        {
+            const wordList& regions = iter();
+            forAll(regions, i)
+            {
+                if (findIndex(regionNames, regions[i]) == -1)
+                {
+                    regionNames.append(regions[i]);
+                }
+            }
+        }
+        regionDirs = regionNames;
+    }
+    else
+    {
+        word regionName;
+        if (args.optionReadIfPresent("region", regionName))
+        {
+            regionNames = wordList(1, regionName);
+            regionDirs = regionNames;
+        }
+        else
+        {
+            regionNames = wordList(1, fvMesh::defaultRegion);
+            regionDirs = wordList(1, word::null);
+        }
+    }
+
+
+
+    forAll(regionNames, regionI)
+    {
+        const word& regionName = regionNames[regionI];
+        const word& regionDir = regionDirs[regionI];
+
+        Info<< "\n\nDecomposing mesh " << regionName << nl << endl;
+
+
         // determine the existing processor count directly
         label nProcs = 0;
         while
@@ -418,7 +465,10 @@ int main(int argc, char *argv[])
         );
 
         // Particles
-        PtrList<Cloud<indexedParticle> > lagrangianPositions(cloudDirs.size());
+        PtrList<Cloud<indexedParticle> > lagrangianPositions
+        (
+            cloudDirs.size()
+        );
         // Particles per cell
         PtrList< List<SLList<indexedParticle*>*> > cellParticles
         (
@@ -429,7 +479,8 @@ int main(int argc, char *argv[])
         (
             cloudDirs.size()
         );
-        PtrList<PtrList<labelFieldCompactIOField> > lagrangianLabelFieldFields
+        PtrList<PtrList<labelFieldCompactIOField> >
+            lagrangianLabelFieldFields
         (
             cloudDirs.size()
         );
@@ -437,7 +488,8 @@ int main(int argc, char *argv[])
         (
             cloudDirs.size()
         );
-        PtrList<PtrList<scalarFieldCompactIOField> > lagrangianScalarFieldFields
+        PtrList<PtrList<scalarFieldCompactIOField> >
+            lagrangianScalarFieldFields
         (
             cloudDirs.size()
         );
@@ -445,7 +497,8 @@ int main(int argc, char *argv[])
         (
             cloudDirs.size()
         );
-        PtrList<PtrList<vectorFieldCompactIOField> > lagrangianVectorFieldFields
+        PtrList<PtrList<vectorFieldCompactIOField> >
+            lagrangianVectorFieldFields
         (
             cloudDirs.size()
         );
@@ -469,7 +522,8 @@ int main(int argc, char *argv[])
         (
             cloudDirs.size()
         );
-        PtrList<PtrList<tensorFieldCompactIOField> > lagrangianTensorFieldFields
+        PtrList<PtrList<tensorFieldCompactIOField> >
+            lagrangianTensorFieldFields
         (
             cloudDirs.size()
         );
@@ -542,7 +596,8 @@ int main(int argc, char *argv[])
                     << " at position " << iter().position() << nl
                     << "Cell number should be between 0 and "
                     << mesh.nCells()-1 << nl
-                    << "On this mesh the particle should be in cell "
+                    << "On this mesh the particle should"
+                    << " be in cell "
                     << mesh.findCell(iter().position())
                     << exit(FatalError);
             }
@@ -839,7 +894,10 @@ int main(int argc, char *argv[])
 
             fieldDecomposer.decomposeFields(surfaceScalarFields);
             fieldDecomposer.decomposeFields(surfaceVectorFields);
-            fieldDecomposer.decomposeFields(surfaceSphericalTensorFields);
+            fieldDecomposer.decomposeFields
+            (
+                surfaceSphericalTensorFields
+            );
             fieldDecomposer.decomposeFields(surfaceSymmTensorFields);
             fieldDecomposer.decomposeFields(surfaceTensorFields);
 
@@ -1075,6 +1133,7 @@ int main(int argc, char *argv[])
                 }
             }
         }
+    }
 
     Info<< "\nEnd.\n" << endl;
 
@@ -1,11 +1,13 @@
 EXE_INC = \
     -I$(LIB_SRC)/finiteVolume/lnInclude \
     -I$(LIB_SRC)/lagrangian/basic/lnInclude \
-    -I$(LIB_SRC)/parallel/reconstruct/reconstruct/lnInclude
+    -I$(LIB_SRC)/parallel/reconstruct/reconstruct/lnInclude \
+    -I$(LIB_SRC)/regionModels/regionModel/lnInclude
 
 EXE_LIBS = \
     -lfiniteVolume \
     -lgenericPatchFields \
     -llagrangian \
     -lmeshTools \
-    -lreconstruct
+    -lreconstruct \
+    -lregionModels
@@ -25,7 +25,7 @@ Application
     reconstructPar
 
 Description
-    Reconstructs a mesh and fields of a case that is decomposed for parallel
+    Reconstructs fields of a case that is decomposed for parallel
     execution of OpenFOAM.
 
 \*---------------------------------------------------------------------------*/
@@ -36,6 +36,7 @@ Description
 #include "fvCFD.H"
 #include "IOobjectList.H"
 #include "processorMeshes.H"
+#include "regionProperties.H"
 #include "fvFieldReconstructor.H"
 #include "pointFieldReconstructor.H"
 #include "reconstructLagrangian.H"
@@ -44,11 +45,21 @@ Description
 
 int main(int argc, char *argv[])
 {
+    argList::addNote
+    (
+        "Reconstruct fields of a parallel case"
+    );
+
     // enable -constant ... if someone really wants it
     // enable -zeroTime to prevent accidentally trashing the initial fields
     timeSelector::addOptions(true, true);
     argList::noParallel();
+# include "addRegionOption.H"
+    argList::addBoolOption
+    (
+        "allRegions",
+        "operate on all regions in regionProperties"
+    );
     argList::addOption
     (
         "fields",
@@ -102,6 +113,7 @@ int main(int argc, char *argv[])
 
 
     const bool newTimes = args.optionFound("newTimes");
+    const bool allRegions = args.optionFound("allRegions");
 
 
     // determine the processor count directly
@@ -159,20 +171,69 @@ int main(int argc, char *argv[])
     }
 
 
-# include "createNamedMesh.H"
-    word regionDir = word::null;
-    if (regionName != fvMesh::defaultRegion)
-    {
-        regionDir = regionName;
-    }
 
     // Set all times on processor meshes equal to reconstructed mesh
     forAll(databases, procI)
     {
         databases[procI].setTime(runTime.timeName(), runTime.timeIndex());
     }
 
 
+    wordList regionNames;
+    wordList regionDirs;
+    if (allRegions)
+    {
+        Info<< "Reconstructing for all regions in regionProperties" << nl
+            << endl;
+        regionProperties rp(runTime);
+        forAllConstIter(HashTable<wordList>, rp, iter)
+        {
+            const wordList& regions = iter();
+            forAll(regions, i)
+            {
+                if (findIndex(regionNames, regions[i]) == -1)
+                {
+                    regionNames.append(regions[i]);
+                }
+            }
+        }
+        regionDirs = regionNames;
+    }
+    else
+    {
+        word regionName;
+        if (args.optionReadIfPresent("region", regionName))
+        {
+            regionNames = wordList(1, regionName);
+            regionDirs = regionNames;
+        }
+        else
+        {
+            regionNames = wordList(1, fvMesh::defaultRegion);
+            regionDirs = wordList(1, word::null);
+        }
+    }
+
+
+    forAll(regionNames, regionI)
+    {
+        const word& regionName = regionNames[regionI];
+        const word& regionDir = regionDirs[regionI];
+
+        Info<< "\n\nReconstructing fields for mesh " << regionName << nl
+            << endl;
+
+        fvMesh mesh
+        (
+            IOobject
+            (
+                regionName,
+                runTime.timeName(),
+                runTime,
+                Foam::IOobject::MUST_READ
+            )
+        );
+
+
         // Read all meshes and addressing to reconstructed mesh
         processorMeshes procMeshes(databases, regionName);
 
@@ -223,14 +284,17 @@ int main(int argc, char *argv[])
 
         if (procStat == fvMesh::POINTS_MOVED)
         {
-            // Reconstruct the points for moving mesh cases and write them out
+            // Reconstruct the points for moving mesh cases and write
+            // them out
             procMeshes.reconstructPoints(mesh);
         }
         else if (meshStat != procStat)
        {
             WarningIn(args.executable())
-                << "readUpdate for the reconstructed mesh:" << meshStat << nl
-                << "readUpdate for the processor meshes :" << procStat << nl
+                << "readUpdate for the reconstructed mesh:"
+                << meshStat << nl
+                << "readUpdate for the processor meshes :"
+                << procStat << nl
                 << "These should be equal or your addressing"
                 << " might be incorrect."
                 << " Please check your time directories for any "
@@ -239,7 +303,11 @@ int main(int argc, char *argv[])
 
 
         // Get list of objects from processor0 database
-        IOobjectList objects(procMeshes.meshes()[0], databases[0].timeName());
+        IOobjectList objects
+        (
+            procMeshes.meshes()[0],
+            databases[0].timeName()
+        );
 
         {
             // If there are any FV fields, reconstruct them
@@ -264,7 +332,8 @@ int main(int argc, char *argv[])
                 objects,
                 selectedFields
             );
-            fvReconstructor.reconstructFvVolumeInternalFields<sphericalTensor>
+            fvReconstructor.reconstructFvVolumeInternalFields
+            <sphericalTensor>
             (
                 objects,
                 selectedFields
@@ -346,7 +415,11 @@ int main(int argc, char *argv[])
 
             forAll(pMeshes, procI)
             {
-                pMeshes.set(procI, new pointMesh(procMeshes.meshes()[procI]));
+                pMeshes.set
+                (
+                    procI,
+                    new pointMesh(procMeshes.meshes()[procI])
+                );
             }
 
             pointFieldReconstructor pointReconstructor
@@ -407,14 +480,17 @@ int main(int argc, char *argv[])
             (
                 readDir
                 (
-                    databases[procI].timePath() / regionDir / cloud::prefix,
+                    databases[procI].timePath()
+                  / regionDir
+                  / cloud::prefix,
                     fileName::DIRECTORY
                 )
             );
 
             forAll(cloudDirs, i)
             {
-                // Check if we already have cloud objects for this cloudname
+                // Check if we already have cloud objects for this
+                // cloudname
                 HashTable<IOobjectList>::const_iterator iter =
                     cloudObjects.find(cloudDirs[i]);
 
@@ -428,7 +504,10 @@ int main(int argc, char *argv[])
                         cloud::prefix/cloudDirs[i]
                     );
 
-                    IOobject* positionsPtr = sprayObjs.lookup("positions");
+                    IOobject* positionsPtr = sprayObjs.lookup
+                    (
+                        "positions"
+                    );
 
                     if (positionsPtr)
                     {
@@ -444,7 +523,10 @@ int main(int argc, char *argv[])
             // Pass2: reconstruct the cloud
             forAllConstIter(HashTable<IOobjectList>, cloudObjects, iter)
             {
-                const word cloudName = string::validate<word>(iter.key());
+                const word cloudName = string::validate<word>
+                (
+                    iter.key()
+                );
 
                 // Objects (on arbitrary processor)
                 const IOobjectList& sprayObjs = iter();
@@ -563,10 +645,13 @@ int main(int argc, char *argv[])
                     Info<< "No lagrangian fields" << nl << endl;
                 }
             }
         }
+    }
 
     // If there are any "uniform" directories copy them from
     // the master processor
 
     forAll(timeDirs, timeI)
     {
         fileName uniformDir0 = databases[0].timePath()/"uniform";
         if (isDir(uniformDir0))
         {
 
@@ -31,27 +31,18 @@ done
 #runApplication `getApplication`
 
 # Decompose
-for i in bottomAir topAir heater leftSolid rightSolid
-do
-    decomposePar -region $i > log.decomposePar.$i 2>&1
-done
+runApplication decomposePar -allRegions
 
 # Run
 runParallel `getApplication` 4
 
 # Reconstruct
-for i in bottomAir topAir heater leftSolid rightSolid
-do
-    reconstructPar -region $i > log.reconstructPar.$i 2>&1
-done
+runApplication reconstructPar -allRegions
 
 
 echo
 echo "creating files for paraview post-processing"
 echo
-for i in bottomAir topAir heater leftSolid rightSolid
-do
-    paraFoam -touch -region $i
-done
+paraFoam -touchAll
 
 # ----------------------------------------------------------------- end-of-file
@@ -31,19 +31,13 @@ done
 #runApplication chtMultiRegionFoam
 
 # Decompose
-for i in bottomWater topAir heater leftSolid rightSolid
-do
-    decomposePar -region $i > log.decomposePar.$i 2>&1
-done
+runApplication decomposePar -allRegions
 
 # Run
 runParallel `getApplication` 4
 
 # Reconstruct
-for i in bottomWater topAir heater leftSolid rightSolid
-do
-    reconstructPar -region $i > log.reconstructPar.$i 2>&1
-done
+runApplication reconstructPar -allRegions
 
 
 echo
@@ -36,27 +36,18 @@ runApplication `getApplication`
 
 
 ## Decompose
-#for i in bottomAir topAir heater leftSolid rightSolid
-#do
-#    decomposePar -region $i > log.decomposePar.$i 2>&1
-#done
+#runApplication decomposePar -allRegions
 #
 ## Run
 #runParallel `getApplication` 4
 #
 ## Reconstruct
-#for i in bottomAir topAir heater leftSolid rightSolid
-#do
-#    reconstructPar -region $i > log.reconstructPar.$i 2>&1
-#done
+#runApplication reconstructPar -allRegions
 
 
 echo
 echo "creating files for paraview post-processing"
 echo
-for i in bottomAir topAir heater leftSolid rightSolid
-do
-    paraFoam -touch -region $i
-done
+paraFoam -touchAll
 
 # ----------------------------------------------------------------- end-of-file
@@ -31,19 +31,13 @@ done
 runApplication `getApplication`
 
 ## Decompose
-#for i in bottomAir topAir heater leftSolid rightSolid
-#do
-#    decomposePar -region $i > log.decomposePar.$i 2>&1
-#done
+#runApplication decomposePar -allRegions
 #
 ## Run
 #runParallel `getApplication` 4
 #
 ## Reconstruct
-#for i in bottomAir topAir heater leftSolid rightSolid
-#do
-#    reconstructPar -region $i > log.reconstructPar.$i 2>&1
-#done
+#runApplication reconstructPar -allRegions
 
 
 echo
@@ -44,10 +44,7 @@ runApplication `getApplication`
 
 ## Run in parallel
 ## Decompose
-#for i in bottomAir topAir heater leftSolid rightSolid
-#do
-#    decomposePar -region $i > log.decomposePar.$i 2>&1
-#done
+#runApplication decomposePar -allRegions
 #
 #for i in bottomAir topAir
 #do
@@ -63,10 +60,7 @@ runApplication `getApplication`
 #runParallel `getApplication` 4
 #
 ## Reconstruct
-#for i in bottomAir topAir heater leftSolid rightSolid
-#do
-#    reconstructPar -region $i > log.reconstructPar.$i 2>&1
-#done
+#runApplication reconstructPar -allRegions
 
 
 echo
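Both utilities take the region list from the case's regionProperties dictionary, read from the constant directory via the regionProperties rp(runTime) call added above. For orientation only, a hedged sketch of such a dictionary for chtMultiRegion-style cases; the group and region names match the tutorials in this commit, but the exact entry layout depends on the OpenFOAM version and is not part of this change:

    // constant/regionProperties (illustrative sketch, not from this commit)
    regions
    (
        fluid   (bottomAir topAir)
        solid   (heater leftSolid rightSolid)
    );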