Merge branch 'feature-decomposeParDict' into 'develop'

Feature decompose par dict

Merges the remaining functionality for the -decomposeParDict parallel argList option.

See merge request !10
Andrew Heather
2015-11-25 15:35:21 +00:00
28 changed files with 561 additions and 203 deletions
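
For orientation, the common pattern this merge rolls out across the utilities below is sketched here; it is assembled from the hunks in this commit (argList::addOption, optionReadIfPresent, decompositionModel) and assumes the surrounding main() scope with args, runTime and mesh, so treat it as an illustration rather than a verbatim excerpt:

    // Register the optional override of the decomposeParDict location
    argList::addOption
    (
        "decomposeParDict",
        "file",
        "read decomposePar dictionary from specified location"
    );

    // ... after argument parsing ...

    // Resolve the optional location; a directory is taken to contain a
    // file named decomposeParDict
    fileName decompDictFile;
    if (args.optionReadIfPresent("decomposeParDict", decompDictFile))
    {
        if (isDir(decompDictFile))
        {
            decompDictFile = decompDictFile/"decomposeParDict";
        }
    }

    // Hand the (possibly empty) location to the decomposition machinery;
    // judging by the default arguments in the hunks below, an empty name
    // falls back to the usual system/decomposeParDict
    labelList decomp = decompositionModel::New(mesh, decompDictFile)
        .decomposer().decompose(mesh, mesh.cellCentres());

In practice this allows e.g. foamyHexMesh -parallel -decomposeParDict <file> to pick up a decomposition dictionary outside the case's system directory.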

View File

@ -63,6 +63,10 @@ extrudeModel wedge;
//- Extrudes into sphere with grading according to pressure (atmospherics)
//extrudeModel sigmaRadial;
//- Extrudes by interpolating along a path between two (topologically identical)
// surfaces (e.g. one is an offset version of the other)
//extrudeModel offsetSurface;
nLayers 10;
expansionRatio 1.0;
@ -105,6 +109,16 @@ sigmaRadialCoeffs
pStrat 1;
}
offsetSurfaceCoeffs
{
// Surface that the mesh has been meshed to
baseSurface "$FOAM_CASE/constant/triSurface/DTC-scaled-inflated.obj";
// Surface to fill in to
offsetSurface "$FOAM_CASE/constant/triSurface/DTC-scaled.obj";
}
// Do front and back need to be merged? Usually only makes sense for 360
// degree wedges.
mergeFaces false;
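
For reference, a minimal extrudeMeshDict fragment selecting the new model might look like the following; the surface paths are placeholders, and the optional project switch corresponds to the lookupOrDefault("project", false) in offsetSurface.C further down:

    extrudeModel        offsetSurface;

    nLayers             10;
    expansionRatio      1.0;

    offsetSurfaceCoeffs
    {
        // Surface that the mesh has been meshed to
        baseSurface     "$FOAM_CASE/constant/triSurface/base.obj";

        // Topologically identical offset of baseSurface to fill in to
        offsetSurface   "$FOAM_CASE/constant/triSurface/offset.obj";

        // Optional: re-project the interpolated points onto offsetSurface
        project         false;
    }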

View File

@ -16,6 +16,7 @@ EXE_INC = \
-I$(LIB_SRC)/finiteVolume/lnInclude \
-I$(LIB_SRC)/meshTools/lnInclude \
-I$(LIB_SRC)/parallel/decompose/decompositionMethods/lnInclude \
-I$(LIB_SRC)/parallel/decompose/decompose/lnInclude \
-I$(LIB_SRC)/edgeMesh/lnInclude \
-I$(LIB_SRC)/fileFormats/lnInclude \
-I$(LIB_SRC)/dynamicMesh/lnInclude \
@ -29,6 +30,7 @@ EXE_INC = \
LIB_LIBS = \
${CGAL_LIBS} \
-lmeshTools \
-ldecompose \
-ledgeMesh \
-lfileFormats \
-ltriSurface \

View File

@ -30,6 +30,7 @@ License
#include "Time.H"
#include "Random.H"
#include "pointConversion.H"
#include "decompositionModel.H"
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
@ -149,7 +150,8 @@ void Foam::backgroundMeshDecomposition::initialRefinement()
const conformationSurfaces& geometry = geometryToConformTo_;
decompositionMethod& decomposer = decomposerPtr_();
decompositionMethod& decomposer =
decompositionModel::New(mesh_).decomposer();
volScalarField::InternalField& icellWeights = cellWeights.internalField();
@ -782,7 +784,8 @@ Foam::backgroundMeshDecomposition::backgroundMeshDecomposition
const Time& runTime,
Random& rndGen,
const conformationSurfaces& geometryToConformTo,
const dictionary& coeffsDict
const dictionary& coeffsDict,
const fileName& decompDictFile
)
:
runTime_(runTime),
@ -810,18 +813,6 @@ Foam::backgroundMeshDecomposition::backgroundMeshDecomposition
bFTreePtr_(),
allBackgroundMeshBounds_(Pstream::nProcs()),
globalBackgroundBounds_(),
decomposeDict_
(
IOobject
(
"decomposeParDict",
runTime_.system(),
runTime_,
IOobject::MUST_READ_IF_MODIFIED,
IOobject::NO_WRITE
)
),
decomposerPtr_(decompositionMethod::New(decomposeDict_)),
mergeDist_(1e-6*mesh_.bounds().mag()),
spanScale_(readScalar(coeffsDict.lookup("spanScale"))),
minCellSizeLimit_
@ -846,14 +837,17 @@ Foam::backgroundMeshDecomposition::backgroundMeshDecomposition
<< exit(FatalError);
}
if (!decomposerPtr_().parallelAware())
const decompositionMethod& decomposer =
decompositionModel::New(mesh_, decompDictFile).decomposer();
if (!decomposer.parallelAware())
{
FatalErrorIn
(
"void Foam::backgroundMeshDecomposition::initialRefinement() const"
)
<< "You have selected decomposition method "
<< decomposerPtr_().typeName
<< decomposer.typeName
<< " which is not parallel aware." << endl
<< exit(FatalError);
}
@ -1008,7 +1002,10 @@ Foam::backgroundMeshDecomposition::distribute
<< endl;
}
labelList newDecomp = decomposerPtr_().decompose
decompositionMethod& decomposer =
decompositionModel::New(mesh_).decomposer();
labelList newDecomp = decomposer.decompose
(
mesh_,
mesh_.cellCentres(),
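
Taken together, the hunks in this file replace the locally read decomposeParDict (the decomposeDict_/decomposerPtr_ members removed above) with the decomposer held by decompositionModel. A condensed sketch of the resulting call pattern, with the assumption (suggested by the call sites here) that decompositionModel::New caches one instance per mesh, so the dictionary location only has to be supplied on first construction:

    // Constructor: construct/register the model, optionally pointing it
    // at an alternative decomposeParDict location
    decompositionModel::New(mesh_, decompDictFile);

    // Later calls (initialRefinement, distribute) just look it up again
    decompositionMethod& decomposer =
        decompositionModel::New(mesh_).decomposer();

    labelList newDecomp = decomposer.decompose
    (
        mesh_,
        mesh_.cellCentres(),
        cellWeights.internalField()   // weight field as used in distribute()
    );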

View File

@ -127,13 +127,7 @@ class backgroundMeshDecomposition
// a point that is not found on any processor is in the domain at all
treeBoundBox globalBackgroundBounds_;
//- Decomposition dictionary
IOdictionary decomposeDict_;
//- Decomposition method
autoPtr<decompositionMethod> decomposerPtr_;
//- Merge distance required by fvMeshDistribute
//- merge distance required by fvMeshDistribute
scalar mergeDist_;
//- Scale of a cell span vs cell size used to decide to refine a cell
@ -204,7 +198,8 @@ public:
const Time& runTime,
Random& rndGen,
const conformationSurfaces& geometryToConformTo,
const dictionary& coeffsDict
const dictionary& coeffsDict,
const fileName& decompDictFile = ""
);
@ -324,8 +319,6 @@ public:
//- Return the point level of the underlying mesh
inline const labelList& pointLevel() const;
//- Return the current decomposition method
inline const decompositionMethod& decomposer() const;
};

View File

@ -57,11 +57,4 @@ const Foam::labelList& Foam::backgroundMeshDecomposition::pointLevel() const
}
const Foam::decompositionMethod&
Foam::backgroundMeshDecomposition::decomposer() const
{
return decomposerPtr_();
}
// ************************************************************************* //

View File

@ -392,61 +392,59 @@ Foam::cellShapeControlMesh::cellShapeControlMesh(const Time& runTime)
if (mesh.nPoints() == this->vertexCount())
{
pointScalarField sizes
IOobject io
(
IOobject
(
"sizes",
runTime.timeName(),
meshSubDir,
runTime,
IOobject::READ_IF_PRESENT,
IOobject::NO_WRITE,
false
),
pointMesh::New(mesh)
"sizes",
runTime.timeName(),
meshSubDir,
runTime,
IOobject::MUST_READ,
IOobject::NO_WRITE,
false
);
triadIOField alignments
(
IOobject
(
"alignments",
mesh.time().timeName(),
meshSubDir,
mesh.time(),
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE,
false
)
);
if
(
sizes.size() == this->vertexCount()
&& alignments.size() == this->vertexCount()
)
if (io.headerOk())
{
for
pointScalarField sizes(io, pointMesh::New(mesh));
triadIOField alignments
(
Finite_vertices_iterator vit = finite_vertices_begin();
vit != finite_vertices_end();
++vit
)
IOobject
(
"alignments",
mesh.time().timeName(),
meshSubDir,
mesh.time(),
IOobject::MUST_READ,
IOobject::NO_WRITE,
false
)
);
if (alignments.size() == this->vertexCount())
{
vit->targetCellSize() = sizes[vit->index()];
vit->alignment() = alignments[vit->index()];
for
(
Finite_vertices_iterator vit = finite_vertices_begin();
vit != finite_vertices_end();
++vit
)
{
vit->targetCellSize() = sizes[vit->index()];
vit->alignment() = alignments[vit->index()];
}
}
else
{
FatalErrorIn
(
"Foam::cellShapeControlMesh::cellShapeControlMesh"
"(const Time&)"
) << "Cell alignments point field " << alignments.size()
<< " is not the same size as the number of vertices"
<< " in the mesh " << this->vertexCount()
<< abort(FatalError);
}
}
else
{
FatalErrorIn
(
"Foam::cellShapeControlMesh::cellShapeControlMesh"
"(const Time&)"
) << "Cell size point field is not the same size as the "
<< "mesh."
<< abort(FatalError);
}
}
}
@ -672,7 +670,7 @@ void Foam::cellShapeControlMesh::write() const
IOobject::AUTO_WRITE
),
pointMesh::New(mesh),
scalar(0)
dimensionedScalar("zero", dimLength, scalar(0))
);
triadIOField alignments

View File

@ -837,7 +837,8 @@ bool Foam::conformalVoronoiMesh::ownerAndNeighbour
Foam::conformalVoronoiMesh::conformalVoronoiMesh
(
const Time& runTime,
const dictionary& foamyHexMeshDict
const dictionary& foamyHexMeshDict,
const fileName& decompDictFile
)
:
DistributedDelaunayMesh<Delaunay>(runTime),
@ -876,7 +877,8 @@ Foam::conformalVoronoiMesh::conformalVoronoiMesh
foamyHexMeshControls().foamyHexMeshDict().subDict
(
"backgroundMeshDecomposition"
)
),
decompDictFile
)
: NULL
),

View File

@ -163,10 +163,8 @@ private:
//- Limiting bound box before infinity begins
treeBoundBox limitBounds_;
//- Point pairs
mutable pointPairs<Delaunay> ptPairs_;
//- Feature point conformer
featurePointConformer ftPtConformer_;
//- Search tree for edge point locations
@ -546,7 +544,7 @@ private:
) const;
//- Check if a location is in the exclusion range of an existing feature
//- Edge conformation location
// edge conformation location
bool nearFeatureEdgeLocation
(
const pointIndexHit& pHit,
@ -730,8 +728,7 @@ private:
label classifyBoundaryPoint(Cell_handle cit) const;
//- Index all of the the Delaunay cells and calculate their
//- Dual points
//- Index all of the Delaunay cells and calculate their dual points
void indexDualVertices
(
pointField& pts,
@ -874,7 +871,8 @@ public:
conformalVoronoiMesh
(
const Time& runTime,
const dictionary& foamyHexMeshDict
const dictionary& foamyHexMeshDict,
const fileName& decompDictFile = ""
);

View File

@ -1292,9 +1292,9 @@ void Foam::conformalVoronoiMesh::indexDualVertices
}
}
OBJstream snapping1("snapToSurface1.obj");
OBJstream snapping2("snapToSurface2.obj");
OFstream tetToSnapTo("tetsToSnapTo.obj");
//OBJstream snapping1("snapToSurface1.obj");
//OBJstream snapping2("snapToSurface2.obj");
//OFstream tetToSnapTo("tetsToSnapTo.obj");
for
(

View File

@ -88,15 +88,13 @@ class indexedCell
// Private data
//- The index for this Delaunay tetrahedral cell. Type information is
//- Also carried:
// also carried:
// ctFar : the dual point of this cell does not form part of the
// internal or boundary of the dual mesh
// >=0 : the (local) index of an internal or boundary dual point,
// not on a processor face
// < 0 && > ctFar : the (global) index of a dual point on a processor
// face
Foam::label index_;
//- The number of times that this Delaunay cell has been limited

View File

@ -30,7 +30,8 @@ EXE_LIBS = \
-lconformalVoronoiMesh \
-lmeshTools \
-ldecompositionMethods \
-L$(FOAM_LIBBIN)/dummy -lptscotchDecomp \
-ldecompose \
-L$(FOAM_LIBBIN)/dummy -lptscotchDecomp -lscotchDecomp \
-ledgeMesh \
-lfileFormats \
-ltriSurface \

View File

@ -62,6 +62,17 @@ int main(int argc, char *argv[])
const bool checkGeometry = args.optionFound("checkGeometry");
const bool conformationOnly = args.optionFound("conformationOnly");
// Allow override of decomposeParDict location
fileName decompDictFile;
if (args.optionReadIfPresent("decomposeParDict", decompDictFile))
{
if (isDir(decompDictFile))
{
decompDictFile = decompDictFile / "decomposeParDict";
}
}
IOdictionary foamyHexMeshDict
(
IOobject
@ -114,7 +125,7 @@ int main(int argc, char *argv[])
Info<< "Create mesh for time = " << runTime.timeName() << nl << endl;
conformalVoronoiMesh mesh(runTime, foamyHexMeshDict);
conformalVoronoiMesh mesh(runTime, foamyHexMeshDict, decompDictFile);
if (conformationOnly)
@ -145,7 +156,7 @@ int main(int argc, char *argv[])
}
Info<< nl << "End" << nl << endl;
Info<< "\nEnd\n" << endl;
return 0;
}

View File

@ -271,11 +271,18 @@ motionControl
forceInitialPointInsertion on;
priority 1;
mode inside;
// Cell size at surface
surfaceCellSizeFunction uniformValue;
uniformValueCoeffs
{
surfaceCellSizeCoeff 0.5;
}
// Cell size inside the domain: a region of thickness surfaceOffset
// around the surface keeps the (constant) surface cell size, then
// grades down to distanceCellSize over a distance of linearDistance.
cellSizeFunction surfaceOffsetLinearDistance;
surfaceOffsetLinearDistanceCoeffs
{
@ -375,9 +382,17 @@ polyMeshFiltering
// Filter small and sliver faces
filterFaces off;
// Write the underlying Delaunay tet mesh at output time
// Write the underlying Delaunay tet mesh (at output time)
writeTetDualMesh false;
// Write the Delaunay tet mesh used for interpolating cell size and
// alignment (at output time)
writeCellShapeControlMesh true;
// Write the hex/split-hex mesh used for parallel load balancing
// (at output time)
writeBackgroundMeshDecomposition true;
// Upper limit on the size of faces to be filtered, expressed as a
// fraction of the local target cell size
filterSizeCoeff 0.2;
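
Read literally, the motionControl comments above describe a piecewise-linear grading. A hedged sketch of that rule, purely illustrative (the coefficient names are taken from the dictionary comments; the formula is an interpretation of the wording, not an excerpt from the size-function source):

    // Cell size as a function of distance d from the surface: constant at
    // the surface cell size within surfaceOffset, then linear towards
    // distanceCellSize over linearDistance, constant beyond that
    scalar surfaceOffsetLinearDistanceSize
    (
        const scalar d,
        const scalar surfaceCellSize,
        const scalar surfaceOffset,
        const scalar linearDistance,
        const scalar distanceCellSize
    )
    {
        if (d <= surfaceOffset)
        {
            return surfaceCellSize;
        }
        else if (d >= surfaceOffset + linearDistance)
        {
            return distanceCellSize;
        }

        const scalar t = (d - surfaceOffset)/linearDistance;
        return surfaceCellSize + t*(distanceCellSize - surfaceCellSize);
    }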

View File

@ -9,6 +9,7 @@ EXE_INC = \
${CGAL_INC} \
-I../conformalVoronoiMesh/lnInclude \
-I$(LIB_SRC)/parallel/decompose/decompositionMethods/lnInclude \
-I$(LIB_SRC)/parallel/decompose/decompose/lnInclude \
-I$(LIB_SRC)/edgeMesh/lnInclude \
-I$(LIB_SRC)/mesh/autoMesh/lnInclude \
-I$(LIB_SRC)/triSurface/lnInclude \
@ -26,6 +27,7 @@ EXE_LIBS = \
-lgmp \
-lconformalVoronoiMesh \
-ldecompositionMethods /* -L$(FOAM_LIBBIN)/dummy -lscotchDecomp */ \
-ldecompose \
-ledgeMesh \
-ltriSurface \
-lmeshTools \

View File

@ -44,6 +44,7 @@ Description
#include "isoSurfaceCell.H"
#include "vtkSurfaceWriter.H"
#include "syncTools.H"
#include "decompositionModel.H"
using namespace Foam;
@ -467,7 +468,7 @@ int main(int argc, char *argv[])
// Determine the number of cells in each direction.
const vector span = bb.span();
vector nScalarCells = span/cellShapeControls().defaultCellSize();
vector nScalarCells = span/cellShapeControls.defaultCellSize();
// Calculate initial cell size to be a little bit smaller than the
// defaultCellSize to avoid initial refinement triggering.
@ -521,28 +522,21 @@ int main(int argc, char *argv[])
Info<< "Loaded mesh:" << endl;
printMeshData(mesh);
// Allocate a decomposer
IOdictionary decompositionDict
(
IOobject
(
"decomposeParDict",
runTime.system(),
mesh,
IOobject::MUST_READ_IF_MODIFIED,
IOobject::NO_WRITE
)
);
// Allow override of decomposeParDict location
fileName decompDictFile;
if (args.optionReadIfPresent("decomposeParDict", decompDictFile))
{
if (isDir(decompDictFile))
{
decompDictFile = decompDictFile / "decomposeParDict";
}
}
autoPtr<decompositionMethod> decomposer
labelList decomp = decompositionModel::New
(
decompositionMethod::New
(
decompositionDict
)
);
labelList decomp = decomposer().decompose(mesh, mesh.cellCentres());
mesh,
decompDictFile
).decomposer().decompose(mesh, mesh.cellCentres());
// Global matching tolerance
const scalar tolDim = getMergeDistance
@ -574,18 +568,15 @@ int main(int argc, char *argv[])
Info<< "Refining backgroud mesh according to cell size specification" << nl
<< endl;
const dictionary& backgroundMeshDict =
foamyHexMeshDict.subDict("backgroundMeshDecomposition");
backgroundMeshDecomposition backgroundMesh
(
1.0, //spanScale,ratio of poly cell size v.s. hex cell size
0.0, //minCellSizeLimit
0, //minLevels
4, //volRes, check multiple points per cell
20.0, //maxCellWeightCoeff
runTime,
geometryToConformTo,
cellShapeControls(),
rndGen,
foamyHexMeshDict
geometryToConformTo,
backgroundMeshDict
);
if (writeMesh)

View File

@ -221,7 +221,7 @@ int main(int argc, char *argv[])
Info<< "Finished extruding in = "
<< runTime.cpuTimeIncrement() << " s." << endl;
Info<< nl << "End\n" << endl;
Info<< "\nEnd\n" << endl;
return 0;
}

View File

@ -8,14 +8,18 @@ EXE_INC = \
-I$(LIB_SRC)/surfMesh/lnInclude \
-I$(LIB_SRC)/dynamicMesh/lnInclude \
-I$(LIB_SRC)/edgeMesh/lnInclude \
-I$(LIB_SRC)/parallel/decompose/decompose/lnInclude \
-I$(LIB_SRC)/finiteVolume/lnInclude
EXE_LIBS = \
-lfiniteVolume \
-ldecompositionMethods \
-L$(FOAM_LIBBIN)/dummy -lptscotchDecomp \
/* note: scotch < 6.0 does not like both scotch and ptscotch together */ \
-lscotchDecomp \
-lmeshTools \
-lsurfMesh \
-lfileFormats \
-ldynamicMesh \
-ldecompose \
-lautoMesh

View File

@ -3,7 +3,7 @@
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011-2015 OpenFOAM Foundation
\\/ M anipulation | Copyright (C) 2015 OpenCFD Ltd.
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -57,6 +57,7 @@ Description
#include "MeshedSurface.H"
#include "globalIndex.H"
#include "IOmanip.H"
#include "decompositionModel.H"
using namespace Foam;
@ -819,15 +820,28 @@ int main(int argc, char *argv[])
{
if (Pstream::parRun())
{
fileName decompDictFile;
if (args.optionReadIfPresent("decomposeParDict", decompDictFile))
{
if (isDir(decompDictFile))
{
decompDictFile = decompDictFile/"decomposeParDict";
}
}
decomposeDict = IOdictionary
(
IOobject
decompositionModel::selectIO
(
"decomposeParDict",
runTime.system(),
mesh,
IOobject::MUST_READ_IF_MODIFIED,
IOobject::NO_WRITE
IOobject
(
"decomposeParDict",
runTime.system(),
mesh,
IOobject::MUST_READ_IF_MODIFIED,
IOobject::NO_WRITE
),
decompDictFile
)
);
}

View File

@ -3,7 +3,7 @@
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011-2015 OpenFOAM Foundation
\\/ M anipulation | Copyright (C) 2015 OpenCFD Ltd.
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -143,7 +143,7 @@ int main(int argc, char *argv[])
);
argList::noParallel();
Foam::argList::addOption
argList::addOption
(
"decomposeParDict",
"file",

View File

@ -2,6 +2,8 @@ EXE_INC = \
-I$(LIB_SRC)/meshTools/lnInclude \
-I$(LIB_SRC)/lagrangian/basic/lnInclude \
-I$(LIB_SRC)/finiteVolume/lnInclude \
-I$(LIB_SRC)/parallel/decompose/decompose/lnInclude \
-I$(LIB_SRC)/parallel/decompose/decompositionMethods/lnInclude \
-I$(LIB_SRC)/sampling/lnInclude
EXE_LIBS = \
@ -9,4 +11,5 @@ EXE_LIBS = \
-lmeshTools \
-llagrangian \
-lfiniteVolume \
-ldecompose \
-lgenericPatchFields

View File

@ -36,9 +36,48 @@ Description
#include "meshToMesh0.H"
#include "processorFvPatch.H"
#include "MapMeshes.H"
#include "decompositionModel.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
int readNumProcs
(
const argList& args,
const word& optionName,
const Time& runTime
)
{
fileName dictFile;
if (args.optionReadIfPresent(optionName, dictFile))
{
if (isDir(dictFile))
{
dictFile = dictFile/"decomposeParDict";
}
}
return readInt
(
IOdictionary
(
decompositionModel::selectIO
(
IOobject
(
"decomposeParDict",
runTime.system(),
runTime,
IOobject::MUST_READ_IF_MODIFIED,
IOobject::NO_WRITE,
false
),
dictFile
)
).lookup("numberOfSubdomains")
);
}
void mapConsistentMesh
(
const fvMesh& meshSource,
@ -225,6 +264,19 @@ int main(int argc, char *argv[])
"subtract",
"subtract mapped source from target"
);
argList::addOption
(
"sourceDecomposeParDict",
"file",
"read decomposePar dictionary from specified location"
);
argList::addOption
(
"targetDecomposeParDict",
"file",
"read decomposePar dictionary from specified location"
);
argList args(argc, argv);
@ -320,20 +372,13 @@ int main(int argc, char *argv[])
if (parallelSource && !parallelTarget)
{
IOdictionary decompositionDict
int nProcs = readNumProcs
(
IOobject
(
"decomposeParDict",
runTimeSource.system(),
runTimeSource,
IOobject::MUST_READ_IF_MODIFIED,
IOobject::NO_WRITE
)
args,
"sourceDecomposeParDict",
runTimeSource
);
int nProcs(readInt(decompositionDict.lookup("numberOfSubdomains")));
Info<< "Create target mesh\n" << endl;
fvMesh meshTarget
@ -399,19 +444,13 @@ int main(int argc, char *argv[])
}
else if (!parallelSource && parallelTarget)
{
IOdictionary decompositionDict
int nProcs = readNumProcs
(
IOobject
(
"decomposeParDict",
runTimeTarget.system(),
runTimeTarget,
IOobject::MUST_READ_IF_MODIFIED,
IOobject::NO_WRITE
)
args,
"targetDecomposeParDict",
runTimeTarget
);
int nProcs(readInt(decompositionDict.lookup("numberOfSubdomains")));
Info<< "Create source mesh\n" << endl;
@ -478,39 +517,17 @@ int main(int argc, char *argv[])
}
else if (parallelSource && parallelTarget)
{
IOdictionary decompositionDictSource
int nProcsSource = readNumProcs
(
IOobject
(
"decomposeParDict",
runTimeSource.system(),
runTimeSource,
IOobject::MUST_READ_IF_MODIFIED,
IOobject::NO_WRITE
)
args,
"sourceDecomposeParDict",
runTimeSource
);
int nProcsSource
int nProcsTarget = readNumProcs
(
readInt(decompositionDictSource.lookup("numberOfSubdomains"))
);
IOdictionary decompositionDictTarget
(
IOobject
(
"decomposeParDict",
runTimeTarget.system(),
runTimeTarget,
IOobject::MUST_READ_IF_MODIFIED,
IOobject::NO_WRITE
)
);
int nProcsTarget
(
readInt(decompositionDictTarget.lookup("numberOfSubdomains"))
args,
"targetDecomposeParDict",
runTimeTarget
);
List<boundBox> bbsTarget(nProcsTarget);
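
The readNumProcs helper added at the top of this file centralises what the three branches previously did inline: honour an optional -sourceDecomposeParDict / -targetDecomposeParDict location (a directory is taken to contain decomposeParDict), otherwise fall back to system/decomposeParDict, and read numberOfSubdomains from it. The dictionary it consults therefore only strictly needs that one entry; a minimal, hypothetical example:

    // decomposeParDict as read by readNumProcs (only numberOfSubdomains is
    // looked up here; method etc. are used by decomposePar itself)
    numberOfSubdomains  4;

    method              scotch;

A typical invocation, assuming the existing -parallelSource option, would then be along the lines of: mapFields <sourceCase> -parallelSource -sourceDecomposeParDict <sourceCase>/system/decomposeParDict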

View File

@ -104,6 +104,8 @@ void MapLagrangianFields
Info<< " mapping lagrangian fieldField " << fieldName << endl;
// Read field (does not need mesh)
// Note: some fieldFields are 0 size (e.g. collision records) if
// not used
IOField<Field<Type> > fieldSource(*fieldIter());
// Map - use CompactIOField to automatically write in
@ -120,12 +122,22 @@ void MapLagrangianFields
IOobject::NO_WRITE,
false
),
addParticles.size()
min(fieldSource.size(), addParticles.size()) // handle 0 size
);
forAll(addParticles, i)
if (fieldSource.size())
{
fieldTarget[i] = fieldSource[addParticles[i]];
forAll(addParticles, i)
{
fieldTarget[i] = fieldSource[addParticles[i]];
}
}
else if (cloud::debug)
{
Pout<< "Not mapping " << fieldName << " since source size "
<< fieldSource.size() << " different to"
<< " cloud size " << addParticles.size()
<< endl;
}
// Write field
@ -139,8 +151,9 @@ void MapLagrangianFields
forAllIter(IOobjectList, fieldFields, fieldIter)
{
Info<< " mapping lagrangian fieldField "
<< fieldIter()->name() << endl;
const word& fieldName = fieldIter()->name();
Info<< " mapping lagrangian fieldField " << fieldName << endl;
// Read field (does not need mesh)
CompactIOField<Field<Type>, Type> fieldSource(*fieldIter());
@ -150,7 +163,7 @@ void MapLagrangianFields
(
IOobject
(
fieldIter()->name(),
fieldName,
meshTarget.time().timeName(),
cloud::prefix/cloudName,
meshTarget,
@ -158,12 +171,22 @@ void MapLagrangianFields
IOobject::NO_WRITE,
false
),
addParticles.size()
min(fieldSource.size(), addParticles.size()) // handle 0 size
);
forAll(addParticles, i)
if (fieldSource.size())
{
fieldTarget[i] = fieldSource[addParticles[i]];
forAll(addParticles, i)
{
fieldTarget[i] = fieldSource[addParticles[i]];
}
}
else if (cloud::debug)
{
Pout<< "Not mapping " << fieldName << " since source size "
<< fieldSource.size() << " different to"
<< " cloud size " << addParticles.size()
<< endl;
}
// Write field

View File

@ -110,9 +110,10 @@ void mapLagrangian(const meshToMesh& interp)
cloud::prefix/cloudDirs[cloudI]
);
IOobject* positionsPtr = objects.lookup(word("positions"));
bool foundPositions =
returnReduce(objects.found("positions"), orOp<bool>());
if (positionsPtr)
if (foundPositions)
{
Info<< nl << " processing cloud " << cloudDirs[cloudI] << endl;

View File

@ -89,7 +89,7 @@ for (label procI = 0; procI < Pstream::nProcs(); procI++)
DynamicList<label> dRayIs;
// Collect the rays which has not abstacle in bettween in rayStartFace
// Collect the rays which have not hit an obstacle between rayStartFace
// and rayEndFace. If a ray hits itself it gets stored in dRayIs
forAll(hitInfo, rayI)
{

View File

@ -4,6 +4,7 @@ linearNormal/linearNormal.C
planeExtrusion/planeExtrusion.C
linearDirection/linearDirection.C
linearRadial/linearRadial.C
offsetSurface/offsetSurface.C
radial/radial.C
sigmaRadial/sigmaRadial.C
sector/sector.C

View File

@ -1,5 +1,6 @@
EXE_INC = \
-I$(LIB_SRC)/meshTools/lnInclude \
-I$(LIB_SRC)/triSurface/lnInclude \
-I$(LIB_SRC)/dynamicMesh/lnInclude
LIB_LIBS = \

View File

@ -0,0 +1,165 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2014 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
#include "offsetSurface.H"
#include "addToRunTimeSelectionTable.H"
#include "triSurface.H"
#include "triSurfaceSearch.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
namespace extrudeModels
{
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
defineTypeNameAndDebug(offsetSurface, 0);
addToRunTimeSelectionTable(extrudeModel, offsetSurface, dictionary);
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
offsetSurface::offsetSurface(const dictionary& dict)
:
extrudeModel(typeName, dict),
project_(coeffDict_.lookupOrDefault("project", false))
{
// Read surface
fileName baseName(coeffDict_.lookup("baseSurface"));
baseName.expand();
baseSurfPtr_.reset(new triSurface(baseName));
// Construct search engine
baseSearchPtr_.reset(new triSurfaceSearch(baseSurfPtr_()));
// Read offset surface
fileName offsetName(coeffDict_.lookup("offsetSurface"));
offsetName.expand();
offsetSurfPtr_.reset(new triSurface(offsetName));
// Construct search engine
offsetSearchPtr_.reset(new triSurfaceSearch(offsetSurfPtr_()));
const triSurface& b = baseSurfPtr_();
const triSurface& o = offsetSurfPtr_();
if
(
b.size() != o.size()
|| b.nPoints() != o.nPoints()
|| b.nEdges() != o.nEdges()
)
{
FatalIOErrorIn("offsetSurface::offsetSurface(const dictionary&)", dict)
<< "offsetSurface " << offsetName
<< " should have exactly the same topology as the baseSurface "
<< baseName << exit(FatalIOError);
}
}
// * * * * * * * * * * * * * * * * Destructor * * * * * * * * * * * * * * * //
offsetSurface::~offsetSurface()
{}
// * * * * * * * * * * * * * * * * Operators * * * * * * * * * * * * * * * * //
point offsetSurface::operator()
(
const point& surfacePoint,
const vector& surfaceNormal,
const label layer
) const
{
if (layer == 0)
{
return surfacePoint;
}
else
{
pointField samples(1, surfacePoint);
scalarField nearestDistSqr(1, GREAT);
List<pointIndexHit> info;
baseSearchPtr_().findNearest(samples, nearestDistSqr, info);
label triI = info[0].index();
const triSurface& base = baseSurfPtr_();
const triPointRef baseTri(base[triI].tri(base.points()));
List<scalar> bary;
baseTri.barycentric(surfacePoint, bary);
const triSurface& offset = offsetSurfPtr_();
const triPointRef offsetTri(offset[triI].tri(offset.points()));
const point offsetPoint
(
bary[0]*offsetTri.a()
+bary[1]*offsetTri.b()
+bary[2]*offsetTri.c()
);
point interpolatedPoint
(
surfacePoint + sumThickness(layer)*(offsetPoint-surfacePoint)
);
// Either return interpolatedPoint or re-project onto the surface (since
// snapping might not have done so exactly)
if (project_)
{
// Re-project onto surface
offsetSearchPtr_().findNearest
(
pointField(1, interpolatedPoint),
scalarField(1, GREAT),
info
);
return info[0].hitPoint();
}
else
{
return interpolatedPoint;
}
}
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace extrudeModels
} // End namespace Foam
// ************************************************************************* //

View File

@ -0,0 +1,114 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2014 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
Class
Foam::extrudeModels::offsetSurface
Description
Extrudes by interpolating points from one surface to the other. The
surfaces have to be topologically identical, i.e. one has to be an offset
version of the other.
\*---------------------------------------------------------------------------*/
#ifndef offsetSurface_H
#define offsetSurface_H
#include "point.H"
#include "extrudeModel.H"
#include "Switch.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
class triSurface;
class triSurfaceSearch;
namespace extrudeModels
{
/*---------------------------------------------------------------------------*\
Class offsetSurface Declaration
\*---------------------------------------------------------------------------*/
class offsetSurface
:
public extrudeModel
{
// Private data
//- Base surface
autoPtr<triSurface> baseSurfPtr_;
//- Search engine for the base surface
autoPtr<triSurfaceSearch> baseSearchPtr_;
//- Offset surface
autoPtr<triSurface> offsetSurfPtr_;
//- Search engine for the offset surface
autoPtr<triSurfaceSearch> offsetSearchPtr_;
//- Whether to re-project onto the offset surface
const Switch project_;
public:
//- Runtime type information
TypeName("offsetSurface");
// Constructors
//- Construct from dictionary
offsetSurface(const dictionary& dict);
//- Destructor
virtual ~offsetSurface();
// Member Operators
//- Return the extruded point for the given surface point and layer
point operator()
(
const point& surfacePoint,
const vector& surfaceNormal,
const label layer
) const;
};
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace extrudeModels
} // End namespace Foam
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#endif
// ************************************************************************* //