ENH: allow patch specification; have conforming faceZones after layering

Author: mattijs
Date:   2010-09-30 14:13:46 +01:00
Parent: 2a6e9f10c0
Commit: 5f43ae45ec
17 changed files with 390 additions and 1287 deletions
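The "patch specification" in the commit title refers to the new optional patchInfo sub-dictionary: refinementSurfaces now reads a patchInfo dictionary per surface (and per region), and meshRefinement::addPatch/addMeshedPatch take that dictionary instead of a bare patch type word when creating the mesh patches. As a rough, hedged sketch of what such an entry could look like in the refinement-surfaces specification (the surface name, levels and patch settings below are illustrative, not taken from this commit):

    refinementSurfaces
    {
        mySurface                       // illustrative surface name
        {
            level (2 3);

            patchInfo                   // optional patch specification
            {
                type    wall;           // any polyPatch type plus its settings
            }
        }
    }

The second part of the title, conforming faceZones after layering, is handled in the layer-addition path below: zoned faces are temporarily split into baffles via meshRefinement::createZoneBaffles, the baffle numbering is carried through the layer insertion maps, and mergeBaffles converts them back into zoned faces afterwards.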

View File

@ -5,7 +5,6 @@ $(autoHexMeshDriver)/autoLayerDriver.C
$(autoHexMeshDriver)/autoLayerDriverShrink.C
$(autoHexMeshDriver)/autoSnapDriver.C
$(autoHexMeshDriver)/autoRefineDriver.C
$(autoHexMeshDriver)/autoHexMeshDriver.C
$(autoHexMeshDriver)/layerParameters/layerParameters.C
$(autoHexMeshDriver)/refinementParameters/refinementParameters.C

View File

@ -1,548 +0,0 @@
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | Copyright (C) 1991-2010 OpenCFD Ltd.
     \\/     M anipulation  |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
\*----------------------------------------------------------------------------*/
#include "autoHexMeshDriver.H"
#include "fvMesh.H"
#include "Time.H"
#include "boundBox.H"
#include "wallPolyPatch.H"
#include "cellSet.H"
#include "syncTools.H"
#include "refinementParameters.H"
#include "snapParameters.H"
#include "layerParameters.H"
#include "autoRefineDriver.H"
#include "autoSnapDriver.H"
#include "autoLayerDriver.H"
#include "triSurfaceMesh.H"
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
namespace Foam
{
defineTypeNameAndDebug(autoHexMeshDriver, 0);
}
// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //
// Check writing tolerance before doing any serious work
Foam::scalar Foam::autoHexMeshDriver::getMergeDistance(const scalar mergeTol)
const
{
const boundBox& meshBb = mesh_.bounds();
scalar mergeDist = mergeTol * meshBb.mag();
scalar writeTol = std::pow
(
scalar(10.0),
-scalar(IOstream::defaultPrecision())
);
Info<< nl
<< "Overall mesh bounding box : " << meshBb << nl
<< "Relative tolerance : " << mergeTol << nl
<< "Absolute matching distance : " << mergeDist << nl
<< endl;
if (mesh_.time().writeFormat() == IOstream::ASCII && mergeTol < writeTol)
{
FatalErrorIn("autoHexMeshDriver::getMergeDistance(const scalar) const")
<< "Your current settings specify ASCII writing with "
<< IOstream::defaultPrecision() << " digits precision." << endl
<< "Your merging tolerance (" << mergeTol << ") is finer than this."
<< endl
<< "Please change your writeFormat to binary"
<< " or increase the writePrecision" << endl
<< "or adjust the merge tolerance (-mergeTol)."
<< exit(FatalError);
}
return mergeDist;
}
//// Specifically orient using a calculated point outside
//void Foam::autoHexMeshDriver::orientOutside
//(
// PtrList<searchableSurface>& shells
//)
//{
// // Determine outside point.
// boundBox overallBb = boundBox::invertedBox;
//
// bool hasSurface = false;
//
// forAll(shells, shellI)
// {
// if (isA<triSurfaceMesh>(shells[shellI]))
// {
// const triSurfaceMesh& shell =
// refCast<const triSurfaceMesh>(shells[shellI]);
//
// hasSurface = true;
//
// boundBox shellBb(shell.localPoints(), false);
//
// overallBb.min() = min(overallBb.min(), shellBb.min());
// overallBb.max() = max(overallBb.max(), shellBb.max());
// }
// }
//
// if (hasSurface)
// {
// const point outsidePt = 2 * overallBb.span();
//
// //Info<< "Using point " << outsidePt << " to orient shells" << endl;
//
// forAll(shells, shellI)
// {
// if (isA<triSurfaceMesh>(shells[shellI]))
// {
// triSurfaceMesh& shell =
// refCast<triSurfaceMesh>(shells[shellI]);
//
// if (!refinementSurfaces::isSurfaceClosed(shell))
// {
// FatalErrorIn("orientOutside(PtrList<searchableSurface>&)")
// << "Refinement shell "
// << shell.searchableSurface::name()
// << " is not closed." << exit(FatalError);
// }
//
// refinementSurfaces::orientSurface(outsidePt, shell);
// }
// }
// }
//}
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
// Construct from components
Foam::autoHexMeshDriver::autoHexMeshDriver
(
fvMesh& mesh,
const bool overwrite,
const dictionary& dict,
const dictionary& decomposeDict
)
:
mesh_(mesh),
dict_(dict),
debug_(readLabel(dict_.lookup("debug"))),
mergeDist_(getMergeDistance(readScalar(dict_.lookup("mergeTolerance"))))
{
if (debug_ > 0)
{
meshRefinement::debug = debug_;
autoHexMeshDriver::debug = debug_;
autoRefineDriver::debug = debug;
autoSnapDriver::debug = debug;
autoLayerDriver::debug = debug;
}
refinementParameters refineParams(dict, 1);
Info<< "Overall cell limit : "
<< refineParams.maxGlobalCells() << endl;
Info<< "Per processor cell limit : "
<< refineParams.maxLocalCells() << endl;
Info<< "Minimum number of cells to refine : "
<< refineParams.minRefineCells() << endl;
Info<< "Curvature : "
<< refineParams.curvature() << nl << endl;
Info<< "Layers between different refinement levels : "
<< refineParams.nBufferLayers() << endl;
PtrList<dictionary> shellDicts(dict_.lookup("refinementShells"));
PtrList<dictionary> surfaceDicts(dict_.lookup("surfaces"));
// Read geometry
// ~~~~~~~~~~~~~
{
Info<< "Reading all geometry." << endl;
// Construct dictionary with all shells and all refinement surfaces
dictionary geometryDict;
forAll(shellDicts, shellI)
{
dictionary shellDict = shellDicts[shellI];
const word name(shellDict.lookup("name"));
shellDict.remove("name");
shellDict.remove("level");
shellDict.remove("refineInside");
geometryDict.add(name, shellDict);
}
forAll(surfaceDicts, surfI)
{
dictionary surfDict = surfaceDicts[surfI];
const word name(string::validate<word>(surfDict.lookup("file")));
surfDict.remove("file");
surfDict.remove("regions");
if (!surfDict.found("name"))
{
surfDict.add("name", name);
}
surfDict.add("type", triSurfaceMesh::typeName);
geometryDict.add(name, surfDict);
}
allGeometryPtr_.reset
(
new searchableSurfaces
(
IOobject
(
"abc", // dummy name
//mesh_.time().findInstance("triSurface", word::null),
// instance
mesh_.time().constant(), // instance
"triSurface", // local
mesh_.time(), // registry
IOobject::MUST_READ,
IOobject::NO_WRITE
),
geometryDict
)
);
Info<< "Read geometry in = "
<< mesh_.time().cpuTimeIncrement() << " s" << endl;
}
// Read refinement surfaces
// ~~~~~~~~~~~~~~~~~~~~~~~~
{
Info<< "Reading surfaces and constructing search trees." << endl;
surfacesPtr_.reset
(
new refinementSurfaces
(
allGeometryPtr_(),
surfaceDicts
)
);
Info<< "Read surfaces in = "
<< mesh_.time().cpuTimeIncrement() << " s" << endl;
}
// Read refinement shells
// ~~~~~~~~~~~~~~~~~~~~~~
{
Info<< "Reading refinement shells." << endl;
shellsPtr_.reset
(
new shellSurfaces
(
allGeometryPtr_(),
shellDicts
)
);
Info<< "Read refinement shells in = "
<< mesh_.time().cpuTimeIncrement() << " s" << endl;
//// Orient shell surfaces before any searching is done.
//Info<< "Orienting triSurface shells so point far away is outside."
// << endl;
//orientOutside(shells_);
//Info<< "Oriented shells in = "
// << mesh_.time().cpuTimeIncrement() << " s" << endl;
Info<< "Setting refinement level of surface to be consistent"
<< " with shells." << endl;
surfacesPtr_().setMinLevelFields(shells());
Info<< "Checked shell refinement in = "
<< mesh_.time().cpuTimeIncrement() << " s" << endl;
}
// Check faceZones are synchronised
meshRefinement::checkCoupledFaceZones(mesh_);
// Refinement engine
// ~~~~~~~~~~~~~~~~~
{
Info<< nl
<< "Determining initial surface intersections" << nl
<< "-----------------------------------------" << nl
<< endl;
// Main refinement engine
meshRefinerPtr_.reset
(
new meshRefinement
(
mesh,
mergeDist_, // tolerance used in sorting coordinates
overwrite,
surfaces(),
shells()
)
);
Info<< "Calculated surface intersections in = "
<< mesh_.time().cpuTimeIncrement() << " s" << endl;
// Some stats
meshRefinerPtr_().printMeshInfo(debug_, "Initial mesh");
meshRefinerPtr_().write
(
debug_&meshRefinement::OBJINTERSECTIONS,
mesh_.time().path()/meshRefinerPtr_().timeName()
);
}
// Add all the surface regions as patches
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
{
Info<< nl
<< "Adding patches for surface regions" << nl
<< "----------------------------------" << nl
<< endl;
// From global region number to mesh patch.
globalToPatch_.setSize(surfaces().nRegions(), -1);
Info<< "Patch\tRegion" << nl
<< "-----\t------"
<< endl;
const labelList& surfaceGeometry = surfaces().surfaces();
forAll(surfaceGeometry, surfI)
{
label geomI = surfaceGeometry[surfI];
const wordList& regNames = allGeometryPtr_().regionNames()[geomI];
Info<< surfaces().names()[surfI] << ':' << nl << nl;
forAll(regNames, i)
{
label patchI = meshRefinerPtr_().addMeshedPatch
(
regNames[i],
wallPolyPatch::typeName
);
Info<< patchI << '\t' << regNames[i] << nl;
globalToPatch_[surfaces().globalRegion(surfI, i)] = patchI;
}
Info<< nl;
}
Info<< "Added patches in = "
<< mesh_.time().cpuTimeIncrement() << " s" << nl << endl;
}
//// Add cyclics for any named faceZones
//// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//// (these cyclics are used later on to temporarily put the faceZones
//// in when snapping)
//
//labelList namedSurfaces(surfaces().getNamedSurfaces());
//if (namedSurfaces.size())
//{
// Info<< nl
// << "Introducing cyclics for faceZones" << nl
// << "---------------------------------" << nl
// << endl;
//
// // From surface to cyclic patch
// surfaceToCyclicPatch_.setSize(surfaces().size(), -1);
//
// Info<< "Patch\tZone" << nl
// << "----\t-----"
// << endl;
//
// forAll(namedSurfaces, i)
// {
// label surfI = namedSurfaces[i];
//
// surfaceToCyclicPatch_[surfI] = meshRefinement::addPatch
// (
// mesh,
// surfaces().faceZoneNames()[surfI],
// cyclicPolyPatch::typeName
// );
//
// Info<< surfaceToCyclicPatch_[surfI] << '\t'
// << surfaces().faceZoneNames()[surfI] << nl << endl;
// }
// Info<< "Added cyclic patches in = "
// << mesh_.time().cpuTimeIncrement() << " s" << endl;
//}
// Parallel
// ~~~~~~~~
{
// Decomposition
decomposerPtr_ = decompositionMethod::New(decomposeDict);
decompositionMethod& decomposer = decomposerPtr_();
if (Pstream::parRun() && !decomposer.parallelAware())
{
FatalErrorIn("autoHexMeshDriver::autoHexMeshDriver"
"(const IOobject&, fvMesh&)")
<< "You have selected decomposition method "
<< decomposer.typeName
<< " which is not parallel aware." << endl
<< "Please select one that is (hierarchical, ptscotch)"
<< exit(FatalError);
}
// Mesh distribution engine (uses tolerance to reconstruct meshes)
distributorPtr_.reset(new fvMeshDistribute(mesh_, mergeDist_));
}
}
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
void Foam::autoHexMeshDriver::writeMesh(const string& msg) const
{
const meshRefinement& meshRefiner = meshRefinerPtr_();
meshRefiner.printMeshInfo(debug_, msg);
Info<< "Writing mesh to time " << meshRefiner.timeName() << endl;
meshRefiner.write(meshRefinement::MESH|meshRefinement::SCALARLEVELS, "");
if (debug_ & meshRefinement::OBJINTERSECTIONS)
{
meshRefiner.write
(
meshRefinement::OBJINTERSECTIONS,
mesh_.time().path()/meshRefiner.timeName()
);
}
Info<< "Wrote mesh in = "
<< mesh_.time().cpuTimeIncrement() << " s." << endl;
}
void Foam::autoHexMeshDriver::doMesh()
{
const Switch wantRefine(dict_.lookup("doRefine"));
const Switch wantSnap(dict_.lookup("doSnap"));
const Switch wantLayers(dict_.lookup("doLayers"));
Info<< "Do refinement : " << wantRefine << nl
<< "Do snapping : " << wantSnap << nl
<< "Do layers : " << wantLayers << nl
<< endl;
if (wantRefine)
{
const dictionary& motionDict = dict_.subDict("motionDict");
autoRefineDriver refineDriver
(
meshRefinerPtr_(),
decomposerPtr_(),
distributorPtr_(),
globalToPatch_
);
// Get all the refinement specific params
refinementParameters refineParams(dict_, 1);
refineDriver.doRefine(dict_, refineParams, wantSnap, motionDict);
// Write mesh
writeMesh("Refined mesh");
}
if (wantSnap)
{
const dictionary& snapDict = dict_.subDict("snapDict");
const dictionary& motionDict = dict_.subDict("motionDict");
autoSnapDriver snapDriver
(
meshRefinerPtr_(),
globalToPatch_
);
// Get all the snapping specific params
snapParameters snapParams(snapDict, 1);
snapDriver.doSnap(snapDict, motionDict, snapParams);
// Write mesh.
writeMesh("Snapped mesh");
}
if (wantLayers)
{
const dictionary& motionDict = dict_.subDict("motionDict");
const dictionary& shrinkDict = dict_.subDict("shrinkDict");
PtrList<dictionary> surfaceDicts(dict_.lookup("surfaces"));
autoLayerDriver layerDriver(meshRefinerPtr_());
// Get all the layer specific params
layerParameters layerParams
(
surfaceDicts,
surfacesPtr_(),
globalToPatch_,
shrinkDict,
mesh_.boundaryMesh()
);
layerDriver.doLayers
(
shrinkDict,
motionDict,
layerParams,
true, // pre-balance
decomposerPtr_(),
distributorPtr_()
);
// Write mesh.
writeMesh("Layer mesh");
}
}
// ************************************************************************* //

View File

@ -1,232 +0,0 @@
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | Copyright (C) 1991-2010 OpenCFD Ltd.
     \\/     M anipulation  |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
Class
Foam::autoHexMeshDriver
Description
main meshing driver.
SourceFiles
autoHexMeshDriver.C
\*---------------------------------------------------------------------------*/
#ifndef autoHexMeshDriver_H
#define autoHexMeshDriver_H
#include "autoPtr.H"
#include "dictionary.H"
#include "searchableSurfaces.H"
#include "refinementSurfaces.H"
#include "shellSurfaces.H"
#include "meshRefinement.H"
#include "decompositionMethod.H"
#include "fvMeshDistribute.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
// Class forward declarations
class fvMesh;
/*---------------------------------------------------------------------------*\
Class autoHexMeshDriver Declaration
\*---------------------------------------------------------------------------*/
class autoHexMeshDriver
{
// Static data members
//- Extrusion controls
enum extrudeMode
{
NOEXTRUDE, /*!< Do not extrude. No layers added. */
EXTRUDE, /*!< Extrude */
EXTRUDEREMOVE /*!< Extrude but afterwards remove added */
/*!< faces locally */
};
// Private classes
//- Combine operator class for equalizing displacements.
class minMagEqOp
{
public:
void operator()(vector& x, const vector& y) const
{
if (magSqr(y) < magSqr(x))
{
x = y;
}
}
};
//- Combine operator class to combine normal with other normal.
class nomalsCombine
{
public:
void operator()(vector& x, const vector& y) const
{
if (y != point::max)
{
if (x == point::max)
{
x = y;
}
else
{
x *= (x&y);
}
}
}
};
// Private data
//- Reference to mesh
fvMesh& mesh_;
//- Input dictionary
const dictionary dict_;
//- Debug level
const label debug_;
//- Merge distance
const scalar mergeDist_;
//- All surface based geometry
autoPtr<searchableSurfaces> allGeometryPtr_;
//- Shells (geometry for inside/outside refinement)
autoPtr<shellSurfaces> shellsPtr_;
//- Surfaces (geometry for intersection based refinement)
autoPtr<refinementSurfaces> surfacesPtr_;
//- Per refinement surface region the patch
labelList globalToPatch_;
//- Mesh refinement engine
autoPtr<meshRefinement> meshRefinerPtr_;
//- Decomposition engine
autoPtr<decompositionMethod> decomposerPtr_;
//- Mesh distribution engine
autoPtr<fvMeshDistribute> distributorPtr_;
// Private Member Functions
//- Calculate merge distance. Check against writing tolerance.
scalar getMergeDistance(const scalar mergeTol) const;
//static void orientOutside(PtrList<searchableSurface>&);
//- Disallow default bitwise copy construct
autoHexMeshDriver(const autoHexMeshDriver&);
//- Disallow default bitwise assignment
void operator=(const autoHexMeshDriver&);
public:
//- Runtime type information
ClassName("autoHexMeshDriver");
// Constructors
//- Construct from dictionary and mesh to modify
autoHexMeshDriver
(
fvMesh& mesh,
const bool overwrite,
const dictionary& meshDict,
const dictionary& decomposeDict
);
// Member Functions
// Access
//- reference to mesh
const fvMesh& mesh() const
{
return mesh_;
}
fvMesh& mesh()
{
return mesh_;
}
//- Surfaces to base refinement on
const refinementSurfaces& surfaces() const
{
return surfacesPtr_();
}
//- Surfaces to volume refinement on
const shellSurfaces& shells() const
{
return shellsPtr_();
}
//- Per refinement surface, per region the patch
const labelList& globalToPatch() const
{
return globalToPatch_;
}
// Meshing
//- Write mesh
void writeMesh(const string&) const;
//- Do all : refine, snap, layers
void doMesh();
};
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace Foam
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#endif
// ************************************************************************* //

View File

@ -193,13 +193,13 @@ Foam::label Foam::autoLayerDriver::mergePatchFacesUndo
errorFaces
);
// if (checkEdgeConnectivity)
// {
//if (checkEdgeConnectivity)
//{
// Info<< "Checking edge-face connectivity (duplicate faces"
// << " or non-consecutive shared vertices)" << endl;
//
// label nOldSize = errorFaces.size();
//
// hasErrors =
// mesh.checkFaceFaces
// (
@ -207,7 +207,7 @@ Foam::label Foam::autoLayerDriver::mergePatchFacesUndo
// &errorFaces
// )
// || hasErrors;
//
// Info<< "Detected additional "
// << returnReduce
// (
@ -216,7 +216,7 @@ Foam::label Foam::autoLayerDriver::mergePatchFacesUndo
// )
// << " faces with illegal face-face connectivity"
// << endl;
// }
//}
if (!hasErrors)
{
@ -2344,6 +2344,7 @@ Foam::label Foam::autoLayerDriver::checkAndUnmark
(
const addPatchCellLayer& addLayer,
const dictionary& meshQualityDict,
const List<labelPair>& baffles,
const indirectPrimitivePatch& pp,
const fvMesh& newMesh,
@ -2355,7 +2356,15 @@ Foam::label Foam::autoLayerDriver::checkAndUnmark
// Check the resulting mesh for errors
Info<< nl << "Checking mesh with layer ..." << endl;
faceSet wrongFaces(newMesh, "wrongFaces", newMesh.nFaces()/1000);
motionSmoother::checkMesh(false, newMesh, meshQualityDict, wrongFaces);
motionSmoother::checkMesh
(
false,
newMesh,
meshQualityDict,
identity(newMesh.nFaces()),
baffles,
wrongFaces
);
Info<< "Detected " << returnReduce(wrongFaces.size(), sumOp<label>())
<< " illegal faces"
<< " (concave, zero area or negative cell pyramid volume)"
@ -2484,9 +2493,14 @@ void Foam::autoLayerDriver::getLayerCellsFaces
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::autoLayerDriver::autoLayerDriver(meshRefinement& meshRefiner)
Foam::autoLayerDriver::autoLayerDriver
(
meshRefinement& meshRefiner,
const labelList& globalToPatch
)
:
meshRefiner_(meshRefiner)
meshRefiner_(meshRefiner),
globalToPatch_(globalToPatch)
{}
@ -2536,6 +2550,20 @@ void Foam::autoLayerDriver::addLayers
{
fvMesh& mesh = meshRefiner_.mesh();
// Create baffles (pairs of faces that share the same points)
// Baffles stored as owner and neighbour face that have been created.
List<labelPair> baffles;
meshRefiner_.createZoneBaffles(globalToPatch_, baffles);
if (debug)
{
const_cast<Time&>(mesh.time())++;
Info<< "Writing baffled mesh to " << meshRefiner_.timeName() << endl;
mesh.write();
}
autoPtr<indirectPrimitivePatch> pp
(
meshRefinement::makePatch
@ -2920,6 +2948,7 @@ void Foam::autoLayerDriver::addLayers
(
meshMover(),
meshQualityDict,
baffles,
layerParams.nSmoothThickness(),
layerParams.maxThicknessToMedialRatio(),
@ -3086,6 +3115,14 @@ void Foam::autoLayerDriver::addLayers
identity(pp().nPoints())
);
// Update numbering of baffles
forAll(baffles, i)
{
labelPair& p = baffles[i];
p[0] = map().reverseFaceMap()[p[0]];
p[1] = map().reverseFaceMap()[p[1]];
}
// Collect layer faces and cells for outside loop.
getLayerCellsFaces
(
@ -3130,6 +3167,7 @@ void Foam::autoLayerDriver::addLayers
(
addLayer,
meshQualityDict,
baffles,
pp(),
newMesh,
@ -3197,6 +3235,24 @@ void Foam::autoLayerDriver::addLayers
meshRefiner_.updateMesh(map, labelList(0));
label nBaffles = returnReduce(baffles.size(), sumOp<label>());
if (nBaffles > 0)
{
// Merge any baffles
Info<< "Converting " << nBaffles
<< " baffles back into zoned faces ..."
<< endl;
autoPtr<mapPolyMesh> map = meshRefiner_.mergeBaffles(baffles);
inplaceReorder(map().reverseCellMap(), flaggedCells);
inplaceReorder(map().reverseFaceMap(), flaggedFaces);
Info<< "Converted baffles in = "
<< meshRefiner_.mesh().time().cpuTimeIncrement()
<< " s\n" << nl << endl;
}
// Do final balancing
// ~~~~~~~~~~~~~~~~~~
@ -3280,8 +3336,19 @@ void Foam::autoLayerDriver::doLayers
{
if (numLayers[patchI] > 0)
{
patchIDs.append(patchI);
nFacesWithLayers += mesh.boundaryMesh()[patchI].size();
const polyPatch& pp = mesh.boundaryMesh()[patchI];
if (!polyPatch::constraintType(pp.type()))
{
patchIDs.append(patchI);
nFacesWithLayers += mesh.boundaryMesh()[patchI].size();
}
else
{
WarningIn("autoLayerDriver::doLayers(..)")
<< "Ignoring layers on constraint patch " << pp.name()
<< endl;
}
}
}
patchIDs.shrink();
@ -3331,28 +3398,17 @@ void Foam::autoLayerDriver::doLayers
}
}
// Balance mesh (and meshRefinement). No restriction on face zones
// and baffles.
// Balance mesh (and meshRefinement). Restrict faceZones to
// be on internal faces only since they will be converted into
// baffles.
autoPtr<mapDistributePolyMesh> map = meshRefiner_.balance
(
false,
true, //false, // keepZoneFaces
false,
cellWeights,
decomposer,
distributor
);
//{
// globalIndex globalCells(mesh.nCells());
//
// Info<< "** Distribution after balancing:" << endl;
// for (label procI = 0; procI < Pstream::nProcs(); procI++)
// {
// Info<< " " << procI << '\t'
// << globalCells.localSize(procI) << endl;
// }
// Info<< endl;
//}
}

View File

@ -98,6 +98,9 @@ class autoLayerDriver
//- Mesh+surface
meshRefinement& meshRefiner_;
//- From surface region to patch
const labelList globalToPatch_;
// Private Member Functions
@ -333,6 +336,7 @@ class autoLayerDriver
(
const addPatchCellLayer& addLayer,
const dictionary& motionDict,
const List<labelPair>& baffles,
const indirectPrimitivePatch& pp,
const fvMesh&,
@ -468,6 +472,7 @@ class autoLayerDriver
(
motionSmoother& meshMover,
const dictionary& meshQualityDict,
const List<labelPair>& baffles,
const label nSmoothThickness,
const scalar maxThicknessToMedialRatio,
const label nAllowableErrors,
@ -503,7 +508,11 @@ public:
// Constructors
//- Construct from components
autoLayerDriver(meshRefinement& meshRefiner);
autoLayerDriver
(
meshRefinement& meshRefiner,
const labelList& globalToPatch
);
// Member Functions

View File

@ -945,6 +945,7 @@ void Foam::autoLayerDriver::shrinkMeshMedialDistance
(
motionSmoother& meshMover,
const dictionary& meshQualityDict,
const List<labelPair>& baffles,
const label nSmoothThickness,
const scalar maxThicknessToMedialRatio,
const label nAllowableErrors,
@ -1135,7 +1136,7 @@ void Foam::autoLayerDriver::shrinkMeshMedialDistance
meshMover.scaleMesh
(
checkFaces,
List<labelPair>(0),
baffles,
meshMover.paramDict(),
meshQualityDict,
true,

View File

@ -844,10 +844,10 @@ void Foam::autoRefineDriver::doRefine
<< "---------------------" << nl
<< endl;
if (debug)
{
const_cast<Time&>(mesh.time())++;
}
//if (debug)
//{
// const_cast<Time&>(mesh.time())++;
//}
// Do final balancing. Keep zoned faces on one processor since the
// snap phase will convert them to baffles and this only works for

View File

@ -46,75 +46,16 @@ Description
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
defineTypeNameAndDebug(Foam::autoSnapDriver, 0);
namespace Foam
{
defineTypeNameAndDebug(autoSnapDriver, 0);
} // End namespace Foam
// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //
// Get faces to repatch. Returns map from face to patch.
Foam::Map<Foam::label> Foam::autoSnapDriver::getZoneBafflePatches
(
const bool allowBoundary
) const
{
const fvMesh& mesh = meshRefiner_.mesh();
const refinementSurfaces& surfaces = meshRefiner_.surfaces();
Map<label> bafflePatch(mesh.nFaces()/1000);
const wordList& faceZoneNames = surfaces.faceZoneNames();
const faceZoneMesh& fZones = mesh.faceZones();
forAll(faceZoneNames, surfI)
{
if (faceZoneNames[surfI].size())
{
// Get zone
const faceZone& fZone = fZones[faceZoneNames[surfI]];
//// Get patch allocated for zone
//label patchI = surfaceToCyclicPatch_[surfI];
// Get patch of (first region) of surface
label patchI = globalToPatch_[surfaces.globalRegion(surfI, 0)];
Info<< "For surface "
<< surfaces.names()[surfI]
<< " found faceZone " << fZone.name()
<< " and patch " << mesh.boundaryMesh()[patchI].name()
<< endl;
forAll(fZone, i)
{
label faceI = fZone[i];
if (allowBoundary || mesh.isInternalFace(faceI))
{
if (!bafflePatch.insert(faceI, patchI))
{
label oldPatchI = bafflePatch[faceI];
if (oldPatchI != patchI)
{
FatalErrorIn("getZoneBafflePatches(const bool)")
<< "Face " << faceI
<< " fc:" << mesh.faceCentres()[faceI]
<< " in zone " << fZone.name()
<< " is in patch "
<< mesh.boundaryMesh()[oldPatchI].name()
<< " and in patch "
<< mesh.boundaryMesh()[patchI].name()
<< abort(FatalError);
}
}
}
}
}
}
return bafflePatch;
}
// Calculate geometrically collocated points. Requires PackedList to be
// sized and initialised!
Foam::label Foam::autoSnapDriver::getCollocatedPoints
@ -647,91 +588,6 @@ Foam::autoSnapDriver::autoSnapDriver
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
Foam::autoPtr<Foam::mapPolyMesh> Foam::autoSnapDriver::createZoneBaffles
(
List<labelPair>& baffles
)
{
labelList zonedSurfaces = meshRefiner_.surfaces().getNamedSurfaces();
autoPtr<mapPolyMesh> map;
// No need to sync; all processors will have all same zonedSurfaces.
if (zonedSurfaces.size())
{
fvMesh& mesh = meshRefiner_.mesh();
// Split internal faces on interface surfaces
Info<< "Converting zoned faces into baffles ..." << endl;
// Get faces (internal only) to be baffled. Map from face to patch
// label.
Map<label> faceToPatch(getZoneBafflePatches(false));
label nZoneFaces = returnReduce(faceToPatch.size(), sumOp<label>());
if (nZoneFaces > 0)
{
// Convert into labelLists
labelList ownPatch(mesh.nFaces(), -1);
forAllConstIter(Map<label>, faceToPatch, iter)
{
ownPatch[iter.key()] = iter();
}
// Create baffles. both sides same patch.
map = meshRefiner_.createBaffles(ownPatch, ownPatch);
// Get pairs of faces created.
// Just loop over faceMap and store baffle if we encounter a slave
// face.
baffles.setSize(faceToPatch.size());
label baffleI = 0;
const labelList& faceMap = map().faceMap();
const labelList& reverseFaceMap = map().reverseFaceMap();
forAll(faceMap, faceI)
{
label oldFaceI = faceMap[faceI];
// Does face originate from face-to-patch
Map<label>::const_iterator iter = faceToPatch.find(oldFaceI);
if (iter != faceToPatch.end())
{
label masterFaceI = reverseFaceMap[oldFaceI];
if (faceI != masterFaceI)
{
baffles[baffleI++] = labelPair(masterFaceI, faceI);
}
}
}
if (baffleI != faceToPatch.size())
{
FatalErrorIn("autoSnapDriver::createZoneBaffles(..)")
<< "Had " << faceToPatch.size() << " patches to create "
<< " but encountered " << baffleI
<< " slave faces originating from patcheable faces."
<< abort(FatalError);
}
if (debug)
{
const_cast<Time&>(mesh.time())++;
Pout<< "Writing baffled mesh to time "
<< meshRefiner_.timeName() << endl;
mesh.write();
}
}
Info<< "Created " << nZoneFaces << " baffles in = "
<< mesh.time().cpuTimeIncrement() << " s\n" << nl << endl;
}
return map;
}
Foam::autoPtr<Foam::mapPolyMesh> Foam::autoSnapDriver::mergeZoneBaffles
(
const List<labelPair>& baffles
@ -1419,7 +1275,7 @@ void Foam::autoSnapDriver::doSnap
// Create baffles (pairs of faces that share the same points)
// Baffles stored as owner and neighbour face that have been created.
List<labelPair> baffles;
createZoneBaffles(baffles);
meshRefiner_.createZoneBaffles(globalToPatch_, baffles);
{
autoPtr<indirectPrimitivePatch> ppPtr

View File

@ -83,9 +83,6 @@ class autoSnapDriver
// Snapping
//- Get faces to repatch. Returns map from face to patch.
Map<label> getZoneBafflePatches(const bool allowBoundary) const;
//- Calculates (geometric) shared points
static label getCollocatedPoints
(
@ -155,10 +152,6 @@ public:
// Snapping
//- Create baffles for faces straddling zoned surfaces. Return
// baffles.
autoPtr<mapPolyMesh> createZoneBaffles(List<labelPair>&);
//- Merge baffles.
autoPtr<mapPolyMesh> mergeZoneBaffles(const List<labelPair>&);

View File

@ -364,7 +364,8 @@ Foam::layerParameters::layerParameters
{
IOWarningIn("layerParameters::layerParameters(..)", layersDict)
<< "Wildcard layer specification for " << wildCards[i]
<< " does not match any patch." << endl;
<< " does not match any patch." << endl
<< "Valid patches are " << boundaryMesh.names() << endl;
}
}
@ -376,7 +377,8 @@ Foam::layerParameters::layerParameters
{
IOWarningIn("layerParameters::layerParameters(..)", layersDict)
<< "Layer specification for " << nonWildCards[i]
<< " does not match any patch." << endl;
<< " does not match any patch." << endl
<< "Valid patches are " << boundaryMesh.names() << endl;
}
}
}
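The two warnings above fire when an entry in the addLayersControls 'layers' sub-dictionary, either a wildcard or a literal patch name, matches no patch of the mesh; listing the valid patch names makes the mismatch easier to spot. A minimal sketch of the kind of specification being validated, assuming the usual layers syntax (patch names and layer counts are illustrative):

    layers
    {
        "wing.*"                // wildcard entry: must match at least one patch
        {
            nSurfaceLayers 3;
        }

        fuselage                // literal entry: must name an existing patch
        {
            nSurfaceLayers 2;
        }
    }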

View File

@ -1127,6 +1127,7 @@ Foam::autoPtr<Foam::mapDistributePolyMesh> Foam::meshRefinement::balance
const wordList& fzNames = surfaces().faceZoneNames();
const faceZoneMesh& fZones = mesh_.faceZones();
const polyBoundaryMesh& pbm = mesh_.boundaryMesh();
// Get faces whose owner and neighbour should stay together,
// i.e. they are not 'blocked'.
@ -1140,10 +1141,18 @@ Foam::autoPtr<Foam::mapDistributePolyMesh> Foam::meshRefinement::balance
forAll(fZone, i)
{
if (blockedFace[fZone[i]])
label faceI = fZone[i];
if (blockedFace[faceI])
{
blockedFace[fZone[i]] = false;
nUnblocked++;
if
(
mesh_.isInternalFace(faceI)
|| pbm[pbm.whichPatch(faceI)].coupled()
)
{
blockedFace[faceI] = false;
nUnblocked++;
}
}
}
}
@ -1550,7 +1559,7 @@ Foam::label Foam::meshRefinement::addPatch
(
fvMesh& mesh,
const word& patchName,
const word& patchType
const dictionary& patchInfo
)
{
polyBoundaryMesh& polyPatches =
@ -1559,21 +1568,8 @@ Foam::label Foam::meshRefinement::addPatch
const label patchI = polyPatches.findPatchID(patchName);
if (patchI != -1)
{
if (polyPatches[patchI].type() == patchType)
{
// Already there
return patchI;
}
//else
//{
// FatalErrorIn
// (
// "meshRefinement::addPatch(fvMesh&, const word&, const word&)"
// ) << "Patch " << patchName << " already exists but with type "
// << patchType << nl
// << "Current patch names:" << polyPatches.names()
// << exit(FatalError);
//}
// Already there
return patchI;
}
@ -1603,6 +1599,10 @@ Foam::label Foam::meshRefinement::addPatch
fvBoundaryMesh& fvPatches = const_cast<fvBoundaryMesh&>(mesh.boundary());
dictionary patchDict(patchInfo);
patchDict.set("nFaces", 0);
patchDict.set("startFace", startFaceI);
// Add polyPatch at the end
polyPatches.setSize(sz+1);
polyPatches.set
@ -1610,10 +1610,8 @@ Foam::label Foam::meshRefinement::addPatch
sz,
polyPatch::New
(
patchType,
patchName,
0, // size
startFaceI,
patchDict,
insertPatchI,
polyPatches
)
@ -1720,7 +1718,7 @@ Foam::label Foam::meshRefinement::addPatch
Foam::label Foam::meshRefinement::addMeshedPatch
(
const word& name,
const word& type
const dictionary& patchInfo
)
{
label meshedI = findIndex(meshedPatches_, name);
@ -1733,7 +1731,7 @@ Foam::label Foam::meshRefinement::addMeshedPatch
else
{
// Add patch
label patchI = addPatch(mesh_, name, type);
label patchI = addPatch(mesh_, name, patchInfo);
// Store
label sz = meshedPatches_.size();
@ -1747,17 +1745,24 @@ Foam::label Foam::meshRefinement::addMeshedPatch
Foam::labelList Foam::meshRefinement::meshedPatches() const
{
labelList patchIDs(meshedPatches_.size());
const polyBoundaryMesh& patches = mesh_.boundaryMesh();
DynamicList<label> patchIDs(meshedPatches_.size());
forAll(meshedPatches_, i)
{
patchIDs[i] = mesh_.boundaryMesh().findPatchID(meshedPatches_[i]);
label patchI = patches.findPatchID(meshedPatches_[i]);
if (patchIDs[i] == -1)
if (patchI == -1)
{
FatalErrorIn("meshRefinement::meshedPatches() const")
<< "Problem : did not find patch " << meshedPatches_[i]
<< endl << "Valid patches are " << patches.names()
<< abort(FatalError);
}
if (!polyPatch::constraintType(patches[patchI].type()))
{
patchIDs.append(patchI);
}
}
return patchIDs;

View File

@ -325,6 +325,13 @@ private:
// Baffle handling
//- Get faces to repatch. Returns map from face to patch.
Map<label> getZoneBafflePatches
(
const bool allowBoundary,
const labelList& globalToPatch
) const;
//- Determine patches for baffles
void getBafflePatches
(
@ -713,6 +720,14 @@ public:
const labelList& neiPatch
);
//- Create baffles for faces straddling zoned surfaces. Return
// baffles.
autoPtr<mapPolyMesh> createZoneBaffles
(
const labelList& globalToPatch,
List<labelPair>&
);
//- Return a list of coupled face pairs, i.e. faces that
// use the same vertices.
List<labelPair> getDuplicateFaces(const labelList& testFaces) const;
@ -734,10 +749,10 @@ public:
//- Helper:add patch to mesh. Update all registered fields.
// Use addMeshedPatch to add patches originating from surfaces.
static label addPatch(fvMesh&, const word& name, const word& type);
static label addPatch(fvMesh&, const word& name, const dictionary&);
//- Add patch originating from meshing. Update meshedPatches_.
label addMeshedPatch(const word& name, const word& type);
label addMeshedPatch(const word& name, const dictionary&);
//- Get patchIDs for patches added in addMeshedPatch.
labelList meshedPatches() const;

View File

@ -216,7 +216,7 @@ Foam::label Foam::meshRefinement::getBafflePatch
}
// Determine patches for baffles.
// Determine patches for baffles on all intersected unnamed faces
void Foam::meshRefinement::getBafflePatches
(
const labelList& globalToPatch,
@ -359,6 +359,70 @@ void Foam::meshRefinement::getBafflePatches
}
// Get faces to repatch. Returns map from face to patch.
Foam::Map<Foam::label> Foam::meshRefinement::getZoneBafflePatches
(
const bool allowBoundary,
const labelList& globalToPatch
) const
{
Map<label> bafflePatch(mesh_.nFaces()/1000);
const wordList& faceZoneNames = surfaces_.faceZoneNames();
const faceZoneMesh& fZones = mesh_.faceZones();
forAll(faceZoneNames, surfI)
{
if (faceZoneNames[surfI].size())
{
// Get zone
label zoneI = fZones.findZoneID(faceZoneNames[surfI]);
const faceZone& fZone = fZones[zoneI];
//// Get patch allocated for zone
//label patchI = surfaceToCyclicPatch_[surfI];
// Get patch of (first region) of surface
label patchI = globalToPatch[surfaces_.globalRegion(surfI, 0)];
Info<< "For surface "
<< surfaces_.names()[surfI]
<< " found faceZone " << fZone.name()
<< " and patch " << mesh_.boundaryMesh()[patchI].name()
<< endl;
forAll(fZone, i)
{
label faceI = fZone[i];
if (allowBoundary || mesh_.isInternalFace(faceI))
{
if (!bafflePatch.insert(faceI, patchI))
{
label oldPatchI = bafflePatch[faceI];
if (oldPatchI != patchI)
{
FatalErrorIn("getZoneBafflePatches(const bool)")
<< "Face " << faceI
<< " fc:" << mesh_.faceCentres()[faceI]
<< " in zone " << fZone.name()
<< " is in patch "
<< mesh_.boundaryMesh()[oldPatchI].name()
<< " and in patch "
<< mesh_.boundaryMesh()[patchI].name()
<< abort(FatalError);
}
}
}
}
}
}
return bafflePatch;
}
// Create baffle for every face where ownPatch != -1
Foam::autoPtr<Foam::mapPolyMesh> Foam::meshRefinement::createBaffles
(
@ -586,6 +650,90 @@ Foam::List<Foam::labelPair> Foam::meshRefinement::getDuplicateFaces
}
Foam::autoPtr<Foam::mapPolyMesh> Foam::meshRefinement::createZoneBaffles
(
const labelList& globalToPatch,
List<labelPair>& baffles
)
{
labelList zonedSurfaces = surfaces_.getNamedSurfaces();
autoPtr<mapPolyMesh> map;
// No need to sync; all processors will have all same zonedSurfaces.
if (zonedSurfaces.size())
{
// Split internal faces on interface surfaces
Info<< "Converting zoned faces into baffles ..." << endl;
// Get faces (internal only) to be baffled. Map from face to patch
// label.
Map<label> faceToPatch(getZoneBafflePatches(false, globalToPatch));
label nZoneFaces = returnReduce(faceToPatch.size(), sumOp<label>());
if (nZoneFaces > 0)
{
// Convert into labelLists
labelList ownPatch(mesh_.nFaces(), -1);
forAllConstIter(Map<label>, faceToPatch, iter)
{
ownPatch[iter.key()] = iter();
}
// Create baffles. both sides same patch.
map = createBaffles(ownPatch, ownPatch);
// Get pairs of faces created.
// Just loop over faceMap and store baffle if we encounter a slave
// face.
baffles.setSize(faceToPatch.size());
label baffleI = 0;
const labelList& faceMap = map().faceMap();
const labelList& reverseFaceMap = map().reverseFaceMap();
forAll(faceMap, faceI)
{
label oldFaceI = faceMap[faceI];
// Does face originate from face-to-patch
Map<label>::const_iterator iter = faceToPatch.find(oldFaceI);
if (iter != faceToPatch.end())
{
label masterFaceI = reverseFaceMap[oldFaceI];
if (faceI != masterFaceI)
{
baffles[baffleI++] = labelPair(masterFaceI, faceI);
}
}
}
if (baffleI != faceToPatch.size())
{
FatalErrorIn("meshRefinement::createZoneBaffles(..)")
<< "Had " << faceToPatch.size() << " patches to create "
<< " but encountered " << baffleI
<< " slave faces originating from patcheable faces."
<< abort(FatalError);
}
if (debug)
{
const_cast<Time&>(mesh_.time())++;
Pout<< "Writing baffled mesh to time "
<< mesh_.time().timeName() << endl;
mesh_.write();
}
}
Info<< "Created " << nZoneFaces << " baffles in = "
<< mesh_.time().cpuTimeIncrement() << " s\n" << nl << endl;
}
return map;
}
// Extract those baffles (duplicate) faces that are on the edge of a baffle
// region. These are candidates for merging.
// Done by counting the number of baffles faces per mesh edge. If edge

View File

@ -50,268 +50,6 @@ const Foam::NamedEnum<Foam::refinementSurfaces::areaSelectionAlgo, 4>
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
// Construct from components
Foam::refinementSurfaces::refinementSurfaces
(
const searchableSurfaces& allGeometry,
const PtrList<dictionary>& surfaceDicts
)
:
allGeometry_(allGeometry),
surfaces_(surfaceDicts.size()),
names_(surfaceDicts.size()),
faceZoneNames_(surfaceDicts.size()),
cellZoneNames_(surfaceDicts.size()),
zoneInside_(surfaceDicts.size(), NONE),
zoneInsidePoints_(surfaceDicts.size()),
regionOffset_(surfaceDicts.size())
{
labelList globalMinLevel(surfaceDicts.size(), 0);
labelList globalMaxLevel(surfaceDicts.size(), 0);
scalarField globalAngle(surfaceDicts.size(), -GREAT);
List<Map<label> > regionMinLevel(surfaceDicts.size());
List<Map<label> > regionMaxLevel(surfaceDicts.size());
List<Map<scalar> > regionAngle(surfaceDicts.size());
//wordList globalPatchType(surfaceDicts.size());
//List<HashTable<word> > regionPatchType(surfaceDicts.size());
//List<HashTable<word> > regionPatchName(surfaceDicts.size());
forAll(surfaceDicts, surfI)
{
const dictionary& dict = surfaceDicts[surfI];
dict.lookup("name") >> names_[surfI];
surfaces_[surfI] = allGeometry_.findSurfaceID(names_[surfI]);
// Global refinement level
globalMinLevel[surfI] = readLabel(dict.lookup("minRefinementLevel"));
globalMaxLevel[surfI] = readLabel(dict.lookup("maxRefinementLevel"));
// Global zone names per surface
if (dict.readIfPresent("faceZone", faceZoneNames_[surfI]))
{
// Read optional entry to determine inside of faceZone
word method;
bool hasSide = dict.readIfPresent("cellZoneInside", method);
if (hasSide)
{
zoneInside_[surfI] = areaSelectionAlgoNames[method];
if (zoneInside_[surfI] == INSIDEPOINT)
{
dict.lookup("insidePoint") >> zoneInsidePoints_[surfI];
}
}
else
{
// Check old syntax
bool inside;
if (dict.readIfPresent("zoneInside", inside))
{
hasSide = true;
zoneInside_[surfI] = (inside ? INSIDE : OUTSIDE);
}
}
// Read optional cellZone name
if (dict.readIfPresent("cellZone", cellZoneNames_[surfI]))
{
if
(
(
zoneInside_[surfI] == INSIDE
|| zoneInside_[surfI] == OUTSIDE
)
&& !allGeometry_[surfaces_[surfI]].hasVolumeType()
)
{
IOWarningIn
(
"refinementSurfaces::refinementSurfaces(..)",
dict
) << "Illegal entry zoneInside "
<< areaSelectionAlgoNames[zoneInside_[surfI]]
<< " for faceZone "
<< faceZoneNames_[surfI]
<< " since surface " << names_[surfI]
<< " is not closed." << endl;
}
}
else if (hasSide)
{
IOWarningIn("refinementSurfaces::refinementSurfaces(..)", dict)
<< "Unused entry zoneInside for faceZone "
<< faceZoneNames_[surfI]
<< " since no cellZone specified."
<< endl;
}
}
// Global perpendicular angle
dict.readIfPresent("perpendicularAngle", globalAngle[surfI]);
//// Global patch name per surface
//dict.readIfPresent("patchType", globalPatchType[surfI]);
if (dict.found("regions"))
{
PtrList<dictionary> regionDicts(dict.lookup("regions"));
const wordList& regionNames =
allGeometry_[surfaces_[surfI]].regions();
forAll(regionDicts, dictI)
{
const dictionary& regionDict = regionDicts[dictI];
const word regionName(regionDict.lookup("name"));
label regionI = findIndex(regionNames, regionName);
if (regionI == -1)
{
FatalErrorIn
(
"refinementSurfaces::refinementSurfaces"
"(const IOobject&, const PtrList<dictionary>&)"
) << "No region called " << regionName << " on surface "
<< allGeometry_[surfaces_[surfI]].name() << endl
<< "Valid regions are " << regionNames
<< exit(FatalError);
}
label min = readLabel(regionDict.lookup("minRefinementLevel"));
label max = readLabel(regionDict.lookup("maxRefinementLevel"));
bool hasInserted = regionMinLevel[surfI].insert(regionI, min);
if (!hasInserted)
{
FatalErrorIn
(
"refinementSurfaces::refinementSurfaces"
"(const IOobject&, const PtrList<dictionary>&)"
) << "Duplicate region name " << regionName
<< " on surface " << names_[surfI]
<< exit(FatalError);
}
regionMaxLevel[surfI].insert(regionI, max);
if (regionDict.found("perpendicularAngle"))
{
regionAngle[surfI].insert
(
regionI,
readScalar(regionDict.lookup("perpendicularAngle"))
);
}
}
}
}
// Check for duplicate surface names
{
HashTable<label> surfaceNames(names_.size());
forAll(names_, surfI)
{
if (!surfaceNames.insert(names_[surfI], surfI))
{
FatalErrorIn
(
"refinementSurfaces::refinementSurfaces"
"(const IOobject&, const PtrList<dictionary>&)"
) << "Duplicate surface name " << names_[surfI] << endl
<< "Previous occurrence of name at surface "
<< surfaceNames[names_[surfI]]
<< exit(FatalError);
}
}
}
// Calculate local to global region offset
label nRegions = 0;
forAll(surfaceDicts, surfI)
{
regionOffset_[surfI] = nRegions;
nRegions += allGeometry_[surfaces_[surfI]].regions().size();
}
// Rework surface specific information into information per global region
minLevel_.setSize(nRegions);
minLevel_ = 0;
maxLevel_.setSize(nRegions);
maxLevel_ = 0;
perpendicularAngle_.setSize(nRegions);
perpendicularAngle_ = -GREAT;
//patchName_.setSize(nRegions);
//patchType_.setSize(nRegions);
forAll(surfaceDicts, surfI)
{
label nRegions = allGeometry_[surfaces_[surfI]].regions().size();
// Initialise to global (i.e. per surface)
for (label i = 0; i < nRegions; i++)
{
minLevel_[regionOffset_[surfI] + i] = globalMinLevel[surfI];
maxLevel_[regionOffset_[surfI] + i] = globalMaxLevel[surfI];
perpendicularAngle_[regionOffset_[surfI] + i] = globalAngle[surfI];
}
// Overwrite with region specific information
forAllConstIter(Map<label>, regionMinLevel[surfI], iter)
{
label globalRegionI = regionOffset_[surfI] + iter.key();
minLevel_[globalRegionI] = iter();
maxLevel_[globalRegionI] = regionMaxLevel[surfI][iter.key()];
// Check validity
if
(
minLevel_[globalRegionI] < 0
|| maxLevel_[globalRegionI] < minLevel_[globalRegionI]
)
{
FatalErrorIn
(
"refinementSurfaces::refinementSurfaces"
"(const IOobject&, const PtrList<dictionary>&)"
) << "Illegal level or layer specification for surface "
<< names_[surfI]
<< " : minLevel:" << minLevel_[globalRegionI]
<< " maxLevel:" << maxLevel_[globalRegionI]
<< exit(FatalError);
}
}
forAllConstIter(Map<scalar>, regionAngle[surfI], iter)
{
label globalRegionI = regionOffset_[surfI] + iter.key();
perpendicularAngle_[globalRegionI] = regionAngle[surfI][iter.key()];
}
//// Optional patch names and patch types
//forAllConstIter(HashTable<word>, regionPatchName[surfI], iter)
//{
// label regionI = findIndex(regionNames, iter.key());
// label globalRegionI = regionOffset_[surfI] + regionI;
//
// patchName_[globalRegionI] = iter();
// patchType_[globalRegionI] = regionPatchType[surfI][iter.key()];
//}
}
}
Foam::refinementSurfaces::refinementSurfaces
(
const searchableSurfaces& allGeometry,
@ -353,9 +91,11 @@ Foam::refinementSurfaces::refinementSurfaces
labelList globalMinLevel(surfI, 0);
labelList globalMaxLevel(surfI, 0);
scalarField globalAngle(surfI, -GREAT);
PtrList<dictionary> globalPatchInfo(surfI);
List<Map<label> > regionMinLevel(surfI);
List<Map<label> > regionMaxLevel(surfI);
List<Map<scalar> > regionAngle(surfI);
List<Map<autoPtr<dictionary> > > regionPatchInfo(surfI);
surfI = 0;
forAll(allGeometry.names(), geomI)
@ -439,6 +179,14 @@ Foam::refinementSurfaces::refinementSurfaces
}
// Global perpendicular angle
if (dict.found("patchInfo"))
{
globalPatchInfo.set
(
surfI,
dict.subDict("patchInfo").clone()
);
}
dict.readIfPresent("perpendicularAngle", globalAngle[surfI]);
if (dict.found("regions"))
@ -473,6 +221,15 @@ Foam::refinementSurfaces::refinementSurfaces
)
);
}
if (regionDict.found("patchInfo"))
{
regionPatchInfo[surfI].insert
(
regionI,
regionDict.subDict("patchInfo").clone()
);
}
}
}
}
@ -496,6 +253,7 @@ Foam::refinementSurfaces::refinementSurfaces
maxLevel_ = 0;
perpendicularAngle_.setSize(nRegions);
perpendicularAngle_ = -GREAT;
patchInfo_.setSize(nRegions);
forAll(globalMinLevel, surfI)
@ -505,9 +263,18 @@ Foam::refinementSurfaces::refinementSurfaces
// Initialise to global (i.e. per surface)
for (label i = 0; i < nRegions; i++)
{
minLevel_[regionOffset_[surfI] + i] = globalMinLevel[surfI];
maxLevel_[regionOffset_[surfI] + i] = globalMaxLevel[surfI];
perpendicularAngle_[regionOffset_[surfI] + i] = globalAngle[surfI];
label globalRegionI = regionOffset_[surfI] + i;
minLevel_[globalRegionI] = globalMinLevel[surfI];
maxLevel_[globalRegionI] = globalMaxLevel[surfI];
perpendicularAngle_[globalRegionI] = globalAngle[surfI];
if (globalPatchInfo.set(surfI))
{
patchInfo_.set
(
globalRegionI,
globalPatchInfo[surfI].clone()
);
}
}
// Overwrite with region specific information
@ -542,6 +309,14 @@ Foam::refinementSurfaces::refinementSurfaces
perpendicularAngle_[globalRegionI] = regionAngle[surfI][iter.key()];
}
const Map<autoPtr<dictionary> >& localInfo = regionPatchInfo[surfI];
forAllConstIter(Map<autoPtr<dictionary> >, localInfo, iter)
{
label globalRegionI = regionOffset_[surfI] + iter.key();
patchInfo_.set(globalRegionI, iter()().clone());
}
}
}
@ -1166,7 +941,7 @@ void Foam::refinementSurfaces::findNearestRegion
(
const labelList& surfacesToTest,
const pointField& samples,
const scalarField& nearestDistSqr,
const scalarField& nearestDistSqr,
labelList& hitSurface,
labelList& hitRegion
) const

View File

@ -109,6 +109,9 @@ private:
//- From global region number to perpendicular angle
scalarField perpendicularAngle_;
//- From global region number to patchType
PtrList<dictionary> patchInfo_;
// Private Member Functions
@ -123,13 +126,6 @@ public:
// Constructors
//- Construct from surfaces and dictionaries
refinementSurfaces
(
const searchableSurfaces& allGeometry,
const PtrList<dictionary>&
);
//- Construct from surfaces and dictionary
refinementSurfaces
(
@ -215,6 +211,12 @@ public:
return perpendicularAngle_;
}
//- From global region number to patch type
const PtrList<dictionary>& patchInfo() const
{
return patchInfo_;
}
// Helper
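The patchInfo_ list declared above holds one optional dictionary per global surface region; the refinementSurfaces.C changes fill it from a surface-level patchInfo entry and override it with any patchInfo found inside a regions sub-dictionary. A hedged sketch of such a per-region override (surface, region and type names are illustrative only):

    mySurface
    {
        level (2 3);
        patchInfo { type wall; }            // default for all regions of the surface

        regions
        {
            inletRegion                     // illustrative region name
            {
                level (3 3);
                patchInfo { type patch; }   // overrides the surface-level setting
            }
        }
    }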