Mirror of https://develop.openfoam.com/Development/openfoam.git
synced 2025-11-28 03:28:01 +00:00
Merge branch 'master' of ssh://dm/home/dm4/OpenFOAM/OpenFOAM-dev
@@ -30,6 +30,13 @@ License

void Foam::Time::readDict()
{
    word application;
    if (controlDict_.readIfPresent("application", application))
    {
        // Do not override if already set so external application can override
        setEnv("FOAM_APPLICATION", application, false);
    }

    if (!deltaTchanged_)
    {
        deltaT_ = readScalar(controlDict_.lookup("deltaT"));
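The `false` passed to `setEnv` above is what implements the "do not override" behaviour described in the comment. A minimal standalone sketch of the same set-if-absent semantics using plain POSIX `setenv` (an illustration only, not OpenFOAM's own implementation; the value "simpleFoam" is invented):

```cpp
#include <cstdlib>   // setenv, getenv
#include <cstdio>

int main()
{
    // Set FOAM_APPLICATION only if the environment does not already define
    // it, so a value exported by an external caller wins (overwrite = 0).
    setenv("FOAM_APPLICATION", "simpleFoam", /*overwrite=*/0);
    std::printf("FOAM_APPLICATION=%s\n", std::getenv("FOAM_APPLICATION"));
    return 0;
}
```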
@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

@@ -417,6 +417,120 @@ Foam::ITstream& Foam::dictionary::lookup
}


const Foam::entry* Foam::dictionary::lookupScopedEntryPtr
(
    const word& keyword,
    bool recursive,
    bool patternMatch
) const
{
    string::size_type dotPos = keyword.find('.');

    if (dotPos == string::npos)
    {
        return lookupEntryPtr(keyword, recursive, patternMatch);
    }
    else
    {
        if (dotPos == 0)
        {
            const dictionary* dictPtr = this;
            while (&dictPtr->parent_ != &dictionary::null)
            {
                dictPtr = &dictPtr->parent_;
            }

            // At top
            return dictPtr->lookupScopedEntryPtr
            (
                keyword.substr(1, keyword.size()-1),
                false,
                patternMatch
            );
        }
        else
        {
            wordList entryNames(fileName(keyword).components('.'));

            const entry* entPtr = lookupEntryPtr(entryNames[0], false, true);

            for (int i=1; i<entryNames.size(); ++i)
            {
                if (!entPtr)
                {
                    FatalIOErrorIn
                    (
                        "dictionary::lookupScopedEntryPtr"
                        "(const word&, bool, bool)",
                        *this
                    )   << "keyword " << keyword
                        << " is undefined in dictionary "
                        << name()
                        << exit(FatalIOError);
                }
                if (!entPtr->isDict())
                {
                    FatalIOErrorIn
                    (
                        "dictionary::lookupScopedEntryPtr"
                        "(const word&, bool, bool)",
                        *this
                    )   << "Entry " << entPtr->name()
                        << " is not a dictionary so cannot lookup sub entry "
                        << entryNames[i]
                        << exit(FatalIOError);
                }

                entPtr = entPtr->dict().lookupEntryPtr
                (
                    entryNames[i],
                    false,
                    true
                );
            }

            if (!entPtr)
            {
                FatalIOErrorIn
                (
                    "dictionary::lookupScopedEntryPtr"
                    "(const word&, bool, bool)",
                    *this
                )   << "keyword " << keyword
                    << " is not a valid scoped entry in dictionary "
                    << name()
                    << exit(FatalIOError);
            }
            return entPtr;
        }
    }
}


bool Foam::dictionary::substituteScopedKeyword(const word& keyword)
{
    word varName = keyword(1, keyword.size()-1);

    // lookup the variable name in the given dictionary
    const entry* ePtr = lookupScopedEntryPtr(varName, true, true);

    // if defined insert its entries into this dictionary
    if (ePtr != NULL)
    {
        const dictionary& addDict = ePtr->dict();

        forAllConstIter(IDLList<entry>, addDict, iter)
        {
            add(iter());
        }

        return true;
    }

    return false;
}


bool Foam::dictionary::isDict(const word& keyword) const
{
    // Find non-recursive with patterns
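The resolution rule implemented above: split the keyword on '.', look the first component up in the current dictionary, then descend through sub-dictionaries; a leading '.' restarts the search at the top-level parent. A self-contained sketch of that rule over a toy nested structure (plain C++, not OpenFOAM code; the leading-'.' case is omitted):

```cpp
#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Toy "dictionary": leaf values plus named sub-dictionaries.
struct Dict
{
    std::map<std::string, std::string> values;
    std::map<std::string, Dict> subs;
};

// Resolve a dotted keyword such as "boundaryField.inlet.value".
// Returns nullptr when any component is missing.
const std::string* lookupScoped(const Dict& d, const std::string& keyword)
{
    std::istringstream ss(keyword);
    std::string part, last;
    const Dict* cur = &d;
    while (std::getline(ss, part, '.'))
    {
        if (!last.empty())
        {
            auto sub = cur->subs.find(last);     // descend one level
            if (sub == cur->subs.end()) return nullptr;
            cur = &sub->second;
        }
        last = part;
    }
    auto it = cur->values.find(last);
    return it == cur->values.end() ? nullptr : &it->second;
}

int main()
{
    Dict root;
    root.subs["boundaryField"].subs["inlet"].values["value"] = "uniform 1";
    if (const std::string* v = lookupScoped(root, "boundaryField.inlet.value"))
    {
        std::cout << *v << '\n';     // prints: uniform 1
    }
}
```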
@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

@@ -353,6 +353,15 @@ public:
            bool patternMatch=true
        ) const;

        //- Find and return an entry data stream pointer if present
        //  otherwise return NULL. Allows scoping using '.'
        const entry* lookupScopedEntryPtr
        (
            const word&,
            bool recursive,
            bool patternMatch
        ) const;

        //- Check if entry is a sub-dictionary
        bool isDict(const word&) const;

@@ -387,6 +396,10 @@ public:
        //  corresponding sub-dictionary entries
        bool substituteKeyword(const word& keyword);

        //- Substitute the given scoped keyword prepended by '$' with the
        //  corresponding sub-dictionary entries
        bool substituteScopedKeyword(const word& keyword);

        //- Add a new entry
        //  With the merge option, dictionaries are interwoven and
        //  primitive entries are overwritten
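A hedged sketch of how the two new members declared above might be called from application code. `lookupScopedEntryPtr` is the function added by this commit; the dictionary name `fvSolutionDict` and the keyword `"solvers.p.tolerance"` are invented for illustration:

```cpp
// Hypothetical fragment: resolve a nested entry through '.' scoping.
const Foam::entry* ePtr =
    fvSolutionDict.lookupScopedEntryPtr("solvers.p.tolerance", false, true);

if (ePtr)
{
    Foam::scalar tol = Foam::readScalar(ePtr->stream());
    Foam::Info<< "p tolerance = " << tol << Foam::endl;
}
```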
@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

@@ -112,7 +112,7 @@ bool Foam::entry::New(dictionary& parentDict, Istream& is)
        !disableFunctionEntries
     && keyword[0] == '$') // ... Substitution entry
    {
        parentDict.substituteKeyword(keyword);
        parentDict.substituteScopedKeyword(keyword);
        return true;
    }
    else if

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

@@ -52,7 +52,7 @@ bool Foam::primitiveEntry::expandVariable
    // internalField:
    //     internalField XXX;
    //     boundaryField { ".*" {YYY;} movingWall {value $internalField;}
    const entry* ePtr = dict.lookupEntryPtr(varName, true, false);
    const entry* ePtr = dict.lookupScopedEntryPtr(varName, true, false);

    // ...if defined append its tokens into this
    if (ePtr)
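An invented case-file fragment (not from the commit, kept as a C++ string for illustration) guessing at the kind of reference the switch to `lookupScopedEntryPtr` is meant to support: the second `$` reference presumably reaches into a sub-dictionary through '.', while the first behaves exactly as before.

```cpp
// Illustration only; the entry names are made up.
static const char* scopedExample =
    "internalField  uniform 0;\n"
    "refValues      { inlet 1; }\n"
    "boundaryField\n"
    "{\n"
    "    movingWall { value $internalField; }     // plain variable\n"
    "    inlet      { value $refValues.inlet; }   // scoped variable\n"
    "}\n";
```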
@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

@@ -663,18 +663,11 @@ void Foam::autoRefineDriver::mergePatchFaces
    const dictionary& motionDict
)
{
    const fvMesh& mesh = meshRefiner_.mesh();

    Info<< nl
        << "Merge refined boundary faces" << nl
        << "----------------------------" << nl
        << endl;

    if (debug)
    {
        const_cast<Time&>(mesh.time())++;
    }

    meshRefiner_.mergePatchFacesUndo
    (
        Foam::cos(degToRad(45.0)),

@@ -1465,6 +1465,7 @@ void Foam::autoSnapDriver::doSnap
    {
        disp = calcNearestSurfaceFeature
        (
            snapParams,
            iter,
            featureCos,
            scalar(iter+1)/nFeatIter,
@@ -111,17 +111,43 @@ class autoSnapDriver

        // Feature line snapping

        //- Is point on two feature edges that make a largish angle?
        bool isFeaturePoint
        (
            const scalar featureCos,
            const indirectPrimitivePatch& pp,
            const PackedBoolList& isFeatureEdge,
            const label pointI
        ) const;

        void smoothAndConstrain
        (
            const indirectPrimitivePatch& pp,
            const List<pointConstraint>& constraints,
            vectorField& disp
        ) const;
        //void calcNearest
        void smoothAndConstrain2
        (
            const bool applyConstraints,
            const indirectPrimitivePatch& pp,
            const List<pointConstraint>& constraints,
            vectorField& disp
        ) const;
        void calcNearest
        (
            const label iter,
            const indirectPrimitivePatch& pp,
            vectorField& pointDisp,
            vectorField& pointSurfaceNormal,
            vectorField& pointRotation
        ) const;
        //void calcNearestFace
        //(
        //    const pointField& points,
        //    vectorField& disp,
        //    vectorField& surfaceNormal
        //    const label iter,
        //    const indirectPrimitivePatch& pp,
        //    vectorField& faceDisp,
        //    vectorField& faceSurfaceNormal,
        //    vectorField& faceRotation
        //) const;
        void calcNearestFace
        (
@@ -129,19 +155,18 @@ class autoSnapDriver
            const indirectPrimitivePatch& pp,
            vectorField& faceDisp,
            vectorField& faceSurfaceNormal,
            labelList& faceSurfaceRegion,
            vectorField& faceRotation
        ) const;
        void interpolateFaceToPoint
        (
            const label iter,
            const indirectPrimitivePatch& pp,
            const vectorField& faceSurfaceNormal,

            const vectorField& faceDisp,
            const vectorField& faceRotation,

            vectorField& patchDisp,
            vectorField& patchRotationDisp
            const List<List<point> >& pointFaceDisp,
            const List<List<point> >& pointFaceRotation,
            const List<List<point> >& pointFaceCentres,
            vectorField& patchDisp
            //vectorField& patchRotationDisp
        ) const;
        void correctAttraction
        (
@@ -152,6 +177,14 @@ class autoSnapDriver
            const point& pt,
            vector& edgeOffset // offset from pt to point on edge
        ) const;

        //- Return hit if on multiple points
        pointIndexHit findMultiPatchPoint
        (
            const point& pt,
            const labelList& patchIDs,
            const List<point>& faceCentres
        ) const;
        void binFeatureFace
        (
            const label iter,
@@ -186,6 +219,25 @@ class autoSnapDriver
            DynamicList<label>& surfaceCount
        ) const;


        void featureAttractionUsingReconstruction
        (
            const label iter,
            const scalar featureCos,

            const indirectPrimitivePatch& pp,
            const scalarField& snapDist,
            const label pointI,

            const List<List<point> >& pointFaceSurfNormals,
            const List<List<point> >& pointFaceDisp,
            const List<List<point> >& pointFaceCentres,
            const labelListList& pointFacePatchID,

            vector& patchAttraction,
            pointConstraint& patchConstraint
        ) const;

        void featureAttractionUsingReconstruction
        (
            const label iter,
@@ -196,31 +248,12 @@ class autoSnapDriver
            const List<List<point> >& pointFaceNormals,
            const List<List<point> >& pointFaceDisp,
            const List<List<point> >& pointFaceCentres,
            const labelListList& pointFacePatchID,

            vectorField& patchAttraction,
            List<pointConstraint>& patchConstraints
        ) const;

        void determineAllFeatures
        (
            const label iter,
            const scalar featureCos,

            const indirectPrimitivePatch&,
            const scalarField&,

            const List<List<point> >& pointFaceNormals,
            const List<List<point> >& pointFaceDisp,
            const List<List<point> >& pointFaceCentres,

            List<labelList>& pointAttractor,
            List<List<pointConstraint> >& pointConstraints,
            // Feature-edge to pp point
            List<List<DynamicList<point> > >& edgeAttractors,
            List<List<DynamicList<pointConstraint> > >& edgeConstraints,
            vectorField& patchAttraction,
            List<pointConstraint>& patchConstraints
        ) const;
        void determineFeatures
        (
            const label iter,
@@ -232,6 +265,7 @@ class autoSnapDriver
            const List<List<point> >& pointFaceNormals,
            const List<List<point> >& pointFaceDisp,
            const List<List<point> >& pointFaceCentres,
            const labelListList& pointFacePatchID,

            List<labelList>& pointAttractor,
            List<List<pointConstraint> >& pointConstraints,
@@ -242,6 +276,47 @@ class autoSnapDriver
            List<pointConstraint>& patchConstraints
        ) const;

        //- Find point on nearest feature edge (within searchDist).
        //  Return point and feature
        //  and store feature-edge to mesh-point and vice versa
        pointIndexHit findNearFeatureEdge
        (
            const indirectPrimitivePatch& pp,
            const scalarField& snapDist,
            const label pointI,
            const point& estimatedPt,

            label& featI,
            List<List<DynamicList<point> > >&,
            List<List<DynamicList<pointConstraint> > >&,
            vectorField&,
            List<pointConstraint>&
        ) const;

        //- Find nearest feature point (within searchDist).
        //  Return feature point
        //  and store feature-point to mesh-point and vice versa.
        //  If another mesh point already referring to this feature
        //  point and further away, reset that one to a near feature
        //  edge (using findNearFeatureEdge above)
        labelPair findNearFeaturePoint
        (
            const indirectPrimitivePatch& pp,
            const scalarField& snapDist,
            const label pointI,
            const point& estimatedPt,

            // Feature-point to pp point
            List<labelList>& pointAttractor,
            List<List<pointConstraint> >& pointConstraints,
            // Feature-edge to pp point
            List<List<DynamicList<point> > >& edgeAttractors,
            List<List<DynamicList<pointConstraint> > >& edgeConstraints,
            // pp point to nearest feature
            vectorField& patchAttraction,
            List<pointConstraint>& patchConstraints
        ) const;

        void featureAttractionUsingFeatureEdges
        (
            const label iter,
@@ -270,6 +345,7 @@ class autoSnapDriver

        vectorField calcNearestSurfaceFeature
        (
            const snapParameters& snapParams,
            const label iter,
            const scalar featureCos,
            const scalar featureAttract,
@@ -342,7 +418,6 @@ public:
            motionSmoother& meshMover
        ) const;


        //- Smooth the displacement field to the internal.
        void smoothDisplacement
        (
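Several of the declarations above take a `featureCos` threshold ("two feature edges that make a largish angle"). A standalone sketch, not OpenFOAM code, of that style of test: compare the cosine of the angle between two unit edge vectors against the threshold.

```cpp
#include <array>
#include <cstdio>

using Vec = std::array<double, 3>;

static double dot(const Vec& a, const Vec& b)
{
    return a[0]*b[0] + a[1]*b[1] + a[2]*b[2];
}

// True when the angle between unit vectors e0 and e1 exceeds the feature
// angle, i.e. their cosine drops below featureCos.
bool isFeatureAngle(const Vec& e0, const Vec& e1, double featureCos)
{
    return dot(e0, e1) < featureCos;
}

int main()
{
    Vec along = {1, 0, 0};
    Vec bent  = {0, 1, 0};                       // 90 degree kink
    std::printf("%d\n", isFeatureAngle(along, bent, 0.7071 /* cos 45 deg */));
}
```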
File diff suppressed because it is too large
@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

@@ -34,7 +34,9 @@ Foam::snapParameters::snapParameters(const dictionary& dict)
    snapTol_(readScalar(dict.lookup("tolerance"))),
    nSmoothDispl_(readLabel(dict.lookup("nSolveIter"))),
    nSnap_(readLabel(dict.lookup("nRelaxIter"))),
    nFeatureSnap_(dict.lookupOrDefault("nFeatureSnapIter", -1))
    nFeatureSnap_(dict.lookupOrDefault("nFeatureSnapIter", -1)),
    explicitFeatureSnap_(dict.lookupOrDefault("explicitFeatureSnap", true)),
    implicitFeatureSnap_(dict.lookupOrDefault("implicitFeatureSnap", false))
{}
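Because the two new switches are read with `lookupOrDefault`, omitting them keeps the defaults (explicit feature snapping on, implicit off). An invented snapControls fragment, kept as a C++ string for illustration; only the keyword names come from the hunk above:

```cpp
// Illustration only; values are made up.
static const char* snapControlsExample =
    "nFeatureSnapIter     10;\n"
    "explicitFeatureSnap  true;\n"
    "implicitFeatureSnap  false;\n";
```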
@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

@@ -37,6 +37,7 @@ SourceFiles

#include "dictionary.H"
#include "scalar.H"
#include "Switch.H"

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

@@ -63,6 +64,10 @@ class snapParameters

        const label nFeatureSnap_;

        const Switch explicitFeatureSnap_;

        const Switch implicitFeatureSnap_;


    // Private Member Functions

@@ -119,6 +124,16 @@ public:
            return nFeatureSnap_;
        }

        Switch explicitFeatureSnap() const
        {
            return explicitFeatureSnap_;
        }

        Switch implicitFeatureSnap() const
        {
            return implicitFeatureSnap_;
        }

};
@@ -326,6 +326,16 @@ private:
            const labelList& globalToPatch
        ) const;

        //- Geometric test on see whether face needs to be baffled:
        //  is face boundary face and perpendicular to surface normal?
        bool validBaffleTopology
        (
            const label faceI,
            const vector& n1,
            const vector& n2,
            const vector& testDir
        ) const;

        //- Determine patches for baffles
        void getBafflePatches
        (
@@ -414,7 +424,7 @@ private:

        //- Extract those baffles (duplicate) faces that are on the edge
        //  of a baffle region. These are candidates for merging.
        List<labelPair> filterDuplicateFaces(const List<labelPair>&) const;
        List<labelPair> freeStandingBaffles(const List<labelPair>&) const;


        // Zone handling

@@ -43,6 +43,7 @@ License
#include "OFstream.H"
#include "regionSplit.H"
#include "removeCells.H"
#include "unitConversion.H"

// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //

@@ -216,6 +217,45 @@ Foam::label Foam::meshRefinement::getBafflePatch
}


// Check if we are a boundary face and normal of surface does
// not align with test vector. In this case there'd probably be
// a freestanding 'baffle' so we might as well not create it.
// Note that since it is not a proper baffle we cannot detect it
// afterwards so this code cannot be merged with the
// filterDuplicateFaces code.
bool Foam::meshRefinement::validBaffleTopology
(
    const label faceI,
    const vector& n1,
    const vector& n2,
    const vector& testDir
) const
{
    label patchI = mesh_.boundaryMesh().whichPatch(faceI);
    if (patchI == -1 || mesh_.boundaryMesh()[patchI].coupled())
    {
        return true;
    }
    else if (mag(n1&n2) > cos(degToRad(30)))
    {
        // Both normals aligned. Check that test vector perpendicularish to
        // surface normal
        scalar magTestDir = mag(testDir);
        if (magTestDir > VSMALL)
        {
            if (mag(n1&(testDir/magTestDir)) < cos(degToRad(45)))
            {
                //Pout<< "** disabling baffling face "
                //    << mesh_.faceCentres()[faceI] << endl;
                return false;
            }
        }
    }
    return true;
}
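A standalone restatement of the geometric test above with plain vectors (not OpenFOAM code, and assuming unit surface normals): only veto the baffle when the two surface normals are aligned within roughly 30 degrees and the test direction is more than roughly 45 degrees away from the normal.

```cpp
#include <array>
#include <cmath>

using Vec = std::array<double, 3>;

static double dot(const Vec& a, const Vec& b)
{
    return a[0]*b[0] + a[1]*b[1] + a[2]*b[2];
}

static double mag(const Vec& a) { return std::sqrt(dot(a, a)); }

// Keep the baffle (true) unless both normals are aligned within ~30 deg and
// the test direction lies more than ~45 deg off the surface normal, in which
// case the baffle would most likely end up free-standing.
bool validBaffle(const Vec& n1, const Vec& n2, const Vec& testDir)
{
    const double cos30 = 0.866;          // cos(30 deg)
    const double cos45 = 0.707;          // cos(45 deg)
    const double magT = mag(testDir);

    if (std::fabs(dot(n1, n2)) > cos30 && magT > 1e-12)
    {
        if (std::fabs(dot(n1, testDir)/magT) < cos45)
        {
            return false;
        }
    }
    return true;
}
```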
// Determine patches for baffles on all intersected unnamed faces
void Foam::meshRefinement::getBafflePatches
(
@@ -745,7 +785,7 @@ Foam::autoPtr<Foam::mapPolyMesh> Foam::meshRefinement::createZoneBaffles
// Done by counting the number of baffles faces per mesh edge. If edge
// has 2 boundary faces and both are baffle faces it is the edge of a baffle
// region.
Foam::List<Foam::labelPair> Foam::meshRefinement::filterDuplicateFaces
Foam::List<Foam::labelPair> Foam::meshRefinement::freeStandingBaffles
(
    const List<labelPair>& couples
) const
@@ -852,12 +892,112 @@ Foam::List<Foam::labelPair> Foam::meshRefinement::filterDuplicateFaces
    }
    filteredCouples.setSize(filterI);

    //Info<< "filterDuplicateFaces : from "
    //Info<< "freeStandingBaffles : from "
    //    << returnReduce(couples.size(), sumOp<label>())
    //    << " down to "
    //    << returnReduce(filteredCouples.size(), sumOp<label>())
    //    << " baffles." << nl << endl;



    //XXXXXX
    // {
    //     // Collect segments
    //     // ~~~~~~~~~~~~~~~~
    //
    //     pointField start(filteredCouples.size());
    //     pointField end(filteredCouples.size());
    //
    //     const pointField& cellCentres = mesh_.cellCentres();
    //
    //     forAll(filteredCouples, i)
    //     {
    //         const labelPair& couple = couples[i];
    //         start[i] = cellCentres[mesh_.faceOwner()[couple.first()]];
    //         end[i] = cellCentres[mesh_.faceOwner()[couple.second()]];
    //     }
    //
    //     // Extend segments a bit
    //     {
    //         const vectorField smallVec(Foam::sqrt(SMALL)*(end-start));
    //         start -= smallVec;
    //         end += smallVec;
    //     }
    //
    //
    //     // Do test for intersections
    //     // ~~~~~~~~~~~~~~~~~~~~~~~~~
    //     labelList surface1;
    //     List<pointIndexHit> hit1;
    //     labelList region1;
    //     vectorField normal1;
    //
    //     labelList surface2;
    //     List<pointIndexHit> hit2;
    //     labelList region2;
    //     vectorField normal2;
    //
    //     surfaces_.findNearestIntersection
    //     (
    //         surfacesToBaffle,
    //         start,
    //         end,
    //
    //         surface1,
    //         hit1,
    //         region1,
    //         normal1,
    //
    //         surface2,
    //         hit2,
    //         region2,
    //         normal2
    //     );
    //
    //     forAll(testFaces, i)
    //     {
    //         if (hit1[i].hit() && hit2[i].hit())
    //         {
    //             bool createBaffle = true;
    //
    //             label faceI = couples[i].first();
    //             label patchI = mesh_.boundaryMesh().whichPatch(faceI);
    //             if (patchI != -1 && !mesh_.boundaryMesh()[patchI].coupled())
    //             {
    //                 // Check if we are a boundary face and normal of surface
    //                 // does
    //                 // not align with test vector. In this case there'd
    //                 // probably be
    //                 // a freestanding 'baffle' so we might as well not
    //                 // create it.
    //                 // Note that since it is not a proper baffle we cannot
    //                 // detect it
    //                 // afterwards so this code cannot be merged with the
    //                 // filterDuplicateFaces code.
    //                 if (mag(normal1[i]&normal2[i]) > cos(degToRad(30)))
    //                 {
    //                     // Both normals aligned
    //                     vector n = end[i]-start[i];
    //                     scalar magN = mag(n);
    //                     if (magN > VSMALL)
    //                     {
    //                         n /= magN;
    //
    //                         if (mag(normal1[i]&n) < cos(degToRad(45)))
    //                         {
    //                             Pout<< "** disabling baffling face "
    //                                 << mesh_.faceCentres()[faceI] << endl;
    //                             createBaffle = false;
    //                         }
    //                     }
    //                 }
    //             }
    //
    //
    //     }
    //XXXXXX

    return filteredCouples;
}
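The comment above describes detecting the rim of a baffle region by counting baffle faces per mesh edge. A standalone sketch of that counting idea with plain integer labels instead of OpenFOAM types:

```cpp
#include <map>
#include <vector>

// faceEdges[i] lists the edge labels of baffle face i.  Returns, per edge,
// the number of baffle faces sharing it; callers would then treat edges with
// a count of 2 (both boundary faces are baffle faces) as lying on the rim of
// a free-standing baffle region, as the comment above describes.
std::map<int, int> countBaffleFacesPerEdge
(
    const std::vector<std::vector<int>>& faceEdges
)
{
    std::map<int, int> nBaffleFacesPerEdge;
    for (const auto& edges : faceEdges)
    {
        for (int edgeI : edges)
        {
            ++nBaffleFacesPerEdge[edgeI];
        }
    }
    return nBaffleFacesPerEdge;
}
```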
@@ -1717,11 +1857,6 @@ void Foam::meshRefinement::baffleAndSplitMesh
        neiPatch
    );

    if (debug)
    {
        runTime++;
    }

    createBaffles(ownPatch, neiPatch);

    if (debug)
@@ -1874,15 +2009,11 @@ void Foam::meshRefinement::baffleAndSplitMesh
        << "---------------------------" << nl
        << endl;

    if (debug)
    {
        runTime++;
    }

    // List of pairs of freestanding baffle faces.
    List<labelPair> couples
    (
        filterDuplicateFaces // filter out freestanding baffles
        freeStandingBaffles // filter out freestanding baffles
        (
            getDuplicateFaces // get all baffles
            (
@@ -2484,11 +2615,13 @@ Foam::autoPtr<Foam::mapPolyMesh> Foam::meshRefinement::zonify
    // the information already in surfaceIndex_.

    labelList surface1;
    List<pointIndexHit> hit1;
    vectorField normal1;
    labelList surface2;
    List<pointIndexHit> hit2;
    vectorField normal2;
    {
        List<pointIndexHit> hit1;
        labelList region1;
        List<pointIndexHit> hit2;
        labelList region2;
        surfaces_.findNearestIntersection
        (
@@ -2511,9 +2644,36 @@ Foam::autoPtr<Foam::mapPolyMesh> Foam::meshRefinement::zonify

    if (surface1[i] != -1)
    {
        // If both hit should probably choose nearest. For later.
        namedSurfaceIndex[faceI] = surface1[i];
        nSurfFaces[surface1[i]]++;
        //- Not allowed not to create baffle - is vital for regioning.
        //  Have logic instead at erosion!
        //bool createBaffle = validBaffleTopology
        //(
        //    faceI,
        //    normal1[i],
        //    normal2[i],
        //    end[i]-start[i]
        //);
        //

        // If both hit should probably choose 'nearest'
        if
        (
            surface2[i] != -1
         && (
                magSqr(hit2[i].hitPoint())
              < magSqr(hit1[i].hitPoint())
            )
        )
        {
            namedSurfaceIndex[faceI] = surface2[i];
            nSurfFaces[surface2[i]]++;
        }
        else
        {
            namedSurfaceIndex[faceI] = surface1[i];
            nSurfFaces[surface1[i]]++;
        }
    }
    else if (surface2[i] != -1)
    {
@@ -26,21 +26,14 @@ License
#include "refinementFeatures.H"
#include "Time.H"

// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //

Foam::refinementFeatures::refinementFeatures
void Foam::refinementFeatures::read
(
    const objectRegistry& io,
    const PtrList<dictionary>& featDicts
)
:
    PtrList<featureEdgeMesh>(featDicts.size()),
    levels_(featDicts.size()),
    edgeTrees_(featDicts.size()),
    pointTrees_(featDicts.size())
{
    // Read features

    forAll(featDicts, i)
    {
        const dictionary& dict = featDicts[i];
@@ -75,50 +68,87 @@ Foam::refinementFeatures::refinementFeatures
            << " (" << eMesh.points().size() << " points, "
            << eMesh.edges().size() << " edges)." << endl;
    }
}


void Foam::refinementFeatures::buildTrees
(
    const label featI,
    const labelList& featurePoints
)
{
    const featureEdgeMesh& eMesh = operator[](featI);
    const pointField& points = eMesh.points();
    const edgeList& edges = eMesh.edges();

    // Calculate bb of all points
    treeBoundBox bb(points);

    // Random number generator. Bit dodgy since not exactly random ;-)
    Random rndGen(65431);

    // Slightly extended bb. Slightly off-centred just so on symmetric
    // geometry there are less face/edge aligned items.
    bb = bb.extend(rndGen, 1e-4);
    bb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
    bb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);

    edgeTrees_.set
    (
        featI,
        new indexedOctree<treeDataEdge>
        (
            treeDataEdge
            (
                false, // do not cache bb
                edges,
                points,
                identity(edges.size())
            ),
            bb, // overall search domain
            8, // maxLevel
            10, // leafsize
            3.0 // duplicity
        )
    );

    pointTrees_.set
    (
        featI,
        new indexedOctree<treeDataPoint>
        (
            treeDataPoint(points, featurePoints),
            bb, // overall search domain
            8, // maxLevel
            10, // leafsize
            3.0 // duplicity
        )
    );
}


// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //

Foam::refinementFeatures::refinementFeatures
(
    const objectRegistry& io,
    const PtrList<dictionary>& featDicts
)
:
    PtrList<featureEdgeMesh>(featDicts.size()),
    levels_(featDicts.size()),
    edgeTrees_(featDicts.size()),
    pointTrees_(featDicts.size())
{
    // Read features
    read(io, featDicts);

    // Search engines

    forAll(*this, i)
    {
        const featureEdgeMesh& eMesh = operator[](i);
        const pointField& points = eMesh.points();
        const edgeList& edges = eMesh.edges();

        // Calculate bb of all points
        treeBoundBox bb(points);

        // Random number generator. Bit dodgy since not exactly random ;-)
        Random rndGen(65431);

        // Slightly extended bb. Slightly off-centred just so on symmetric
        // geometry there are less face/edge aligned items.
        bb = bb.extend(rndGen, 1e-4);
        bb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
        bb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);

        edgeTrees_.set
        (
            i,
            new indexedOctree<treeDataEdge>
            (
                treeDataEdge
                (
                    false, // do not cache bb
                    edges,
                    points,
                    identity(edges.size())
                ),
                bb, // overall search domain
                8, // maxLevel
                10, // leafsize
                3.0 // duplicity
            )
        );


        // Detect feature points from edges.
        const labelListList& pointEdges = eMesh.pointEdges();

        DynamicList<label> featurePoints;
        forAll(pointEdges, pointI)
        {
@@ -129,21 +159,79 @@ Foam::refinementFeatures::refinementFeatures
        }

        Info<< "Detected " << featurePoints.size()
            << " featurePoints out of " << points.size()
            << " featurePoints out of " << pointEdges.size()
            << " on feature " << eMesh.name() << endl;

        pointTrees_.set
        (
            i,
            new indexedOctree<treeDataPoint>
            (
                treeDataPoint(points, featurePoints),
                bb, // overall search domain
                8, // maxLevel
                10, // leafsize
                3.0 // duplicity
            )
        );
        buildTrees(i, featurePoints);
    }
}


Foam::refinementFeatures::refinementFeatures
(
    const objectRegistry& io,
    const PtrList<dictionary>& featDicts,
    const scalar minCos
)
:
    PtrList<featureEdgeMesh>(featDicts.size()),
    levels_(featDicts.size()),
    edgeTrees_(featDicts.size()),
    pointTrees_(featDicts.size())
{
    // Read features
    read(io, featDicts);

    // Search engines
    forAll(*this, i)
    {
        const featureEdgeMesh& eMesh = operator[](i);
        const pointField& points = eMesh.points();
        const edgeList& edges = eMesh.edges();
        const labelListList& pointEdges = eMesh.pointEdges();

        DynamicList<label> featurePoints;
        forAll(pointEdges, pointI)
        {
            const labelList& pEdges = pointEdges[pointI];
            if (pEdges.size() > 2)
            {
                featurePoints.append(pointI);
            }
            else if (pEdges.size() == 2)
            {
                // Check the angle
                const edge& e0 = edges[pEdges[0]];
                const edge& e1 = edges[pEdges[1]];

                const point& p = points[pointI];
                const point& p0 = points[e0.otherVertex(pointI)];
                const point& p1 = points[e1.otherVertex(pointI)];

                vector v0 = p-p0;
                scalar v0Mag = mag(v0);

                vector v1 = p1-p;
                scalar v1Mag = mag(v1);

                if
                (
                    v0Mag > SMALL
                 && v1Mag > SMALL
                 && ((v0/v0Mag & v1/v1Mag) < minCos)
                )
                {
                    featurePoints.append(pointI);
                }
            }
        }

        Info<< "Detected " << featurePoints.size()
            << " featurePoints out of " << points.size()
            << " on feature " << eMesh.name()
            << " when using feature cos " << minCos << endl;

        buildTrees(i, featurePoints);
    }
}
@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

@@ -69,17 +69,33 @@ private:

    // Private Member Functions

        //- Read set of feature edge meshes
        void read(const objectRegistry&, const PtrList<dictionary>&);

        //- Build edge tree and feature point tree
        void buildTrees(const label, const labelList&);


public:

    // Constructors

        //- Construct from components
        //- Construct from description
        refinementFeatures
        (
            const objectRegistry& io,
            const PtrList<dictionary>& featDicts
        );

        //- Construct from description and do geometric analysis to determine
        //  feature points
        refinementFeatures
        (
            const objectRegistry& io,
            const PtrList<dictionary>& featDicts,
            const scalar minCos
        );


    // Member Functions

@@ -917,6 +917,127 @@ void Foam::refinementSurfaces::findNearestIntersection
}


void Foam::refinementSurfaces::findNearestIntersection
(
    const labelList& surfacesToTest,
    const pointField& start,
    const pointField& end,

    labelList& surface1,
    List<pointIndexHit>& hit1,
    labelList& region1,
    vectorField& normal1,

    labelList& surface2,
    List<pointIndexHit>& hit2,
    labelList& region2,
    vectorField& normal2
) const
{
    // 1. intersection from start to end
    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    // Initialize arguments
    surface1.setSize(start.size());
    surface1 = -1;
    hit1.setSize(start.size());
    region1.setSize(start.size());
    normal1.setSize(start.size());

    // Current end of segment to test.
    pointField nearest(end);
    // Work array
    List<pointIndexHit> nearestInfo(start.size());
    labelList region;
    vectorField normal;

    forAll(surfacesToTest, testI)
    {
        label surfI = surfacesToTest[testI];
        const searchableSurface& geom = allGeometry_[surfaces_[surfI]];

        // See if any intersection between start and current nearest
        geom.findLine(start, nearest, nearestInfo);
        geom.getRegion(nearestInfo, region);
        geom.getNormal(nearestInfo, normal);

        forAll(nearestInfo, pointI)
        {
            if (nearestInfo[pointI].hit())
            {
                hit1[pointI] = nearestInfo[pointI];
                surface1[pointI] = surfI;
                region1[pointI] = region[pointI];
                normal1[pointI] = normal[pointI];
                nearest[pointI] = hit1[pointI].hitPoint();
            }
        }
    }


    // 2. intersection from end to last intersection
    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    // Find the nearest intersection from end to start. Note that we initialize
    // to the first intersection (if any).
    surface2 = surface1;
    hit2 = hit1;
    region2 = region1;
    normal2 = normal1;

    // Set current end of segment to test.
    forAll(nearest, pointI)
    {
        if (hit1[pointI].hit())
        {
            nearest[pointI] = hit1[pointI].hitPoint();
        }
        else
        {
            // Disable testing by setting to end.
            nearest[pointI] = end[pointI];
        }
    }

    forAll(surfacesToTest, testI)
    {
        label surfI = surfacesToTest[testI];
        const searchableSurface& geom = allGeometry_[surfaces_[surfI]];

        // See if any intersection between end and current nearest
        geom.findLine(end, nearest, nearestInfo);
        geom.getRegion(nearestInfo, region);
        geom.getNormal(nearestInfo, normal);

        forAll(nearestInfo, pointI)
        {
            if (nearestInfo[pointI].hit())
            {
                hit2[pointI] = nearestInfo[pointI];
                surface2[pointI] = surfI;
                region2[pointI] = region[pointI];
                normal2[pointI] = normal[pointI];
                nearest[pointI] = hit2[pointI].hitPoint();
            }
        }
    }


    // Make sure that if hit1 has hit something, hit2 will have at least the
    // same point (due to tolerances it might miss its end point)
    forAll(hit1, pointI)
    {
        if (hit1[pointI].hit() && !hit2[pointI].hit())
        {
            hit2[pointI] = hit1[pointI];
            surface2[pointI] = surface1[pointI];
            region2[pointI] = region1[pointI];
            normal2[pointI] = normal1[pointI];
        }
    }
}
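A much-simplified standalone sketch (not OpenFOAM code) of the two-pass idea above: sweep from the start keeping the nearest hit, then sweep from the end keeping the hit nearest to the end. The real function additionally shrinks the tested segment per surface and records surface index, region and normal for each hit.

```cpp
#include <optional>
#include <vector>

// Each "surface" yields an optional hit parameter t in [0,1] along the
// segment start->end; absent means no intersection with that surface.
struct TwoHits { std::optional<double> first, second; };

TwoHits firstAndLastHit(const std::vector<std::optional<double>>& hits)
{
    TwoHits r;
    for (const auto& t : hits)
    {
        if (!t) continue;
        if (!r.first  || *t < *r.first)  r.first  = t;   // nearest to start
        if (!r.second || *t > *r.second) r.second = t;   // nearest to end
    }
    return r;
}
```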
void Foam::refinementSurfaces::findAnyIntersection
(
    const pointField& start,

@@ -317,6 +317,24 @@ public:
            labelList& region2
        ) const;

        //- findNearestIntersection but also get normals
        void findNearestIntersection
        (
            const labelList& surfacesToTest,
            const pointField& start,
            const pointField& end,

            labelList& surface1,
            List<pointIndexHit>& hit1,
            labelList& region1,
            vectorField& normal1,

            labelList& surface2,
            List<pointIndexHit>& hit2,
            labelList& region2,
            vectorField& normal2
        ) const;

        //- Used for debugging only: find intersection of edge.
        void findAnyIntersection
        (
@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

@@ -357,51 +357,6 @@ void Foam::shellSurfaces::findHigherLevel

// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //

Foam::shellSurfaces::shellSurfaces
(
    const searchableSurfaces& allGeometry,
    const PtrList<dictionary>& shellDicts
)
:
    allGeometry_(allGeometry)
{
    shells_.setSize(shellDicts.size());
    modes_.setSize(shellDicts.size());
    distances_.setSize(shellDicts.size());
    levels_.setSize(shellDicts.size());

    forAll(shellDicts, shellI)
    {
        const dictionary& dict = shellDicts[shellI];
        const word name = dict.lookup("name");
        const word type = dict.lookup("type");

        shells_[shellI] = allGeometry_.findSurfaceID(name);

        if (shells_[shellI] == -1)
        {
            FatalErrorIn
            (
                "shellSurfaces::shellSurfaces"
                "(const searchableSurfaces&, const PtrList<dictionary>&)"
            ) << "No surface called " << name << endl
              << "Valid surfaces are " << allGeometry_.names()
              << exit(FatalError);
        }

        modes_[shellI] = refineModeNames_.read(dict.lookup("refineMode"));

        // Read pairs of distance+level
        setAndCheckLevels(shellI, dict.lookup("levels"));
    }

    // Orient shell surfaces before any searching is done. Note that this
    // only needs to be done for inside or outside. Orienting surfaces
    // constructs lots of addressing which we want to avoid.
    orient();
}


Foam::shellSurfaces::shellSurfaces
(
    const searchableSurfaces& allGeometry,
@@ -410,34 +365,43 @@ Foam::shellSurfaces::shellSurfaces
:
    allGeometry_(allGeometry)
{
    shells_.setSize(shellsDict.size());
    modes_.setSize(shellsDict.size());
    distances_.setSize(shellsDict.size());
    levels_.setSize(shellsDict.size());
    // Wilcard specification : loop over all surfaces and try to find a match.

    // Count number of shells.
    label shellI = 0;
    forAllConstIter(dictionary, shellsDict, iter)
    forAll(allGeometry.names(), geomI)
    {
        shells_[shellI] = allGeometry_.findSurfaceID(iter().keyword());
        const word& geomName = allGeometry_.names()[geomI];

        if (shells_[shellI] == -1)
        if (shellsDict.found(geomName))
        {
            FatalErrorIn
            (
                "shellSurfaces::shellSurfaces"
                "(const searchableSurfaces&, const dictionary>&"
            ) << "No surface called " << iter().keyword() << endl
              << "Valid surfaces are " << allGeometry_.names()
              << exit(FatalError);
            shellI++;
        }
        const dictionary& dict = shellsDict.subDict(iter().keyword());
    }

        modes_[shellI] = refineModeNames_.read(dict.lookup("mode"));

        // Read pairs of distance+level
        setAndCheckLevels(shellI, dict.lookup("levels"));
    // Size lists
    shells_.setSize(shellI);
    modes_.setSize(shellI);
    distances_.setSize(shellI);
    levels_.setSize(shellI);

        shellI++;
    shellI = 0;
    forAll(allGeometry.names(), geomI)
    {
        const word& geomName = allGeometry_.names()[geomI];

        if (shellsDict.found(geomName))
        {
            shells_[shellI] = geomI;
            const dictionary& dict = shellsDict.subDict(geomName);
            modes_[shellI] = refineModeNames_.read(dict.lookup("mode"));

            // Read pairs of distance+level
            setAndCheckLevels(shellI, dict.lookup("levels"));

            shellI++;
        }
    }

    // Orient shell surfaces before any searching is done. Note that this

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

@@ -113,23 +113,6 @@ public:

    // Constructors

        //- Construct from components
        shellSurfaces
        (
            const searchableSurfaces& allGeometry,
            const labelList& shells,
            const List<refineMode>& modes,
            const List<scalarField>& distances,
            const labelListList& levels
        );

        //- Construct from geometry and dictionaries
        shellSurfaces
        (
            const searchableSurfaces& allGeometry,
            const PtrList<dictionary>& shellDicts
        );

        //- Construct from geometry and dictionary
        shellSurfaces
        (