Merge branch 'master' into cvm

Author: graham
Date:   2008-10-15 13:02:36 +01:00
16 changed files with 389 additions and 141 deletions

View File

@@ -625,10 +625,8 @@ int main(int argc, char *argv[])
         << " as patch " << destPatchI
         << " from " << patchDict << endl;
-    patchDict.remove("nFaces");
-    patchDict.add("nFaces", 0);
-    patchDict.remove("startFace");
-    patchDict.add("startFace", startFaceI);
+    patchDict.set("nFaces", 0);
+    patchDict.set("startFace", startFaceI);
     // Add an empty patch.
     allPatches.append

View File

@@ -1,5 +1,6 @@
 EXE_INC = \
     -I$(LIB_SRC)/dynamicMesh/lnInclude \
+    -I$(LIB_SRC)/finiteVolume/lnInclude \
     -I$(LIB_SRC)/meshTools/lnInclude
 EXE_LIBS = \

View File

@@ -46,6 +46,8 @@ Description
 #include "mapAddedPolyMesh.H"
 #include "polyMeshAdder.H"
 #include "faceCoupleInfo.H"
+#include "fvMeshAdder.H"
+#include "polyTopoChange.H"
 using namespace Foam;
@@ -203,6 +205,78 @@ autoPtr<faceCoupleInfo> determineCoupledFaces
 }
+autoPtr<mapPolyMesh> mergeSharedPoints
+(
+    const scalar mergeDist,
+    polyMesh& mesh,
+    labelListList& pointProcAddressing
+)
+{
+    // Find out which sets of points get merged and create a map from
+    // mesh point to unique point.
+    Map<label> pointToMaster
+    (
+        fvMeshAdder::findSharedPoints
+        (
+            mesh,
+            mergeDist
+        )
+    );
+    Info<< "mergeSharedPoints : detected " << pointToMaster.size()
+        << " points that are to be merged." << endl;
+    if (returnReduce(pointToMaster.size(), sumOp<label>()) == 0)
+    {
+        return autoPtr<mapPolyMesh>(NULL);
+    }
+    polyTopoChange meshMod(mesh);
+    fvMeshAdder::mergePoints(mesh, pointToMaster, meshMod);
+    // Change the mesh (no inflation). Note: parallel comms allowed.
+    autoPtr<mapPolyMesh> map = meshMod.changeMesh(mesh, false, true);
+    // Update fields. No inflation, parallel sync.
+    mesh.updateMesh(map);
+    // pointProcAddressing give indices into the master mesh so adapt them
+    // for changed point numbering.
+    // Adapt constructMaps for merged points.
+    forAll(pointProcAddressing, procI)
+    {
+        labelList& constructMap = pointProcAddressing[procI];
+        forAll(constructMap, i)
+        {
+            label oldPointI = constructMap[i];
+            // New label of point after changeMesh.
+            label newPointI = map().reversePointMap()[oldPointI];
+            if (newPointI < -1)
+            {
+                constructMap[i] = -newPointI-2;
+            }
+            else if (newPointI >= 0)
+            {
+                constructMap[i] = newPointI;
+            }
+            else
+            {
+                FatalErrorIn("fvMeshDistribute::mergeSharedPoints()")
+                    << "Problem. oldPointI:" << oldPointI
+                    << " newPointI:" << newPointI << abort(FatalError);
+            }
+        }
+    }
+    return map;
+}
 int main(int argc, char *argv[])
 {
     argList::noParallel();
@@ -214,7 +288,7 @@ int main(int argc, char *argv[])
 # include "setRootCase.H"
 # include "createTime.H"
-    Pout<< "This is an experimental tool which tries to merge"
+    Info<< "This is an experimental tool which tries to merge"
         << " individual processor" << nl
         << "meshes back into one master mesh. Use it if the original"
         << " master mesh has" << nl
@@ -246,7 +320,7 @@ int main(int argc, char *argv[])
     }
     scalar writeTol = Foam::pow(10.0, -scalar(IOstream::defaultPrecision()));
-    Pout<< "Merge tolerance : " << mergeTol << nl
+    Info<< "Merge tolerance : " << mergeTol << nl
         << "Write tolerance : " << writeTol << endl;
     if (runTime.writeFormat() == IOstream::ASCII && mergeTol < writeTol)
@@ -267,11 +341,11 @@ int main(int argc, char *argv[])
     if (fullMatch)
     {
-        Pout<< "Doing geometric matching on all boundary faces." << nl << endl;
+        Info<< "Doing geometric matching on all boundary faces." << nl << endl;
     }
     else
     {
-        Pout<< "Doing geometric matching on correct procBoundaries only."
+        Info<< "Doing geometric matching on correct procBoundaries only."
            << nl << "This assumes a correct decomposition." << endl;
     }
@@ -292,7 +366,7 @@ int main(int argc, char *argv[])
        nProcs++;
     }
-    Pout<< "Found " << nProcs << " processor directories" << nl << endl;
+    Info<< "Found " << nProcs << " processor directories" << nl << endl;
     // Read all databases.
@@ -300,7 +374,7 @@ int main(int argc, char *argv[])
     forAll (databases, procI)
     {
-        Pout<< "Reading database "
+        Info<< "Reading database "
            << args.caseName()/fileName(word("processor") + name(procI))
            << endl;
@@ -337,7 +411,7 @@ int main(int argc, char *argv[])
     }
     // Set master time
-    Pout<< "Setting master time to " << databases[0].timeName() << nl << endl;
+    Info<< "Setting master time to " << databases[0].timeName() << nl << endl;
     runTime.setTime(databases[0]);
@@ -373,7 +447,7 @@ int main(int argc, char *argv[])
            << endl << exit(FatalError);
     }
-    Pout<< "Reading points from "
+    Info<< "Reading points from "
        << databases[procI].caseName()
        << " for time = " << databases[procI].timeName()
        << nl << endl;
@@ -403,7 +477,7 @@ int main(int argc, char *argv[])
     }
     const scalar mergeDist = mergeTol*mag(bb.max() - bb.min());
-    Pout<< "Overall mesh bounding box : " << bb << nl
+    Info<< "Overall mesh bounding box : " << bb << nl
        << "Relative tolerance : " << mergeTol << nl
        << "Absolute matching distance : " << mergeDist << nl
        << endl;
@@ -422,7 +496,7 @@ int main(int argc, char *argv[])
     {
        // Construct empty mesh.
-       Pout<< "Constructing empty mesh to add to." << nl << endl;
+       Info<< "Constructing empty mesh to add to." << nl << endl;
        polyMesh masterMesh
        (
            IOobject
@@ -439,7 +513,7 @@ int main(int argc, char *argv[])
        for (label procI = 0; procI < nProcs; procI++)
        {
-           Pout<< "Reading mesh to add from "
+           Info<< "Reading mesh to add from "
                << databases[procI].caseName()
                << " for time = " << databases[procI].timeName()
                << nl << endl;
@@ -475,7 +549,7 @@ int main(int argc, char *argv[])
            // Add elements to mesh
-           Pout<< "Adding to master mesh" << nl << endl;
+           Info<< "Adding to master mesh" << nl << endl;
            autoPtr<mapAddedPolyMesh> map = polyMeshAdder::add
            (
@@ -503,16 +577,19 @@ int main(int argc, char *argv[])
            renumber(map().addedPointMap(), pointProcAddressing[procI]);
            renumber(map().addedPatchMap(), boundaryProcAddressing[procI]);
-           Pout<< endl;
+           Info<< endl;
        }
+       // See if any points on the mastermesh have become connected
+       // because of connections through processor meshes.
+       mergeSharedPoints(mergeDist, masterMesh, pointProcAddressing);
        // Save some properties on the reconstructed mesh
        masterInternalFaces = masterMesh.nInternalFaces();
        masterOwner = masterMesh.faceOwner();
-       Pout<< "\nWriting merged mesh to "
+       Info<< "\nWriting merged mesh to "
            << runTime.path()/runTime.timeName()
            << nl << endl;
@@ -527,12 +604,12 @@ int main(int argc, char *argv[])
     // Write the addressing
-    Pout<< "Reconstructing the addressing from the processor meshes"
+    Info<< "Reconstructing the addressing from the processor meshes"
        << " to the newly reconstructed mesh" << nl << endl;
     forAll(databases, procI)
     {
-        Pout<< "Reading processor " << procI << " mesh from "
+        Info<< "Reading processor " << procI << " mesh from "
            << databases[procI].caseName() << endl;
        polyMesh procMesh
@@ -548,7 +625,7 @@ int main(int argc, char *argv[])
        // From processor point to reconstructed mesh point
-        Pout<< "Writing pointProcAddressing to "
+        Info<< "Writing pointProcAddressing to "
            << databases[procI].caseName()
              /procMesh.facesInstance()
              /polyMesh::meshSubDir
@@ -572,7 +649,7 @@ int main(int argc, char *argv[])
        // From processor face to reconstructed mesh face
-        Pout<< "Writing faceProcAddressing to "
+        Info<< "Writing faceProcAddressing to "
            << databases[procI].caseName()
              /procMesh.facesInstance()
              /polyMesh::meshSubDir
@@ -635,7 +712,7 @@ int main(int argc, char *argv[])
        // From processor cell to reconstructed mesh cell
-        Pout<< "Writing cellProcAddressing to "
+        Info<< "Writing cellProcAddressing to "
            << databases[procI].caseName()
              /procMesh.facesInstance()
              /polyMesh::meshSubDir
@@ -660,7 +737,7 @@ int main(int argc, char *argv[])
        // From processor patch to reconstructed mesh patch
-        Pout<< "Writing boundaryProcAddressing to "
+        Info<< "Writing boundaryProcAddressing to "
            << databases[procI].caseName()
              /procMesh.facesInstance()
              /polyMesh::meshSubDir
@@ -681,10 +758,10 @@ int main(int argc, char *argv[])
            boundaryProcAddressing[procI]
        ).write();
-        Pout<< endl;
+        Info<< endl;
     }
-    Pout<< "End.\n" << endl;
+    Info<< "End.\n" << endl;
     return 0;
 }
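
Editorial sketch: the constructMap adaptation in the added mergeSharedPoints() above relies on the reversePointMap() convention produced by the topology change. As a minimal standalone illustration (plain C++ with std:: containers, not OpenFOAM types; the function name is invented), the encoding can be read like this:

    // Convention used above:
    //   v >= 0  : point kept, v is its new label
    //   v == -1 : point removed outright
    //   v < -1  : point merged away; the surviving label is -v - 2
    #include <vector>
    #include <stdexcept>

    int newLabelFor(const std::vector<int>& reversePointMap, int oldPointI)
    {
        int v = reversePointMap.at(oldPointI);
        if (v >= 0)  return v;         // kept under a new label
        if (v < -1)  return -v - 2;    // merged into this label
        throw std::runtime_error("point was removed");   // v == -1
    }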

View File

@@ -54,9 +54,11 @@ Foam::Field<T> Foam::channelIndex::collapse
 ) const
 {
     // Average and order
+    const Field<T> summedField(regionSum(cellField));
     Field<T> regionField
     (
-        regionSum(cellField)
+        summedField
       / regionCount_,
         sortMap_
     );

View File

@@ -79,8 +79,9 @@ inline void Foam::DynamicList<T, SizeInc, SizeMult, SizeDiv>::setSize
     const label s
 )
 {
-    if (s < List<T>::size())
+    if (s <= List<T>::size())
     {
+        // shrink addressable size, leave allocated size untouched
         List<T>::size() = s;
     }
     else
@@ -100,8 +101,9 @@ inline void Foam::DynamicList<T, SizeInc, SizeMult, SizeDiv>::setSize
     const T& t
 )
 {
-    if (s < List<T>::size())
+    if (s <= List<T>::size())
     {
+        // shrink addressable size, leave allocated size untouched
        List<T>::size() = s;
     }
     else
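
Editorial sketch: the change from s < List<T>::size() to s <= List<T>::size() above means a setSize() call that does not grow the list is handled purely as a shrink of the addressable size. A rough standalone illustration of that semantics (std::vector instead of Foam's container, names invented):

    // A setSize() that does not grow only adjusts the addressable size and
    // leaves the allocation alone; previously s == size() took the grow path.
    #include <vector>
    #include <cstddef>

    struct TinyDynamicList
    {
        std::vector<double> storage;   // allocated elements
        std::size_t used = 0;          // addressable size

        void setSize(std::size_t s)
        {
            if (s <= used)
            {
                used = s;              // shrink view, keep allocation
            }
            else
            {
                storage.resize(s);     // genuinely grow
                used = s;
            }
        }
    };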

View File

@@ -77,7 +77,7 @@ Foam::Istream& Foam::operator>>(Foam::Istream& is, FixedList<T, Size>& L)
     }
     else
     {
-        // Putback the openning bracket
+        // Putback the opening bracket
        is.putBack(firstToken);
     }
@@ -190,8 +190,9 @@ Foam::Ostream& Foam::operator<<(Ostream& os, const FixedList<T, Size>& L)
     if (uniform)
     {
-        // Write size of list and start contents delimiter
-        os << token::BEGIN_BLOCK;
+        // Write size of list (so it is valid dictionary entry) and
+        // start contents delimiter
+        os << L.size() << token::BEGIN_BLOCK;
        // Write list contents
        os << L[0];
@@ -201,7 +202,7 @@ Foam::Ostream& Foam::operator<<(Ostream& os, const FixedList<T, Size>& L)
     }
     else if (Size < 11 && contiguous<T>())
     {
-        // Write size of list and start contents delimiter
+        // Write start of contents delimiter
        os << token::BEGIN_LIST;
        // Write list contents
@@ -216,7 +217,7 @@ Foam::Ostream& Foam::operator<<(Ostream& os, const FixedList<T, Size>& L)
     }
     else
     {
-        // Write size of list and start contents delimiter
+        // Write start of contents delimiter
        os << nl << token::BEGIN_LIST;
        // Write list contents
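
Editorial sketch: the uniform branch above now writes the element count ahead of the opening brace, so the serialised form becomes something like 4{1.5} rather than a bare {1.5}, which is what makes it a valid dictionary entry. A hedged illustration with plain iostreams (not Foam's Ostream; values invented):

    // Emit 'size' copies of 'value' in the compact count-plus-braces form,
    // e.g. writeUniform(std::cout, 4, 1.5) prints "4{1.5}".
    #include <iostream>

    void writeUniform(std::ostream& os, int size, double value)
    {
        os << size << '{' << value << '}';
    }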

View File

@@ -45,9 +45,19 @@ bool Foam::matchPoints
     bool fullMatch = true;
-    SortableList<scalar> pts0MagSqr(magSqr(pts0 - origin));
-    SortableList<scalar> pts1MagSqr(magSqr(pts1 - origin));
+    point compareOrigin = origin;
+    if (origin == point(VGREAT, VGREAT, VGREAT))
+    {
+        if (pts1.size() > 0)
+        {
+            compareOrigin = sum(pts1)/pts1.size();
+        }
+    }
+    SortableList<scalar> pts0MagSqr(magSqr(pts0 - compareOrigin));
+    SortableList<scalar> pts1MagSqr(magSqr(pts1 - compareOrigin));
     forAll(pts0MagSqr, i)
     {

View File

@@ -40,6 +40,16 @@ bool Foam::mergePoints
     const point& origin
 )
 {
+    point compareOrigin = origin;
+    if (origin == point(VGREAT, VGREAT, VGREAT))
+    {
+        if (points.size() > 0)
+        {
+            compareOrigin = sum(points)/points.size();
+        }
+    }
     // Create a old to new point mapping array
     pointMap.setSize(points.size());
     pointMap = -1;
@@ -56,7 +66,7 @@ bool Foam::mergePoints
     const scalar mergeTolSqr = sqr(mergeTol);
     // Sort points by magSqr
-    SortableList<scalar> sortedMagSqr(magSqr(points - origin));
+    SortableList<scalar> sortedMagSqr(magSqr(points - compareOrigin));
     bool hasMerged = false;
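
Editorial sketch: both matchPoints and mergePoints above now substitute a computed reference point when the caller leaves origin at the (VGREAT, VGREAT, VGREAT) default, presumably so the squared-distance sort stays well conditioned for point sets far from the coordinate origin. A standalone sketch of that fallback (plain C++, invented names; the real code uses Foam's point type and sum()):

    // An "unset" sentinel origin is replaced by the centroid of the points
    // before squared distances are computed.
    #include <array>
    #include <vector>
    #include <limits>

    using Point = std::array<double, 3>;

    Point chooseCompareOrigin(const Point& origin, const std::vector<Point>& points)
    {
        const double unset = std::numeric_limits<double>::max();  // stand-in for VGREAT
        if (origin == Point{{unset, unset, unset}} && !points.empty())
        {
            Point c{{0.0, 0.0, 0.0}};
            for (const Point& p : points)
            {
                for (int d = 0; d < 3; ++d) c[d] += p[d];
            }
            for (int d = 0; d < 3; ++d) c[d] /= points.size();
            return c;                  // centroid as reference point
        }
        return origin;                 // caller supplied a real origin
    }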

View File

@@ -279,27 +279,13 @@ Foam::layerParameters::layerParameters
     {
         const dictionary& layersDict = dict.subDict("layers");
-        forAllConstIter(dictionary, layersDict, iter)
+        forAll(boundaryMesh, patchI)
         {
-            const word& key = iter().keyword();
-            if (layersDict.isDict(key))
+            const word& patchName = boundaryMesh[patchI].name();
+            if (layersDict.found(patchName))
             {
-                label patchI = boundaryMesh.findPatchID(key);
-                if (patchI == -1)
-                {
-                    FatalErrorIn
-                    (
-                        "layerParameters::layerParameters"
-                        "(const dictionary&, const polyBoundaryMesh&)"
-                    )   << "Specified illegal patch " << key
-                        << " in layer dictionary." << endl
-                        << "Valid patch names are " << boundaryMesh.names()
-                        << exit(FatalError);
-                }
-                const dictionary& layerDict = layersDict.subDict(key);
+                const dictionary& layerDict = layersDict.subDict(patchName);
                 numLayers_[patchI] =
                     readLabel(layerDict.lookup("nSurfaceLayers"));

View File

@@ -40,6 +40,7 @@ SourceFiles
 #include "polyMeshAdder.H"
 #include "fvPatchFieldsFwd.H"
+#include "fvsPatchFieldsFwd.H"
 #include "fvPatchFieldMapper.H"
 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

View File

@@ -586,8 +586,8 @@ Foam::autoPtr<Foam::mapPolyMesh> Foam::fvMeshDistribute::repatch
                 "fvMeshDistribute::repatch(const labelList&, labelListList&)"
             )   << "reverseFaceMap contains -1 at index:"
                 << index << endl
-                << "This means that the repatch operation was not just a shuffle?"
-                << abort(FatalError);
+                << "This means that the repatch operation was not just"
+                << " a shuffle?" << abort(FatalError);
         }
     }
@@ -622,9 +622,7 @@ Foam::autoPtr<Foam::mapPolyMesh> Foam::fvMeshDistribute::mergeSharedPoints
         )
     );
-    bool merged = pointToMaster.size() > 0;
-    if (!returnReduce(merged, orOp<bool>()))
+    if (returnReduce(pointToMaster.size(), sumOp<label>()) == 0)
     {
         return autoPtr<mapPolyMesh>(NULL);
     }
@@ -639,14 +637,7 @@
     // Update fields. No inflation, parallel sync.
     mesh_.updateMesh(map);
-    // Move mesh (since morphing does not do this)
-    if (map().hasMotionPoints())
-    {
-        mesh_.movePoints(map().preMotionPoints());
-    }
     // Adapt constructMaps for merged points.
-    // 1.4.1: use reversePointMap < -1 feature.
     forAll(constructPointMap, procI)
     {
         labelList& constructMap = constructPointMap[procI];
@@ -655,16 +646,38 @@
         {
             label oldPointI = constructMap[i];
-            // See if merged into other point
-            Map<label>::const_iterator iter = pointToMaster.find(oldPointI);
-            if (iter != pointToMaster.end())
+            label newPointI = map().reversePointMap()[oldPointI];
+            if (newPointI < -1)
             {
-                oldPointI = iter();
+                constructMap[i] = -newPointI-2;
+            }
+            else if (newPointI >= 0)
+            {
+                constructMap[i] = newPointI;
+            }
+            else
+            {
+                FatalErrorIn("fvMeshDistribute::mergeSharedPoints()")
+                    << "Problem. oldPointI:" << oldPointI
+                    << " newPointI:" << newPointI << abort(FatalError);
             }
-            constructMap[i] = map().reversePointMap()[oldPointI];
         }
+        //- old: use pointToMaster map.
+        //forAll(constructMap, i)
+        //{
+        //    label oldPointI = constructMap[i];
+        //
+        //    // See if merged into other point
+        //    Map<label>::const_iterator iter = pointToMaster.find(oldPointI);
+        //
+        //    if (iter != pointToMaster.end())
+        //    {
+        //        oldPointI = iter();
+        //    }
+        //
+        //    constructMap[i] = map().reversePointMap()[oldPointI];
+        //}
     }
     return map;
 }
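
Editorial sketch: the early-return test above (as in the commit's other mergeSharedPoints) is a collective reduction: every processor sums the per-processor counts and therefore takes the same branch before the synchronised topology change. A rough analogy in plain MPI (not OpenFOAM's returnReduce/sumOp machinery; function name invented):

    // All ranks compute the same global count, so either all of them or none
    // of them proceed to the parallel mesh update; a purely local test could
    // leave some ranks waiting in collective communication.
    #include <mpi.h>

    bool anyPointsToMerge(long localCount, MPI_Comm comm)
    {
        long globalCount = 0;
        MPI_Allreduce(&localCount, &globalCount, 1, MPI_LONG, MPI_SUM, comm);
        return globalCount > 0;
    }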

View File

@@ -1737,85 +1737,218 @@ Foam::autoPtr<Foam::mapAddedPolyMesh> Foam::polyMeshAdder::add
 Foam::Map<Foam::label> Foam::polyMeshAdder::findSharedPoints
 (
     const polyMesh& mesh,
-    const scalar mergeTol
+    const scalar mergeDist
 )
 {
     const labelList& sharedPointLabels = mesh.globalData().sharedPointLabels();
-    labelList sharedToMerged;
-    pointField mergedPoints;
-    bool hasMerged = Foam::mergePoints
-    (
-        pointField
-        (
-            IndirectList<point>
-            (
-                mesh.points(),
-                sharedPointLabels
-            )()
-        ),
-        mergeTol,
-        false,
-        sharedToMerged,
-        mergedPoints
-    );
-    // Find out which sets of points get merged and create a map from
-    // mesh point to unique point.
-    Map<label> pointToMaster(10*sharedToMerged.size());
-    if (hasMerged)
-    {
-        labelListList mergeSets
-        (
-            invertOneToMany
-            (
-                sharedToMerged.size(),
-                sharedToMerged
-            )
-        );
-        label nMergeSets = 0;
-        forAll(mergeSets, setI)
-        {
-            const labelList& mergeSet = mergeSets[setI];
-            if (mergeSet.size() > 1)
-            {
-                // Take as master the shared point with the lowest mesh
-                // point label. (rather arbitrarily - could use max or any other
-                // one of the points)
-                nMergeSets++;
-                label masterI = labelMax;
-                forAll(mergeSet, i)
-                {
-                    label sharedI = mergeSet[i];
-                    masterI = min(masterI, sharedPointLabels[sharedI]);
-                }
-                forAll(mergeSet, i)
-                {
-                    label sharedI = mergeSet[i];
-                    pointToMaster.insert(sharedPointLabels[sharedI], masterI);
-                }
-            }
-        }
-        //if (debug)
-        //{
-        //    Pout<< "polyMeshAdder : merging:"
-        //        << pointToMaster.size() << " into " << nMergeSets << " sets."
-        //        << endl;
-        //}
-    }
+    const labelList& sharedPointAddr = mesh.globalData().sharedPointAddr();
+    // Because of adding the missing pieces e.g. when redistributing a mesh
+    // it can be that there are multiple points on the same processor that
+    // refer to the same shared point.
+    // Invert point-to-shared addressing
+    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    Map<labelList> sharedToMesh(sharedPointLabels.size());
+    label nMultiple = 0;
+    forAll(sharedPointLabels, i)
+    {
+        label pointI = sharedPointLabels[i];
+        label sharedI = sharedPointAddr[i];
+        Map<labelList>::iterator iter = sharedToMesh.find(sharedI);
+        if (iter != sharedToMesh.end())
+        {
+            // sharedI already used by other point. Add this one.
+            nMultiple++;
+            labelList& connectedPointLabels = iter();
+            label sz = connectedPointLabels.size();
+            // Check just to make sure.
+            if (findIndex(connectedPointLabels, pointI) != -1)
+            {
+                FatalErrorIn("polyMeshAdder::findSharedPoints(..)")
+                    << "Duplicate point in sharedPoint addressing." << endl
+                    << "When trying to add point " << pointI << " on shared "
+                    << sharedI << " with connected points "
+                    << connectedPointLabels
+                    << abort(FatalError);
+            }
+            connectedPointLabels.setSize(sz+1);
+            connectedPointLabels[sz] = pointI;
+        }
+        else
+        {
+            sharedToMesh.insert(sharedI, labelList(1, pointI));
+        }
+    }
+    // Assign single master for every shared with multiple geometric points
+    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    Map<label> pointToMaster(nMultiple);
+    forAllConstIter(Map<labelList>, sharedToMesh, iter)
+    {
+        const labelList& connectedPointLabels = iter();
+        //Pout<< "For shared:" << iter.key()
+        //    << " found points:" << connectedPointLabels
+        //    << " at coords:"
+        //    << pointField(mesh.points(), connectedPointLabels) << endl;
+        if (connectedPointLabels.size() > 1)
+        {
+            const pointField connectedPoints
+            (
+                mesh.points(),
+                connectedPointLabels
+            );
+            labelList toMergedPoints;
+            pointField mergedPoints;
+            bool hasMerged = Foam::mergePoints
+            (
+                connectedPoints,
+                mergeDist,
+                false,
+                toMergedPoints,
+                mergedPoints
+            );
+            if (hasMerged)
+            {
+                // Invert toMergedPoints
+                const labelListList mergeSets
+                (
+                    invertOneToMany
+                    (
+                        mergedPoints.size(),
+                        toMergedPoints
+                    )
+                );
+                // Find master for valid merges
+                forAll(mergeSets, setI)
+                {
+                    const labelList& mergeSet = mergeSets[setI];
+                    if (mergeSet.size() > 1)
+                    {
+                        // Pick lowest numbered point
+                        label masterPointI = labelMax;
+                        forAll(mergeSet, i)
+                        {
+                            label pointI = connectedPointLabels[mergeSet[i]];
+                            masterPointI = min(masterPointI, pointI);
+                        }
+                        forAll(mergeSet, i)
+                        {
+                            label pointI = connectedPointLabels[mergeSet[i]];
+                            //Pout<< "Merging point " << pointI
+                            //    << " at " << mesh.points()[pointI]
+                            //    << " into master point "
+                            //    << masterPointI
+                            //    << " at " << mesh.points()[masterPointI]
+                            //    << endl;
+                            pointToMaster.insert(pointI, masterPointI);
+                        }
+                    }
+                }
+            }
+        }
+    }
+    //- Old: geometric merging. Causes problems for two close shared points.
+    //labelList sharedToMerged;
+    //pointField mergedPoints;
+    //bool hasMerged = Foam::mergePoints
+    //(
+    //    pointField
+    //    (
+    //        IndirectList<point>
+    //        (
+    //            mesh.points(),
+    //            sharedPointLabels
+    //        )()
+    //    ),
+    //    mergeDist,
+    //    false,
+    //    sharedToMerged,
+    //    mergedPoints
+    //);
+    //
+    //// Find out which sets of points get merged and create a map from
+    //// mesh point to unique point.
+    //
+    //Map<label> pointToMaster(10*sharedToMerged.size());
+    //
+    //if (hasMerged)
+    //{
+    //    labelListList mergeSets
+    //    (
+    //        invertOneToMany
+    //        (
+    //            sharedToMerged.size(),
+    //            sharedToMerged
+    //        )
+    //    );
+    //
+    //    label nMergeSets = 0;
+    //
+    //    forAll(mergeSets, setI)
+    //    {
+    //        const labelList& mergeSet = mergeSets[setI];
+    //
+    //        if (mergeSet.size() > 1)
+    //        {
+    //            // Take as master the shared point with the lowest mesh
+    //            // point label. (rather arbitrarily - could use max or
+    //            // any other one of the points)
+    //
+    //            nMergeSets++;
+    //
+    //            label masterI = labelMax;
+    //
+    //            forAll(mergeSet, i)
+    //            {
+    //                label sharedI = mergeSet[i];
+    //
+    //                masterI = min(masterI, sharedPointLabels[sharedI]);
+    //            }
+    //
+    //            forAll(mergeSet, i)
+    //            {
+    //                label sharedI = mergeSet[i];
+    //
+    //                pointToMaster.insert(sharedPointLabels[sharedI], masterI);
+    //            }
+    //        }
+    //    }
+    //
+    //    //if (debug)
+    //    //{
+    //    //    Pout<< "polyMeshAdder : merging:"
+    //    //    << pointToMaster.size() << " into " << nMergeSets
+    //    //    << " sets." << endl;
+    //    //}
+    //}
     return pointToMaster;
 }
@@ -1836,8 +1969,7 @@ void Foam::polyMeshAdder::mergePoints
     {
         if (iter() != pointI)
         {
-            //1.4.1: meshMod.removePoint(pointI, iter());
-            meshMod.setAction(polyRemovePoint(pointI));
+            meshMod.removePoint(pointI, iter());
         }
     }
 }
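
Editorial sketch: the rewritten findSharedPoints() above first groups every local mesh point that refers to the same global shared point and only then merges within each group, taking the lowest point label as master. A condensed standalone sketch of that grouping step (std:: containers, names invented; the geometric mergeDist filtering run inside each group by the real code is omitted here):

    // Group mesh points by their shared-point index, then map every member of
    // a multi-point group onto the group's lowest-numbered point.
    #include <algorithm>
    #include <cstddef>
    #include <map>
    #include <vector>

    std::map<int, int> pickMasters
    (
        const std::vector<int>& sharedPointAddr,    // global shared index per entry
        const std::vector<int>& sharedPointLabels   // local mesh point per entry
    )
    {
        std::map<int, std::vector<int>> sharedToMesh;
        for (std::size_t i = 0; i < sharedPointAddr.size(); ++i)
        {
            sharedToMesh[sharedPointAddr[i]].push_back(sharedPointLabels[i]);
        }

        std::map<int, int> pointToMaster;
        for (const auto& group : sharedToMesh)
        {
            const std::vector<int>& pts = group.second;
            if (pts.size() > 1)
            {
                int master = *std::min_element(pts.begin(), pts.end());
                for (int p : pts) pointToMaster[p] = master;
            }
        }
        return pointToMaster;
    }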

View File

@@ -2617,6 +2617,13 @@ void Foam::polyTopoChange::removePoint
             << abort(FatalError);
     }
+    if (pointI == mergePointI)
+    {
+        FatalErrorIn("polyTopoChange::removePoint(const label, const label)")
+            << "Cannot remove/merge point " << pointI << " onto itself."
+            << abort(FatalError);
+    }
     points_[pointI] = greatPoint;
     pointMap_[pointI] = -1;
     if (mergePointI >= 0)

View File

@@ -410,8 +410,14 @@ Foam::refinementHistory::refinementHistory
     }
     else
     {
-        splitCells_.setSize(nCells, splitCell8());
-        visibleCells_ = identity(nCells);
+        visibleCells_.setSize(nCells);
+        splitCells_.setSize(nCells);
+        for (label cellI = 0; cellI < nCells; cellI++)
+        {
+            visibleCells_[cellI] = cellI;
+            splitCells_.append(splitCell8());
+        }
     }
     // Check indices.

View File

@@ -459,6 +459,7 @@ bool triSurface::readSTLASCII(const fileName& STLfileName)
         patches_[iter()].name() = iter.key();
     }
+    // Fill in the missing information in the patches
     setDefaultPatches();
     return true;

View File

@@ -631,15 +631,16 @@ surfacePatchList triSurface::calcPatches(labelList& faceMap) const
     faceMap = sortedRegion.indices();
-    // Compact regions
-    // Get last region
-    //label maxRegion = 0;    // for compacted regions
+    // Extend regions
     label maxRegion = patches_.size()-1;    // for non-compacted regions
     if (faceMap.size() > 0)
     {
-        maxRegion = operator[](faceMap[faceMap.size() - 1]).region();
+        maxRegion = max
+        (
+            maxRegion,
+            operator[](faceMap[faceMap.size() - 1]).region()
+        );
     }
     // Get new region list