Compare commits
2 Commits
feature-pr...multiNodeD

| SHA1 |
|---|
| 8862a0b3b2 |
| c7477885a5 |
@ -4,6 +4,7 @@ simpleGeomDecomp/simpleGeomDecomp.C
hierarchGeomDecomp/hierarchGeomDecomp.C
manualDecomp/manualDecomp.C
multiLevelDecomp/multiLevelDecomp.C
+multiNodeDecomp/multiNodeDecomp.C
metisLikeDecomp/metisLikeDecomp.C
structuredDecomp/structuredDecomp.C
randomDecomp/randomDecomp.C
@ -0,0 +1,58 @@

# New Multi-Node Decomposition

The multi-node decomposition is an extension of the existing multi-level decomposition. It supports the syntax of the current multi-level decomposition, but also lets you reshape the decomposition tree as you wish. For example, you may split into unbalanced nodes, give some nodes larger weights than others, or use a different decomposition method for some nodes.

You may set up the decomposition in two ways:

1. Using a domains list and a default method:

```
numberOfSubdomains 8;
multiNodeCoeffs {
    domains (2 4);
    method metis;
}
```

2. Using a dictionary for each level:

```
numberOfSubdomains 8;
multiNodeCoeffs {
    nodes {
        numberOfSubdomains 2;
        method metis;
    }
    cores {
        numberOfSubdomains 4;
        method scotch;
    }
}
```

Note that if the total number of subdomains does not match the product of the per-level counts, but a default method is provided, a new level is inferred so that the product matches the total.
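
For instance, the following sketch triggers the inference (the method and counts are illustrative): the product of the declared levels is 2 while 8 subdomains are requested, so a level 0 with 8/2 = 4 domains is inferred, giving an effective `domains` of `(4 2)`.

```
numberOfSubdomains 8;
multiNodeCoeffs {
    domains (2);     // product 2 != 8, but 8 % 2 == 0
    method  scotch;  // a default method makes the inference possible
}
```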

This creates a "decomposition tree". For example, the dictionaries above create a tree in which the root has two children, and each child has four children (the leaves of the tree). Every leaf of the tree is a subdomain of the final decomposition.
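
For the `(2 4)` example above, the tree looks like this (node names are illustrative):

```
root                 8 subdomains in total
├─ node 0            4 subdomains
│  └─ cores 0-3
└─ node 1            4 subdomains
   └─ cores 4-7
```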

After setting up the decomposition, we may edit specific nodes or ranges of nodes. For example, suppose we want to split into two nodes, the first with four subdomains and the second with eight. We can use the above dictionaries and then add:

```
domains[1] (8);
```

The square brackets indicate which nodes in the tree to edit: here, the second child of the root (indexing starts from zero). If we wanted to change the first two children of the third child of the root, we would write:

```
domains[2][0-1] (8);
```

Note that after all modifications, the product of the subdomain counts must match the declared total number of subdomains (12 in the example above). In addition, the decomposition into two nodes is done as if they were of equal size, so each of the first four subdomains will be bigger than each of the other eight. To fix this, we may either:

1. Set the weight of the second node to twice that of the first:

```
weight[1] 2;
```

2. Set the weights initialization to relative, which computes each child's weight from the number of leaves in its subtree. Note that this updates the initialization of the whole subtree, but individual weights can still be overridden with the `weight` parameter:

```
weightsInitialization relative;
```

We may also supply a method dictionary that decomposes some nodes differently:

```
method[2-4] {
    numberOfSubdomains 4;
    method metis;
    coeffs {
        ...
    }
}
```
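
Putting the pieces together, here is a sketch of a complete setup for the 4 + 8 split discussed above (the method choice is illustrative):

```
numberOfSubdomains 12;
method multiNode;

multiNodeCoeffs {
    domains (2 4);                   // start from a balanced 2 x 4 tree
    method metis;                    // default method for every node

    domains[1] (8);                  // second node: 8 subdomains instead of 4
    weightsInitialization relative;  // split the mesh 4:8 between the nodes
}
```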

@ -0,0 +1,788 @@

/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2011-2017 OpenFOAM Foundation
    Copyright (C) 2017-2021 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

    OpenFOAM is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with OpenFOAM.  If not, see <http://www.gnu.org/licenses/>.

\*---------------------------------------------------------------------------*/

#include "multiNodeDecomp.H"
#include "addToRunTimeSelectionTable.H"
#include "IFstream.H"
#include "globalIndex.H"
#include "mapDistribute.H"
#include "DynamicList.H"


// * * * * * * * * * * * * * * Static Data Members  * * * * * * * * * * * * //

namespace Foam
{
    defineTypeNameAndDebug(multiNodeDecomp, 0);
    addToRunTimeSelectionTable
    (
        decompositionMethod,
        multiNodeDecomp,
        dictionary
    );
}


// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //

namespace Foam {

void multiNodeDecomp::initializeMetadata(const dictionary& coeffsDict) {
    word defaultMethod;
    dictionary defaultMethodDict;
    if(coeffsDict.readIfPresent("method", defaultMethod, keyType::LITERAL)) {
        defaultMethodDict.add("method", defaultMethod);
        const dictionary& subMethodCoeffsDict
        (
            findCoeffsDict
            (
                coeffsDict,
                defaultMethod + "Coeffs",
                selectionType::NULL_DICT
            )
        );
        if(subMethodCoeffsDict.size())
            defaultMethodDict.add(subMethodCoeffsDict.dictName(), subMethodCoeffsDict);
    }
    labelList domains;

    label nTotal = 0;
    label nLevels = 0;

    // Check if any meta argument is changed using the new syntax.
    // If so, we cannot infer an additional level of decomposition,
    // as it may interfere with the indices.
    List<string> domainChanges = metaParser::getEntries(coeffsDict, "domains");
    List<string> methodChanges = metaParser::getEntries(coeffsDict, "method");
    List<string> weightChanges = metaParser::getEntries(coeffsDict, "weight");
    // weightsInitialization may also be given without brackets (targets the root)
    List<string> weightModeChanges = metaParser::getEntries(coeffsDict, "weightsInitialization", true);

    bool bChangesDomains = !domainChanges.empty();
    bool bChangesArguments = bChangesDomains
        || (!methodChanges.empty())
        || (!weightChanges.empty())
        || (!weightModeChanges.empty());

    bool bMetadataInitialized = false;

    // Found (non-recursive, no patterns) "method" and "domains" ?
    // Allow as quick short-cut entry
    if
    (
        // non-recursive, no patterns
        coeffsDict.readIfPresent("method", defaultMethod, keyType::LITERAL)
        // non-recursive, no patterns
        && coeffsDict.readIfPresent("domains", domains, keyType::LITERAL)
    )
    {
        // Short-cut version specified by method, domains only

        nTotal = (domains.empty() ? 0 : 1);
        for (const label n : domains)
        {
            nTotal *= n;
            ++nLevels;
        }

        // Apply domain updates here
        if(nTotal != 0 && bChangesDomains) {
            rootMetadata_.initialize(
                domains,
                &defaultMethodDict
            );
            bMetadataInitialized = true;
            for(string key : domainChanges)
                rootMetadata_.updateDomains(key,
                    coeffsDict.get<labelList>(key, keyType::LITERAL));

            nTotal = rootMetadata_.getSize();
        }

        if (nTotal == 1)
        {
            // Emit Warning
            nTotal = nDomains();
            nLevels = 1;
            domains.setSize(1);
            domains[0] = nTotal;
        }
        // If bChangesDomains is true, we do not want to add another dimension,
        // as this may affect the user's assignments of domains/weights/methods later on.
        else if (nTotal > 0 && nTotal < nDomains() && !(nDomains() % nTotal) && !bChangesArguments)
        {
            // nTotal < nDomains, but with an integral factor,
            // which we insert as level 0
            ++nLevels;

            labelList old(std::move(domains));

            domains.setSize(old.size()+1);

            domains[0] = nDomains() / nTotal;
            forAll(old, i)
            {
                domains[i+1] = old[i];
            }
            nTotal *= domains[0];

            Info<< "    inferred level 0 with " << domains[0]
                << " domains" << nl << nl;
        }

        if (!nLevels || nTotal != nDomains())
        {
            FatalErrorInFunction
                << "Top level decomposition specifies " << nDomains()
                << " domains which is not equal to the product of"
                << " all sub domains " << nTotal
                << exit(FatalError);
        }

        if(!bMetadataInitialized) {
            bMetadataInitialized = true;
            rootMetadata_.initialize(
                domains,
                &defaultMethodDict
            );
        }
    }
    else
    {
        // Specified by full dictionaries

        // Create editable methods dictionaries
        // - Only consider sub-dictionaries with a "numberOfSubdomains" entry
        //   This automatically filters out any coeffs dictionaries

        label nTotal = 1;
        List<const dictionary*> methods;
        for (const entry& dEntry : coeffsDict)
        {
            if
            (
                dEntry.isDict()
                // non-recursive, no patterns
                && dEntry.dict().found("numberOfSubdomains", keyType::LITERAL)
            )
            {
                domains.append(dEntry.dict().get<label>("numberOfSubdomains"));
                nTotal *= domains.last();
                // No method specified? can use a default method?

                const bool addDefaultMethod
                (
                    !(dEntry.dict().found("method", keyType::LITERAL))
                    && !defaultMethod.empty()
                );
                if(!(dEntry.dict().found("method", keyType::LITERAL)) && defaultMethod.empty()) {
                    FatalErrorInFunction <<
                        dEntry.keyword() <<
                        " dictionary does not contain method, and no default method is specified."
                        << nl << exit(FatalError);
                }
                dictionary* levelDict = new dictionary(dEntry.dict());
                levelDict->remove("numberOfSubdomains");
                if(addDefaultMethod) levelDict->add("method", defaultMethod);
                methods.append(levelDict);
            }
        }
        if(domains.empty())
            nTotal = 0;

        // Verify the dictionary-declared product before building the tree,
        // so we never index into an empty methods list.
        if(nTotal != nDomains()) {
            FatalErrorInFunction
                << "Top level decomposition specifies " << nDomains()
                << " domains which is not equal to the product of"
                << " all sub domains " << nTotal << " manually defined by dictionaries. "
                << exit(FatalError);
        }

        rootMetadata_.initialize(domains, methods[0]);
        bMetadataInitialized = true;
        for(string key : domainChanges)
            rootMetadata_.updateDomains(key,
                coeffsDict.get<labelList>(key, keyType::LITERAL));

        rootMetadata_.setLeveledDictionaries(methods);
        for(const dictionary* method : methods)
            delete method;
    }


    for(string key : methodChanges)
        rootMetadata_.updateMethod(key, coeffsDict.subDict(key, keyType::LITERAL));

    for(string key : weightChanges)
        rootMetadata_.updateWeight(key, coeffsDict.get<label>(key, keyType::LITERAL));

    for(string key : weightModeChanges) {
        word value = coeffsDict.get<word>(key, keyType::LITERAL);
        WeightsInitialization newValue = UNKNOWN;

        if(value == "uniform")
            newValue = UNIFORM;
        else if(value == "relative")
            newValue = RELATIVE;
        else
            FatalErrorInFunction <<
                "unknown weights initialization (" << value << "). Must be one of: relative, uniform."
                << nl << exit(FatalError);

        rootMetadata_.updateWeightsInitialization(key, newValue);
    }

    if(!rootMetadata_.isLeaf())
        rootMetadata_.constructMethods();
}


// Given a subset of cells determine the new global indices. The problem
// is in the cells from neighbouring processors which need to be renumbered.
void multiNodeDecomp::subsetGlobalCellCells
(
    const label nDomains,
    const label domainI,
    const labelList& dist,

    const labelListList& cellCells,
    const labelList& set,
    labelListList& subCellCells,
    labelList& cutConnections
) const
{
    // Determine new index for cells by inverting subset
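    // e.g. with 6 cells and set = (2 4 5), invert() yields
    // oldToNew = (-1 -1 0 -1 1 2): old cell 4 becomes subset cell 1.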
    labelList oldToNew(invert(cellCells.size(), set));

    globalIndex globalCells(cellCells.size());

    // Subset locally the elements for which I have data
    subCellCells = UIndirectList<labelList>(cellCells, set);

    // Get new indices for neighbouring processors
    List<Map<label>> compactMap;
    mapDistribute map(globalCells, subCellCells, compactMap);
    map.distribute(oldToNew);
    labelList allDist(dist);
    map.distribute(allDist);

    // Now we have:
    //  oldToNew     : the locally-compact numbering of all our cellCells.
    //                 -1 if cellCell is not in set.
    //  allDist      : destination domain for all our cellCells
    //  subCellCells : indexes into oldToNew and allDist

    // Globally compact numbering for cells in set.
    globalIndex globalSubCells(set.size());

    // Now subCellCells contains indices into oldToNew which are the
    // new locations of the neighbouring cells.

    cutConnections.setSize(nDomains);
    cutConnections = 0;

    forAll(subCellCells, subCelli)
    {
        labelList& cCells = subCellCells[subCelli];

        // Keep the connections to valid mapped cells
        label newI = 0;
        forAll(cCells, i)
        {
            // Get locally-compact cell index of neighbouring cell
            const label nbrCelli = oldToNew[cCells[i]];
            if (nbrCelli == -1)
            {
                cutConnections[allDist[cCells[i]]]++;
            }
            else
            {
                // Reconvert local cell index into global one

                // Get original neighbour
                const label celli = set[subCelli];
                const label oldNbrCelli = cellCells[celli][i];
                // Get processor from original neighbour
                const label proci = globalCells.whichProcID(oldNbrCelli);
                // Convert into global compact numbering
                cCells[newI++] = globalSubCells.toGlobal(proci, nbrCelli);
            }
        }
        cCells.setSize(newI);
    }
}


void multiNodeDecomp::decompose
(
    const labelListList& pointPoints,
    const pointField& points,
    const scalarField& pointWeights,
    const labelUList& pointMap,  // map back to original points
    const nodeMetadata& decomposeData,
    const label leafOffset,

    labelList& finalDecomp
) const
{
    labelList dist
    (
        decomposeData.getMethod()->decompose
        (
            pointPoints,
            points,
            pointWeights
        )
    );

    // Number of domains at the current level
    const label nCurrDomains = decomposeData.nDomains();

    // Calculate the domain remapping.
    // The decompose() method delivers a distribution of [0..nDomains-1]
    // which we map to the final location according to the decomposition
    // leaf we are on.
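    // e.g. with two children of sizes (4 8) and leafOffset 0, the offsets
    // are (0 4): child 0 owns final domains 0-3, child 1 owns 4-11.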
    labelList domainOffsets(nCurrDomains);
    domainOffsets[0] = leafOffset;
    for(label nDomain = 1; nDomain < nCurrDomains; ++nDomain) {
        domainOffsets[nDomain] = domainOffsets[nDomain-1] + decomposeData.getChild(nDomain-1)->getSize();
    }

    // Extract processor+local index from point-point addressing
    forAll(pointMap, i)
    {
        finalDecomp[pointMap[i]] = domainOffsets[dist[i]];
    }

    if (nCurrDomains > 0)
    {
        // Recurse

        for (label domainI = 0; domainI < nCurrDomains; ++domainI)
        {
            if(decomposeData.getChild(domainI)->isLeaf()) continue;
            // Extract elements for current domain
            const labelList domainPoints(findIndices(dist, domainI));

            // Subset point-wise data.
            pointField subPoints(points, domainPoints);
            scalarField subWeights(pointWeights, domainPoints);
            labelList subPointMap(labelUIndList(pointMap, domainPoints));
            // Subset point-point addressing (adapt global numbering)
            labelListList subPointPoints;
            labelList nOutsideConnections;
            subsetGlobalCellCells
            (
                nCurrDomains,
                domainI,
                dist,

                pointPoints,
                domainPoints,

                subPointPoints,
                nOutsideConnections
            );

            decompose
            (
                subPointPoints,
                subPoints,
                subWeights,
                subPointMap,
                *decomposeData.getChild(domainI),
                domainOffsets[domainI],  // The offset for this level and leaf

                finalDecomp
            );
        }
    }
}


// * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //

multiNodeDecomp::multiNodeDecomp
(
    const dictionary& decompDict,
    const word& regionName
)
:
    decompositionMethod(decompDict, regionName),
    rootMetadata_()
{
    const dictionary& coeffsDict(
        findCoeffsDict(
            typeName + "Coeffs",
            (selectionType::EXACT | selectionType::MANDATORY)
        )
    );
    initializeMetadata(coeffsDict);
}

// * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * //


bool multiNodeDecomp::parallelAware() const
{
    return rootMetadata_.parallelAware();
}


labelList multiNodeDecomp::decompose
(
    const polyMesh& mesh,
    const pointField& cc,
    const scalarField& cWeights
) const
{
    CompactListList<label> cellCells;
    calcCellCells(mesh, identity(cc.size()), cc.size(), true, cellCells);

    labelList finalDecomp(cc.size(), Zero);
    labelList cellMap(identity(cc.size()));

    decompose
    (
        cellCells.unpack(),
        cc,
        cWeights,
        cellMap,  // map back to original cells
        rootMetadata_,
        0,

        finalDecomp
    );

    return finalDecomp;
}


labelList multiNodeDecomp::decompose
(
    const labelListList& globalPointPoints,
    const pointField& points,
    const scalarField& pointWeights
) const
{
    labelList finalDecomp(points.size(), Zero);
    labelList pointMap(identity(points.size()));

    decompose
    (
        globalPointPoints,
        points,
        pointWeights,
        pointMap,  // map back to original points
        rootMetadata_,
        0,

        finalDecomp
    );

    return finalDecomp;
}


// * * * * * * * * * * * * * * Meta Parser Class * * * * * * * * * * * * * * //
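
// e.g. getEntries(dict, "weight") returns keys such as "weight[1]" or
// "weight[2][0-1]"; the bare key "weight" is matched only when
// allowWithoutBrackets is true (used for "weightsInitialization").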
List<string> multiNodeDecomp::metaParser::getEntries(const dictionary& dict, const string& argument, bool allowWithoutBrackets) {
    string argumentBracket = argument + "[";
    DynamicList<string, 4> Result;
    for(auto& dEntry : dict) {
        if(dEntry.keyword().starts_with(argumentBracket) || (allowWithoutBrackets && dEntry.keyword() == argument))
            Result.push_back(dEntry.keyword());
    }
    return Result;
}
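
// e.g. parseRanges("domains[2][0-1]") returns ((2 2) (0 1)):
// visit child 2 of the root, then its children 0 and 1.
// Empty brackets "[]" yield the sentinel (0 -1), meaning all children.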
List<Pair<label>> multiNodeDecomp::metaParser::parseRanges(const string& key) {

    // First, discard the argument and process the indices only.
    // The current syntax is argument[...]
    // Assuming that this key was returned by getEntries,
    // if there is no '[', that is fine and we use the
    // empty string (update the root).
    string indices = "";
    if(key.find_first_of('[') != key.npos) {
        // There is a '[' in the string.
        // We can substr from that location.
        label nFirstBracket = key.find('[');
        indices = key.substr(nFirstBracket);
    }

    // All checks print an error message on failure, explaining why.

    DynamicList<Pair<label>, 4> Result;
    label nCurPtr = 0, nIndicesLength = indices.size();
    // As long as there are more ranges to parse.
    while(nCurPtr != nIndicesLength) {
        // First, check that there is an opening bracket.
        if(indices[nCurPtr] != '[')
            FatalError
                << "Error when parsing indices "
                << indices << ": Expected '[', found "
                << indices[nCurPtr] << ". Aborting\n"
                << exit(FatalError);

        // Then, find the matching closing bracket.
        // Note: string::find returns npos (not the length) when the
        // character is missing, so test against npos.
        size_t nEndPos = indices.find(']', nCurPtr);
        if(nEndPos == indices.npos) {
            FatalError
                << "Error when parsing indices "
                << indices << ": Expected ']' after '['. Aborting\n"
                << exit(FatalError);
        }
        label nEndIndex = label(nEndPos);
        // Read inside the brackets, mark the hyphen if it exists, and make sure
        // every character is either a digit or a hyphen.
        // Note that only one hyphen may exist.
        label nHyphenIdx = -1;
        for(label nCurIndex = nCurPtr+1; nCurIndex < nEndIndex; ++nCurIndex) {
            if(!isdigit(indices[nCurIndex]) && indices[nCurIndex] != '-') {
                FatalError
                    << "Error when parsing indices "
                    << indices << ": Expected digit/'-'/']', found "
                    << indices[nCurIndex] << ". Aborting\n"
                    << exit(FatalError);
            }
            if(indices[nCurIndex] == '-') {
                if(nHyphenIdx != -1)
                    FatalError
                        << "Error when parsing indices "
                        << indices << ": Found two hyphens(-) inside an index. Aborting\n"
                        << exit(FatalError);

                nHyphenIdx = nCurIndex;
            }
        }
        label nLeft, nRight;
        if(nHyphenIdx == -1) {
            // Not a range - just a single index, or empty brackets
            // (indicating the whole range).
            if(nCurPtr+1 == nEndIndex) {
                nLeft = 0;
                nRight = -1;
            }
            else {
                string sNum = indices.substr(nCurPtr+1, nEndIndex-nCurPtr-1);
                nLeft = nRight = atoi(sNum.c_str());
            }
        } else {
            // A range of indices.
            // Assert that the hyphen is not right next to the brackets.
            if(nHyphenIdx+1 == nEndIndex || nCurPtr+1 == nHyphenIdx)
                FatalError
                    << "Error when parsing indices "
                    << indices << ": Expected number, found "
                    << (nCurPtr+1 == nHyphenIdx ? '-' : ']')
                    << ". Aborting\n"
                    << exit(FatalError);

            // Parse the numbers
            string sLeftNum = indices.substr(nCurPtr+1, nHyphenIdx-nCurPtr-1);
            string sRightNum = indices.substr(nHyphenIdx+1, nEndIndex-nHyphenIdx-1);
            nLeft = atoi(sLeftNum.c_str());
            nRight = atoi(sRightNum.c_str());
            // Make sure the left endpoint is at most the right endpoint
            if(nLeft > nRight)
                FatalError
                    << "Error when parsing indices "
                    << indices << ": right endpoint(" << nRight
                    << ") cannot be smaller than left endpoint("
                    << nLeft << "). Aborting\n"
                    << exit(FatalError);
        }
        // Move the pointer past the closing bracket and append to the result list.
        nCurPtr = nEndIndex + 1;
        Result.push_back({nLeft, nRight});
    }
    return Result;
}

// * * * * * * * * * * * * * Node Metadata Class  * * * * * * * * * * * * * //

void multiNodeDecomp::nodeMetadata::setLeveledDictionaries(const List<const dictionary*>& dictionaries) {
    setLeveledDictionaries(dictionaries, 0);
}

bool multiNodeDecomp::nodeMetadata::parallelAware() const {
    // The decomposition tree is parallel aware if and only if all methods used are parallel aware.
    // If this is a leaf, we are OK.
    if(children.empty())
        return true;

    // Otherwise, check if the method used in this node is parallel aware.
    if(!method->parallelAware())
        return false;

    // Check recursively, and if any child is not parallel aware - return false.
    for(auto& child : children)
        if(!child->parallelAware())
            return false;

    return true;
}
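
// Example: children of sizes (4 8) with unit weights and RELATIVE
// initialization produce processorWeights (4 8); an explicit
// weight != 1 on a child takes precedence over the initialization.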
void multiNodeDecomp::nodeMetadata::updateProcessorWeights() {
    label nDom = nDomains();
    word methodCoeffsName = coeffsDict->get<word>("method") + "Coeffs";
    // If processorWeights were set by the user, we do not modify them.
    if(
        // Check that the user did not specify processorWeights under the
        // methodCoeffs dictionary or the generic coeffs dictionary
        !(coeffsDict->subDictOrAdd(methodCoeffsName).found("processorWeights", keyType::LITERAL)
        || coeffsDict->subDictOrAdd("coeffs").found("processorWeights", keyType::LITERAL))) {
        // Then we should compute the weights on our own
        Field<float> processorWeights(nDom);
        forAll(children, i) {
            if(children[i]->weight != 1)
                processorWeights[i] = children[i]->weight;
            else switch(weightsInitialization) {
                case RELATIVE:
                    processorWeights[i] = children[i]->size;
                    break;
                case UNIFORM:
                    processorWeights[i] = 1;
                    break;
                default:
                    FatalError
                        << "Weights initialization is not handled in updateProcessorWeights. Aborting\n"
                        << exit(FatalError);
            }
        }

        coeffsDict->subDictOrAdd(methodCoeffsName).add("processorWeights", processorWeights);
    }
}

void multiNodeDecomp::nodeMetadata::constructMethods() {
    // Special handling of nDomains = 1, because some decomposition methods
    // crash when decomposing to one domain.
    label nDom = nDomains();
    if(nDom == 1) {
        coeffsDict->clear();
        coeffsDict->add("method", "none");
    } else
        updateProcessorWeights();
    coeffsDict->add("numberOfSubdomains", nDom);

    // Non-verbose construction of decomposition methods would be nice
    method = decompositionMethod::New(*coeffsDict).release();
    // Cannot free coeffsDict here because method keeps a reference
    // that must stay alive

    forAll(children, i) {
        if(!children[i]->isLeaf())
            children[i]->constructMethods();
    }
}

// Recursively construct the decomposition tree, given the list of dimensions and a default method.
void multiNodeDecomp::nodeMetadata::constructRecursive(const labelList& dims, const dictionary* defaultMethod) {
    if(!dims.empty()) {
        // The list of dimensions of the children is the current list without the first element.
        labelList newDims(dims.size() - 1);
        forAll(newDims, i)
            newDims[i] = dims[i+1];

        // Construct children recursively.
        // First, rebuild existing children and delete the excess.
        forAll(children, i) {
            if(i < dims[0])
                children[i]->constructRecursive(newDims, defaultMethod);
            else
                delete children[i];
        }
        label nOldSize = children.size();
        children.resize(dims[0]);
        // If the new array is bigger we need to allocate new children.
        for(label i = nOldSize; i < dims[0]; ++i)
            children[i] = new nodeMetadata(newDims, defaultMethod);

        // Compute size (number of leaves in subtree)
        size = dims[0];
        if(!children.empty())
            size *= children[0]->size;
    }
}

void multiNodeDecomp::nodeMetadata::updateNodes(const string& key, const std::function<void(nodeMetadata*)>& update) {
    List<Pair<label>> indicesList = metaParser::parseRanges(key);
    updateNodes(indicesList, update);
}

// Apply the update function to all nodes matching the parsed indices.
// nCurIdx indicates which range we are currently processing
// (instead of passing sublists of indices).
void multiNodeDecomp::nodeMetadata::updateNodes(const List<Pair<label>>& indices, const std::function<void(nodeMetadata*)>& update, label nCurIdx) {
    if(nCurIdx == label(indices.size())) update(this);
    else {
        // Otherwise, recurse.
        label nLeft, nRight, nChildren = children.size();
        nLeft = indices[nCurIdx].first();
        nRight = indices[nCurIdx].second();

        // [0,-1] means the entire range.
        if(nLeft == 0 && nRight == -1)
            nRight = nChildren - 1;
        // Make sure that the indices do not exceed the number of children.
        if(nRight >= nChildren)
            FatalError
                << "Error when parsing indices: The #" << (nCurIdx+1)
                << " range [" << nLeft << "," << nRight << "]:\n"
                << " Cannot update indices beyond the number of children ("
                << nChildren << "). Aborting\n"
                << exit(FatalError);

        for(label nChildIdx = nLeft; nChildIdx <= nRight; ++nChildIdx)
            children[nChildIdx]->updateNodes(indices, update, nCurIdx+1);
    }
    // Recompute size assuming children are updated.
    if(!children.empty()) {
        size = 0;
        forAll(children, i)
            size += children[i]->size;
    }
}

void multiNodeDecomp::nodeMetadata::setLeveledDictionaries(const List<const dictionary*>& dictionaries, label nLevel) {
    // Set the dictionary at this level, and recurse into non-leaf children.
    setDict(*dictionaries[nLevel]);
    forAll(children, i) {
        if(children[i]->nDomains() > 0)
            children[i]->setLeveledDictionaries(dictionaries, nLevel+1);
    }
}

}

// ************************************************************************* //

@ -0,0 +1,435 @@

/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2011-2015 OpenFOAM Foundation
    Copyright (C) 2017-2021 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

    OpenFOAM is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with OpenFOAM.  If not, see <http://www.gnu.org/licenses/>.

Class
    Foam::multiNodeDecomp

Description
    Decomposes the mesh by consecutive application of decomposers,
    possibly into uneven pieces.
    Note: if uneven pieces are required, the decomposition method
    used must support the processorWeights argument.

SourceFiles
    multiNodeDecomp.C

\*---------------------------------------------------------------------------*/

#ifndef multiNodeDecomp_H
#define multiNodeDecomp_H

#include "decompositionMethod.H"

namespace Foam
{

/*---------------------------------------------------------------------------*\
                      Class multiNodeDecomp Declaration
\*---------------------------------------------------------------------------*/

class multiNodeDecomp
:
    public decompositionMethod
{
    // Nested class declarations

    /*---------------------------------------------------------------------------*\
                          Class metaParser Declaration
    \*---------------------------------------------------------------------------*/

    // A class responsible for detecting and parsing metadata-related arguments.
    class metaParser {
    public:
        // Detect and return entries related to the given argument.
        // Input:
        //   dict - the coeffs dictionary we are looking inside.
        //   argument - the argument we are looking for.
        //   allowWithoutBrackets - set to true if the argument may also appear
        //     without brackets. For example, domains may not appear without
        //     brackets, but weightsInitialization can.
        static List<string> getEntries(const dictionary& dict, const string& argument, bool allowWithoutBrackets = false);

        // Given the key string of an entry returned from getEntries,
        // parse and return all ranges described in the key.
        // Note that it is the user's responsibility to make sure that the right endpoint
        // does not exceed the number of children of each node.
        // The user may also write "[]" to specify all children of a node.
        // In this case, the range returned is [0,-1]. Otherwise, endpoints are always non-negative.
        // Input:
        //   key - the key of an entry we are parsing, typically returned from getEntries.
        // Output:
        //   A list of ranges, where the i-th range corresponds to the i-th level of the decomposition tree.
        //   I.e. if two ranges are returned, we traverse each child of the root in the first range, and recursively
        //   each child in the second range, thus updating nodes at the third level.
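        // e.g. parseRanges("method[2-4]") returns the single range (2 4),
        // selecting children 2, 3 and 4 of the root.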

        static List<Pair<label>> parseRanges(const string& key);

    };

    enum WeightsInitialization {
        RELATIVE,
        UNIFORM,
        UNKNOWN
    };

    /*---------------------------------------------------------------------------*\
                          Class nodeMetadata Declaration
    \*---------------------------------------------------------------------------*/

    // A class holding all the information necessary for a multi-node decomposition,
    // without building the decompositionMethod objects.
    // The size is the number of processors (leaves) in this subtree. It is used
    // when computing decomposition offsets and by the relative weights initialization.
    // The weight is a multiplicative factor applied when decomposing. It is 1 by
    // default and can be set by the user.
    // With the uniform weights initialization, all nodes get the same weight; with
    // the relative weights initialization, each node's weight is set relative to its size.
    // The weight field can then be used to change the weight of a specific node.
    // Note that if the coeffs dictionary already contains a processorWeights field,
    // it is not overwritten. We then construct a new dictionary with the required
    // numberOfSubdomains and processorWeights.
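    // e.g. with "domains (2 4)" and "domains[1] (8)", the root has size 12
    // and children of sizes (4 8); under RELATIVE initialization the root
    // then decomposes with processorWeights (4 8).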

    class nodeMetadata {
    public:

        nodeMetadata() : weight(1), size(1), weightsInitialization(UNIFORM), children(0), coeffsDict(nullptr), method(nullptr) {}

        // Constructs a decomposition data tree with dimensions dims and a default method.
        // Input: A list of domains for each level, and a default method dictionary.
        nodeMetadata(const labelList& dims, const dictionary* defaultMethod) : nodeMetadata() {
            initialize(dims, defaultMethod);
        }

        // Initializes an existing nodeMetadata object.
        void initialize(const labelList& dims, const dictionary* defaultMethod) {
            setDict(*defaultMethod);
            constructRecursive(dims, defaultMethod);
        }

        ~nodeMetadata() {
            // Since this class represents a tree, we destruct recursively.
            for(nodeMetadata* child : children)
                delete child;

            // Only delete method and dict if they were assigned.
            if(method != nullptr)
                delete method;
            if(coeffsDict != nullptr)
                delete coeffsDict;
        }


        // Getters

        // Get the weight of this node, with respect to the decomposition done in this node's parent.
        label getWeight() const {
            return weight;
        }

        // Get the coeffs dictionary for the decomposition of this node.
        const dictionary* getDict() const {
            return coeffsDict;
        }

        // Get the modifiable coeffs dictionary for the decomposition of this node.
        dictionary* getMutableDict() {
            return coeffsDict;
        }

        // Get the number of leaves in this subtree, i.e. the number of processors
        // created under this node.
        label getSize() const {
            return size;
        }

        // Get the decomposition method object of this node.
        // Note that constructMethods must be called first, otherwise
        // a null pointer is returned.
        const Foam::decompositionMethod* getMethod() const {
            return method;
        }

        // Get the current weights initialization mode.
        WeightsInitialization getWeightsInitialization() const {
            return weightsInitialization;
        }

        // Get a const pointer to a child of this node.
        const nodeMetadata* getChild(label index) const {
            return children[index];
        }

        // Get a non-const pointer to a child of this node.
        nodeMetadata* getMutableChild(label index) {
            return children[index];
        }

        // Returns the number of direct subdomains this node has.
        label nDomains() const {
            return children.size();
        }

        // Returns whether this node is a leaf (i.e., has no children).
        bool isLeaf() const {
            return children.empty();
        }

        // Setters

        // Set the weight of this node, with respect to the decomposition done in this node's parent.
        void setWeight(label weight) {
            this->weight = weight;
        }

        // Set the coeffs dictionary for the decomposition of this node.
        // This creates a copy of the dictionary (and deletes any previous copy).
        void setDict(const dictionary& dict) {
            if(coeffsDict != nullptr) {
                delete coeffsDict;
            }
            coeffsDict = new dictionary(dict);
        }

        // Set the decomposition method object of this node.
        void setMethod(Foam::decompositionMethod* method) {
            this->method = method;
        }

        // Sets the weights initialization mode. If setRecursive is true, propagate to
        // the entire subtree (i.e., this node and all of its descendants).
        void setWeightsInitialization(WeightsInitialization newMode, bool setRecursive = true) {
            weightsInitialization = newMode;
            if(setRecursive) {
                for(nodeMetadata* child : children)
                    child->setWeightsInitialization(newMode, true);
            }
        }

        // Updates

        // Update the weights of the nodes at the given indices to the given weight.
        // Input: A string indicating the indices of the nodes to be updated, and
        // the new weight of the nodes.
        void updateWeight(const string& indices, label newWeight) {
            updateNodes(indices, [newWeight](nodeMetadata* node) {
                node->setWeight(newWeight);
            });
        }

        // Update the dimensions array of the nodes at the given indices to the given dimensions array.
        // Input: A string indicating the indices of the nodes to be updated, and
        // the new list of dimensions.
        void updateDomains(const string& indices, const labelList& dims) {
            updateNodes(indices, [dims](nodeMetadata* node) {
                // Reconstruct using this node's dict.
                // Note that all domain changes are applied first,
                // and only then are dictionaries set,
                // so the descendants' dictionaries are not overwritten.
                node->constructRecursive(dims, node->getDict());
            });
        }


        // Update the method of the nodes at the given indices to the given method dictionary.
        // Input: A string indicating the indices of the nodes to be updated, and
        // the new method dictionary.
        void updateMethod(const string& indices, const dictionary& dict) {
            updateNodes(indices, [dict](nodeMetadata* node) {
                node->setDict(dict);
            });
        }

        // Update the weights initialization mode of the nodes at the given indices
        // and their descendants to the new mode.
        // Input: A string indicating the indices of the nodes to be updated, and
        // the new weights mode.
        void updateWeightsInitialization(const string& indices, WeightsInitialization newMode) {
            updateNodes(indices, [newMode](nodeMetadata* node) {
                node->setWeightsInitialization(newMode);
            });
        }

        // Given a list of dictionaries, one per level, set the dictionaries accordingly.
        // Input: A list of dictionaries for each level.
        void setLeveledDictionaries(const List<const dictionary*>& dictionaries);

        // To be used within the decompositionMethod's parallelAware function.
        // Returns whether all decompositions in this subtree are parallel aware
        // (i.e., synchronize domains across proc boundaries).
        bool parallelAware() const;

        // Calculate (and add to the dictionary) the new processor weights if required,
        // using the children's weights and the weights initialization mode.
        void updateProcessorWeights();

        // Construct the decompositionMethod object for this node and all its descendants.
        void constructMethods();

    private:

        // The weight of this node in the parent's decomposition, relative to the other nodes.
        // Overrides weights set by the weights initialization.
        label weight;

        // The size of a node is the total number of subdomains under it.
        label size;

        // An enum describing the weights initialization.
        WeightsInitialization weightsInitialization;

        // The direct descendants.
        List<nodeMetadata*> children;

        // The dictionary used to construct the decomposition method.
        dictionary* coeffsDict;

        // The decomposition method of this node.
        const Foam::decompositionMethod* method;

        // Recursively constructs the subtree rooted at this node.
        // Input: A list of dimensions and the dictionary of the default method.
        void constructRecursive(const labelList& dims, const dictionary* defaultMethod);

        // Update all nodes matching the given indices with the given update function.
        // Input: A key describing ranges for each level, and a function that receives
        // a pointer to a nodeMetadata and updates it accordingly.
        void updateNodes(const string& key, const std::function<void(nodeMetadata*)>& update);

        // Internal implementation of updateNodes.
        // The list of ranges is constructed by passing the key argument to the meta parser.
        // nCurIdx is an internal variable that indicates our location inside the indices list.
        void updateNodes(const List<Pair<label>>& indices, const std::function<void(nodeMetadata*)>& update, label nCurIdx = 0);

        // This function is used inside the public setLeveledDictionaries function.
        void setLeveledDictionaries(const List<const dictionary*>& dictionaries, label nLevel);

    };


    // Private Data

        //- The decomposition metadata.
        nodeMetadata rootMetadata_;


    // Private Member Functions

        //- Read coeffsDict and construct the decomposition metadata.
        void initializeMetadata(const dictionary& coeffsDict);


        //- Given connectivity across processors work out connectivity
        //  for a (consistent) subset
        void subsetGlobalCellCells
        (
            const label nDomains,
            const label domainI,
            const labelList& dist,

            const labelListList& cellCells,
            const labelList& set,
            labelListList& subCellCells,
            labelList& cutConnections
        ) const;

        //- Decompose at the current tree node without addressing
        void decompose
        (
            const labelListList& pointPoints,
            const pointField& points,
            const scalarField& pointWeights,
            const labelUList& pointMap,  // map back to original points
            const nodeMetadata& decomposeData,
            const label leafOffset,

            labelList& finalDecomp
        ) const;


        //- No copy construct
        multiNodeDecomp(const multiNodeDecomp&) = delete;

        //- No copy assignment
        void operator=(const multiNodeDecomp&) = delete;

public:

    //- Runtime type information
    TypeName("multiNode");


    // Constructors

        //- Construct given decomposition dictionary and optional region name
        explicit multiNodeDecomp
        (
            const dictionary& decompDict,
            const word& regionName = ""
        );


    //- Destructor
    virtual ~multiNodeDecomp() = default;


    // Member Functions

        //- Is method parallel aware?
        //  i.e. does it synchronize domains across proc boundaries
        virtual bool parallelAware() const;

        //- Inherit decompose from decompositionMethod
        using decompositionMethod::decompose;

        //- Return for every coordinate the wanted processor number.
        //  Use the mesh connectivity (if needed)
        virtual labelList decompose
        (
            const polyMesh& mesh,
            const pointField& points,
            const scalarField& pointWeights
        ) const;

        //- Return for every coordinate the wanted processor number.
        //  Explicitly provided connectivity - does not use mesh_.
        virtual labelList decompose
        (
            const labelListList& globalCellCells,
            const pointField& cc,
            const scalarField& cWeights
        ) const;
};


// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

} // End namespace Foam

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

#endif

// ************************************************************************* //