Mirror of https://develop.openfoam.com/Development/openfoam.git (synced 2025-11-28 03:28:01 +00:00)
Merge branch 'master' into cvm
@@ -1,13 +1,12 @@
#!/bin/sh
cd ${0%/*} || exit 1    # run from this directory

if [ "$PWD" != "$WM_PROJECT_DIR/src" ]
then
wmakeCheckPwd "$WM_PROJECT_DIR/src" || {
echo "Error: Current directory is not \$WM_PROJECT_DIR/src"
echo "    The environment variables are inconsistent with the installation."
echo "    Check the OpenFOAM entries in your dot-files and source them."
exit 1
fi
}

set -x

@@ -31,6 +30,7 @@ decompositionMethods/Allwmake

wmake libso meshTools
wmake libso finiteVolume
wmake libso genericPatchFields

wmake libso edgeMesh

@@ -47,7 +47,7 @@ Foam::autoPtr<Foam::ODESolver> Foam::ODESolver::New
) << "Unknown ODESolver type "
<< ODESolverTypeName << endl << endl
<< "Valid ODESolvers are : " << endl
<< ODEConstructorTablePtr_->toc()
<< ODEConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}

@@ -478,7 +478,6 @@ $(pointPatchFields)/pointPatchField/pointPatchFields.C

basicPointPatchFields = $(pointPatchFields)/basic
$(basicPointPatchFields)/calculated/calculatedPointPatchFields.C
$(basicPointPatchFields)/generic/genericPointPatchFields.C
$(basicPointPatchFields)/coupled/coupledPointPatchFields.C
$(basicPointPatchFields)/value/valuePointPatchFields.C
$(basicPointPatchFields)/fixedValue/fixedValuePointPatchFields.C

@@ -231,7 +231,6 @@ Foam::HashTable<T, Key, Hash>::find
}


// Return the table of contents
template<class T, class Key, class Hash>
Foam::List<Key> Foam::HashTable<T, Key, Hash>::toc() const
{
@@ -247,6 +246,16 @@ Foam::List<Key> Foam::HashTable<T, Key, Hash>::toc() const
}


template<class T, class Key, class Hash>
Foam::List<Key> Foam::HashTable<T, Key, Hash>::sortedToc() const
{
List<Key> sortedList = this->toc();
sort(sortedList);

return sortedList;
}


template<class T, class Key, class Hash>
bool Foam::HashTable<T, Key, Hash>::set
(

@@ -198,6 +198,9 @@ public:
//- Return the table of contents
List<Key> toc() const;

//- Return the table of contents as a sorted list
List<Key> sortedToc() const;

//- Print information
Ostream& printInfo(Ostream&) const;

@@ -69,6 +69,14 @@ public:
const UList<label>&
);

//- Construct given the complete list and by transferring addressing
inline BiIndirectList
(
const UList<T>& posList,
const UList<T>& negList,
const Xfer< List<label> >&
);


// Member Functions

@@ -80,12 +88,17 @@ public:
inline const UList<T>& posList() const;
inline const UList<T>& negList() const;
inline const List<label>& addressing() const;
inline List<label>& addressing();

//- Calculate index given whether index is into posList or negList
inline static label posIndex(const label);
inline static label negIndex(const label);

// Edit

//- Reset addressing
inline void resetAddressing(const UList<label>&);
inline void resetAddressing(const Xfer< List<label> >&);


// Member Operators

@@ -40,6 +40,20 @@ inline Foam::BiIndirectList<T>::BiIndirectList
{}


template<class T>
inline Foam::BiIndirectList<T>::BiIndirectList
(
const UList<T>& posList,
const UList<T>& negList,
const Xfer< List<label> >& addr
)
:
posList_(const_cast<UList<T>&>(posList)),
negList_(const_cast<UList<T>&>(negList)),
addressing_(addr)
{}


// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //

template<class T>
@@ -79,9 +93,22 @@ inline const Foam::List<Foam::label>& Foam::BiIndirectList<T>::addressing()


template<class T>
inline Foam::List<Foam::label>& Foam::BiIndirectList<T>::addressing()
inline void Foam::BiIndirectList<T>::resetAddressing
(
const UList<label>& addr
)
{
return addressing_;
addressing_ = addr;
}


template<class T>
inline void Foam::BiIndirectList<T>::resetAddressing
(
const Xfer< List<label> >& addr
)
{
addressing_.transfer(addr());
}

@@ -63,6 +63,8 @@ public:
//- Construct given the complete list and the addressing array
inline IndirectList(const UList<T>&, const UList<label>&);

//- Construct given the complete list and by transferring addressing
inline IndirectList(const UList<T>&, const Xfer< List<label> >&);

// Member Functions

@@ -77,7 +79,8 @@ public:
// Edit

//- Reset addressing
void resetAddressing(const UList<label>&);
inline void resetAddressing(const UList<label>&);
inline void resetAddressing(const Xfer< List<label> >&);


// Member Operators

@@ -38,6 +38,18 @@ inline Foam::IndirectList<T>::IndirectList
{}


template<class T>
inline Foam::IndirectList<T>::IndirectList
(
const UList<T>& completeList,
const Xfer< List<label> >& addr
)
:
completeList_(const_cast<UList<T>&>(completeList)),
addressing_(addr)
{}


// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //

template<class T>
@@ -78,6 +90,16 @@ inline void Foam::IndirectList<T>::resetAddressing
}


template<class T>
inline void Foam::IndirectList<T>::resetAddressing
(
const Xfer< List<label> >& addr
)
{
addressing_.transfer(addr());
}


// * * * * * * * * * * * * * * * Member Operators * * * * * * * * * * * * * //

template<class T>

@@ -102,7 +102,7 @@ char Foam::ISstream::nextValid()

Foam::Istream& Foam::ISstream::read(token& t)
{
static char numberBuffer[100];
static char charBuffer[128];

// Return the put back token if it exists
if (Istream::getBack(t))
@@ -113,7 +113,7 @@ Foam::Istream& Foam::ISstream::read(token& t)
// Assume that the streams supplied are in working order.
// Lines are counted by '\n'

// Get next 'valid character': i.e. proceed through any white space
// Get next 'valid character': i.e. proceed through any whitespace
// and/or comments until a semantically valid character is hit upon.

char c = nextValid();
@@ -144,7 +144,7 @@ Foam::Istream& Foam::ISstream::read(token& t)
case token::COMMA :
case token::ASSIGN :
case token::ADD :
// case token::SUBTRACT : // Handled later as the posible start of a number
// case token::SUBTRACT : // Handled later as the possible start of a number
case token::MULTIPLY :
case token::DIVIDE :
{
@@ -152,6 +152,7 @@ Foam::Istream& Foam::ISstream::read(token& t)
return *this;
}


// Strings: enclosed by double quotes.
case token::BEGIN_STRING :
{
@@ -170,69 +171,83 @@ Foam::Istream& Foam::ISstream::read(token& t)
return *this;
}


// Numbers: do not distinguish at this point between Types.
//
// ideally match the equivalent of this regular expression
//
// /[-+]?([0-9]+\.?[0-9]*|\.[0-9]+)([Ee][-+]?[0-9]+)?/
//
case '-' :
case '.' :
case '0' : case '1' : case '2' : case '3' : case '4' :
case '5' : case '6' : case '7' : case '8' : case '9' :
{
bool isScalar = false;
bool asLabel = (c != '.');

if (c == '.')
{
isScalar = true;
}

int i=0;
numberBuffer[i++] = c;
unsigned int nChar = 0;
charBuffer[nChar++] = c;

// get everything that could reasonable look like a number
while
(
is_.get(c)
&& (
isdigit(c)
|| c == '.'
|| c == 'e'
|| c == 'E'
|| c == '+'
|| c == '-'
|| c == '.'
|| c == 'E'
|| c == 'e'
)
)
{
numberBuffer[i++] = c;
asLabel = asLabel && isdigit(c);

if (!isdigit(c))
charBuffer[nChar++] = c;
if (nChar >= sizeof(charBuffer))
{
isScalar = true;
// runaway argument - avoid buffer overflow
t.setBad();
return *this;
}
}
numberBuffer[i] = '\0';
charBuffer[nChar] = '\0';

setState(is_.rdstate());

if (!is_.bad())
{
is_.putback(c);

if (i == 1 && numberBuffer[0] == '-')
if (nChar == 1 && charBuffer[0] == '-')
{
// a single '-' is punctuation
t = token::punctuationToken(token::SUBTRACT);
}
else if (isScalar)
{
t = scalar(atof(numberBuffer));
}
else
{
long lt = atol(numberBuffer);
t = label(lt);
char *endptr;

// If the integer is too large to be represented as a label
// return it as a scalar
if (t.labelToken() != lt)
if (asLabel)
{
isScalar = true;
t = scalar(atof(numberBuffer));
long longval = strtol(charBuffer, &endptr, 10);
t = label(longval);

// return as a scalar if doesn't fit in a label
if (t.labelToken() != longval)
{
t = scalar(strtod(charBuffer, &endptr));
}
}
else
{
t = scalar(strtod(charBuffer, &endptr));
}

// nothing converted (bad format), or trailing junk
if (endptr == charBuffer || *endptr != '\0')
{
t.setBad();
}
}
}

@@ -244,6 +259,7 @@ Foam::Istream& Foam::ISstream::read(token& t)
return *this;
}


// Should be a word (which can be a single character)
default:
{

@ -67,7 +67,7 @@ Foam::autoPtr<Foam::token::compound> Foam::token::compound::New
|
||||
FatalErrorIn("token::compound::New(const word&, Istream&)")
|
||||
<< "Unknown compound type " << compoundType << nl << nl
|
||||
<< "Valid compound types:" << endl
|
||||
<< IstreamConstructorTablePtr_->toc()
|
||||
<< IstreamConstructorTablePtr_->sortedToc()
|
||||
<< abort(FatalError);
|
||||
}
|
||||
|
||||
|
||||
@ -638,10 +638,10 @@ Foam::Time& Foam::Time::operator+=(const scalar deltaT)
|
||||
|
||||
Foam::Time& Foam::Time::operator++()
|
||||
{
|
||||
readModifiedObjects();
|
||||
|
||||
if (!subCycling_)
|
||||
{
|
||||
readModifiedObjects();
|
||||
|
||||
if (timeIndex_ == startTimeIndex_)
|
||||
{
|
||||
functionObjects_.start();
|
||||
|
||||
@ -89,7 +89,7 @@ Foam::autoPtr<Foam::functionObject> Foam::functionObject::New
|
||||
) << "Unknown function type "
|
||||
<< functionType << nl << nl
|
||||
<< "Valid functions are : " << nl
|
||||
<< dictionaryConstructorTablePtr_->toc() << endl
|
||||
<< dictionaryConstructorTablePtr_->sortedToc() << endl
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
|
||||
@ -55,7 +55,7 @@ Foam::autoPtr<Foam::pointPatchField<Type> > Foam::pointPatchField<Type>::New
|
||||
<< patchFieldType
|
||||
<< endl << endl
|
||||
<< "Valid patchField types are :" << endl
|
||||
<< pointPatchConstructorTablePtr_->toc()
|
||||
<< pointPatchConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
@ -111,7 +111,7 @@ Foam::autoPtr<Foam::pointPatchField<Type> > Foam::pointPatchField<Type>::New
|
||||
) << "Unknown patchField type " << patchFieldType
|
||||
<< " for patch type " << p.type() << endl << endl
|
||||
<< "Valid patchField types are :" << endl
|
||||
<< dictionaryConstructorTablePtr_->toc()
|
||||
<< dictionaryConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
}
|
||||
@ -182,7 +182,7 @@ Foam::autoPtr<Foam::pointPatchField<Type> > Foam::pointPatchField<Type>::New
|
||||
) << "unknown patchTypefield type "
|
||||
<< ptf.type() << endl << endl
|
||||
<< "Valid patchField types are :" << endl
|
||||
<< patchMapperConstructorTablePtr_->toc()
|
||||
<< patchMapperConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
|
||||
@ -168,7 +168,7 @@ autoPtr<graph::writer> graph::writer::New(const word& graphFormat)
|
||||
) << "Unknown graph format " << graphFormat
|
||||
<< endl << endl
|
||||
<< "Valid graph formats are : " << endl
|
||||
<< wordConstructorTablePtr_->toc()
|
||||
<< wordConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
|
||||
@ -97,7 +97,7 @@ Foam::lduMatrix::preconditioner::New
|
||||
) << "Unknown symmetric matrix preconditioner "
|
||||
<< name << nl << nl
|
||||
<< "Valid symmetric matrix preconditioners :" << endl
|
||||
<< symMatrixConstructorTablePtr_->toc()
|
||||
<< symMatrixConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
|
||||
@ -125,7 +125,7 @@ Foam::lduMatrix::preconditioner::New
|
||||
) << "Unknown asymmetric matrix preconditioner "
|
||||
<< name << nl << nl
|
||||
<< "Valid asymmetric matrix preconditioners :" << endl
|
||||
<< asymMatrixConstructorTablePtr_->toc()
|
||||
<< asymMatrixConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
|
||||
|
||||
@ -98,7 +98,7 @@ Foam::autoPtr<Foam::lduMatrix::smoother> Foam::lduMatrix::smoother::New
|
||||
) << "Unknown symmetric matrix smoother "
|
||||
<< name << nl << nl
|
||||
<< "Valid symmetric matrix smoothers are :" << endl
|
||||
<< symMatrixConstructorTablePtr_->toc()
|
||||
<< symMatrixConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
|
||||
@ -127,7 +127,7 @@ Foam::autoPtr<Foam::lduMatrix::smoother> Foam::lduMatrix::smoother::New
|
||||
) << "Unknown asymmetric matrix smoother "
|
||||
<< name << nl << nl
|
||||
<< "Valid asymmetric matrix smoothers are :" << endl
|
||||
<< asymMatrixConstructorTablePtr_->toc()
|
||||
<< asymMatrixConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
|
||||
|
||||
@ -77,7 +77,7 @@ Foam::autoPtr<Foam::lduMatrix::solver> Foam::lduMatrix::solver::New
|
||||
"lduMatrix::solver::New", solverControls
|
||||
) << "Unknown symmetric matrix solver " << name << nl << nl
|
||||
<< "Valid symmetric matrix solvers are :" << endl
|
||||
<< symMatrixConstructorTablePtr_->toc()
|
||||
<< symMatrixConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
|
||||
@ -106,7 +106,7 @@ Foam::autoPtr<Foam::lduMatrix::solver> Foam::lduMatrix::solver::New
|
||||
"lduMatrix::solver::New", solverControls
|
||||
) << "Unknown asymmetric matrix solver " << name << nl << nl
|
||||
<< "Valid asymmetric matrix solvers are :" << endl
|
||||
<< asymMatrixConstructorTablePtr_->toc()
|
||||
<< asymMatrixConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
|
||||
|
||||
@ -124,9 +124,9 @@ const Foam::GAMGAgglomeration& Foam::GAMGAgglomeration::New
|
||||
) << "Unknown GAMGAgglomeration type "
|
||||
<< agglomeratorType << ".\n"
|
||||
<< "Valid algebraic GAMGAgglomeration types are :"
|
||||
<< lduMatrixConstructorTablePtr_->toc() << endl
|
||||
<< lduMatrixConstructorTablePtr_->sortedToc() << endl
|
||||
<< "Valid algebraic GAMGAgglomeration types are :"
|
||||
<< lduMeshConstructorTablePtr_->toc()
|
||||
<< lduMeshConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
|
||||
@ -48,7 +48,7 @@ Foam::autoPtr<Foam::GAMGInterfaceField> Foam::GAMGInterfaceField::New
|
||||
"const lduInterfaceField& fineInterface)"
|
||||
) << "Unknown GAMGInterfaceField type " << coupleType << ".\n"
|
||||
<< "Valid GAMGInterfaceField types are :"
|
||||
<< lduInterfaceConstructorTablePtr_->toc()
|
||||
<< lduInterfaceConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
|
||||
@ -52,7 +52,7 @@ Foam::autoPtr<Foam::GAMGInterface> Foam::GAMGInterface::New
|
||||
"const labelField& neighbourRestrictAddressing)"
|
||||
) << "Unknown GAMGInterface type " << coupleType << ".\n"
|
||||
<< "Valid GAMGInterface types are :"
|
||||
<< lduInterfaceConstructorTablePtr_->toc()
|
||||
<< lduInterfaceConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
|
||||
@ -60,7 +60,7 @@ autoPtr<facePointPatch> facePointPatch::New
|
||||
<< patch.type()
|
||||
<< endl << endl
|
||||
<< "Valid facePointPatch types are :" << endl
|
||||
<< polyPatchConstructorTablePtr_->toc()
|
||||
<< polyPatchConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
|
||||
@ -59,7 +59,7 @@ Foam::autoPtr<Foam::polyPatch> Foam::polyPatch::New
|
||||
) << "Unknown polyPatch type " << patchType << " for patch " << name
|
||||
<< endl << endl
|
||||
<< "Valid polyPatch types are :" << endl
|
||||
<< wordConstructorTablePtr_->toc()
|
||||
<< wordConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
@ -107,7 +107,7 @@ Foam::autoPtr<Foam::polyPatch> Foam::polyPatch::New
|
||||
<< " for patch " << name
|
||||
<< endl << endl
|
||||
<< "Valid polyPatch types are :" << endl
|
||||
<< dictionaryConstructorTablePtr_->toc()
|
||||
<< dictionaryConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
}
|
||||
|
||||
@ -62,7 +62,7 @@ Foam::autoPtr<Foam::cellZone> Foam::cellZone::New
|
||||
dict
|
||||
) << "Unknown cellZone type " << zoneType << endl << endl
|
||||
<< "Valid cellZone types are :" << endl
|
||||
<< dictionaryConstructorTablePtr_->toc()
|
||||
<< dictionaryConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
|
||||
|
||||
@ -62,7 +62,7 @@ Foam::autoPtr<Foam::faceZone> Foam::faceZone::New
|
||||
dict
|
||||
) << "Unknown faceZone type " << zoneType << endl << endl
|
||||
<< "Valid faceZone types are :" << endl
|
||||
<< dictionaryConstructorTablePtr_->toc()
|
||||
<< dictionaryConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
|
||||
|
||||
@ -62,7 +62,7 @@ Foam::autoPtr<Foam::pointZone> Foam::pointZone::New
|
||||
dict
|
||||
) << "Unknown pointZone type " << zoneType << endl << endl
|
||||
<< "Valid pointZone types are :" << endl
|
||||
<< dictionaryConstructorTablePtr_->toc()
|
||||
<< dictionaryConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
|
||||
|
||||
@ -84,7 +84,7 @@ Istream& operator>>(Istream& is, Scalar& s)
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, Scalar&)", is)
|
||||
<< "wrong token type - expected Scalar found " << t.info()
|
||||
<< "wrong token type - expected Scalar, found " << t.info()
|
||||
<< exit(FatalIOError);
|
||||
|
||||
return is;
|
||||
|
||||
@ -74,7 +74,7 @@ Foam::Istream& Foam::operator>>(Istream& is, Switch& s)
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, bool/Switch&)", is)
|
||||
<< "wrong token type - expected bool found " << t
|
||||
<< "wrong token type - expected bool, found " << t
|
||||
<< exit(FatalIOError);
|
||||
|
||||
return is;
|
||||
|
||||
@ -67,7 +67,7 @@ Foam::Istream& Foam::operator>>(Istream& is, int& i)
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, int&)", is)
|
||||
<< "wrong token type - expected int found " << t.info()
|
||||
<< "wrong token type - expected int, found " << t.info()
|
||||
<< exit(FatalIOError);
|
||||
|
||||
return is;
|
||||
|
||||
@ -65,7 +65,7 @@ Foam::Istream& Foam::operator>>(Istream& is, long& l)
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, long&)", is)
|
||||
<< "wrong token type - expected long found " << t.info()
|
||||
<< "wrong token type - expected long, found " << t.info()
|
||||
<< exit(FatalIOError);
|
||||
|
||||
return is;
|
||||
|
||||
@ -66,7 +66,7 @@ Foam::Istream& Foam::operator>>(Istream& is, unsigned int& i)
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, unsigned int&)", is)
|
||||
<< "wrong token type - expected unsigned int found " << t.info()
|
||||
<< "wrong token type - expected unsigned int, found " << t.info()
|
||||
<< exit(FatalIOError);
|
||||
|
||||
return is;
|
||||
|
||||
@ -63,7 +63,7 @@ Foam::Istream& Foam::operator>>(Istream& is, unsigned long& val)
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, unsigned long&)", is)
|
||||
<< "wrong token type - expected unsigned long found " << t.info()
|
||||
<< "wrong token type - expected unsigned long, found " << t.info()
|
||||
<< exit(FatalIOError);
|
||||
|
||||
return is;
|
||||
|
||||
@ -28,6 +28,7 @@ Class
|
||||
Description
|
||||
A class for handling file names.
|
||||
|
||||
A fileName is a string of characters without whitespace or quotes.
|
||||
A fileName can be
|
||||
- constructed from a char*, a string or a word
|
||||
- concatenated by adding a '/' separator
|
||||
|
||||
@ -58,12 +58,22 @@ Foam::Istream& Foam::operator>>(Istream& is, keyType& w)
|
||||
{
|
||||
// Assign from string. Sets regular expression.
|
||||
w = t.stringToken();
|
||||
|
||||
// flag empty strings as an error
|
||||
if (w.empty())
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, keyType&)", is)
|
||||
<< "empty word/expression "
|
||||
<< exit(FatalIOError);
|
||||
return is;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, keyType&)", is)
|
||||
<< "wrong token type - expected word or string found "
|
||||
<< "wrong token type - expected word or string, found "
|
||||
<< t.info()
|
||||
<< exit(FatalIOError);
|
||||
|
||||
|
||||
@ -55,7 +55,7 @@ Foam::Istream& Foam::operator>>(Istream& is, string& s)
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, string&)", is)
|
||||
<< "wrong token type - expected string found " << t.info()
|
||||
<< "wrong token type - expected string, found " << t.info()
|
||||
<< exit(FatalIOError);
|
||||
|
||||
return is;
|
||||
|
||||
@ -28,9 +28,8 @@ Class
|
||||
Description
|
||||
A class for handling words, derived from string.
|
||||
|
||||
A word is a string of characters containing no whitespace and may be
|
||||
constructed from a string by removing whitespace. Words are delimited
|
||||
by whitespace.
|
||||
A word is a string of characters without whitespace, quotes, slashes,
|
||||
semicolons or brace brackets. Words are delimited by whitespace.
|
||||
|
||||
SourceFiles
|
||||
word.C
|
||||
|
||||
@ -65,7 +65,8 @@ Foam::Istream& Foam::operator>>(Istream& is, word& w)
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, word&)", is)
|
||||
<< "wrong token type - expected word found non-word characters "
|
||||
<< "wrong token type - expected word, found "
|
||||
"non-word characters "
|
||||
<< t.info()
|
||||
<< exit(FatalIOError);
|
||||
return is;
|
||||
@ -75,7 +76,7 @@ Foam::Istream& Foam::operator>>(Istream& is, word& w)
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, word&)", is)
|
||||
<< "wrong token type - expected word found "
|
||||
<< "wrong token type - expected word, found "
|
||||
<< t.info()
|
||||
<< exit(FatalIOError);
|
||||
|
||||
|
||||
@ -29,7 +29,7 @@ Description
|
||||
A wordRe is a word, but can also have a regular expression for matching
|
||||
words.
|
||||
|
||||
By default the constructors will generally preserve the argument as
|
||||
By default the constructors will generally preserve the argument as a
|
||||
string literal and the assignment operators will use the wordRe::DETECT
|
||||
compOption to scan the string for regular expression meta characters
|
||||
and/or invalid word characters and react accordingly.
|
||||
|
||||
@ -57,12 +57,22 @@ Foam::Istream& Foam::operator>>(Istream& is, wordRe& w)
|
||||
{
|
||||
// Auto-tests for regular expression
|
||||
w = t.stringToken();
|
||||
|
||||
// flag empty strings as an error
|
||||
if (w.empty())
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, wordRe&)", is)
|
||||
<< "empty word/expression "
|
||||
<< exit(FatalIOError);
|
||||
return is;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
is.setBad();
|
||||
FatalIOErrorIn("operator>>(Istream&, wordRe&)", is)
|
||||
<< "wrong token type - expected word or string found "
|
||||
<< "wrong token type - expected word or string, found "
|
||||
<< t.info()
|
||||
<< exit(FatalIOError);
|
||||
|
||||
|
||||
@ -225,7 +225,9 @@ Foam::autoHexMeshDriver::autoHexMeshDriver
|
||||
IOobject
|
||||
(
|
||||
"abc", // dummy name
|
||||
mesh_.time().findInstance("triSurface", word::null),// inst
|
||||
//mesh_.time().findInstance("triSurface", word::null),
|
||||
// instance
|
||||
mesh_.time().constant(), // instance
|
||||
"triSurface", // local
|
||||
mesh_.time(), // registry
|
||||
IOobject::MUST_READ,
|
||||
|
||||
@ -78,8 +78,9 @@ Foam::label Foam::autoRefineDriver::readFeatureEdges
|
||||
IOobject
|
||||
(
|
||||
featFileName, // name
|
||||
mesh.time().findInstance("triSurface", featFileName),
|
||||
// instance
|
||||
//mesh.time().findInstance("triSurface", featFileName),
|
||||
// // instance
|
||||
mesh.time().constant(), // instance
|
||||
"triSurface", // local
|
||||
mesh.time(), // registry
|
||||
IOobject::MUST_READ,
|
||||
|
||||
@ -142,7 +142,7 @@ Foam::autoPtr<Foam::ensightPart> Foam::ensightPart::New(Istream& is)
|
||||
is
|
||||
) << "unknown ensightPart type " << partType << endl << endl
|
||||
<< "Valid ensightPart types are :" << endl
|
||||
<< istreamConstructorTablePtr_->toc()
|
||||
<< istreamConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalIOError);
|
||||
}
|
||||
|
||||
|
||||
@ -25,8 +25,6 @@ License
|
||||
InClass
|
||||
decompositionMethod
|
||||
|
||||
Description
|
||||
|
||||
\*---------------------------------------------------------------------------*/
|
||||
|
||||
#include "decompositionMethod.H"
|
||||
@ -64,7 +62,7 @@ Foam::autoPtr<Foam::decompositionMethod> Foam::decompositionMethod::New
|
||||
) << "Unknown decompositionMethod "
|
||||
<< decompositionMethodTypeName << endl << endl
|
||||
<< "Valid decompositionMethods are : " << endl
|
||||
<< dictionaryConstructorTablePtr_->toc()
|
||||
<< dictionaryConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
@ -96,7 +94,7 @@ Foam::autoPtr<Foam::decompositionMethod> Foam::decompositionMethod::New
|
||||
) << "Unknown decompositionMethod "
|
||||
<< decompositionMethodTypeName << endl << endl
|
||||
<< "Valid decompositionMethods are : " << endl
|
||||
<< dictionaryMeshConstructorTablePtr_->toc()
|
||||
<< dictionaryMeshConstructorTablePtr_->sortedToc()
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
@ -104,14 +102,26 @@ Foam::autoPtr<Foam::decompositionMethod> Foam::decompositionMethod::New
|
||||
}
|
||||
|
||||
|
||||
Foam::labelList Foam::decompositionMethod::decompose
|
||||
(
|
||||
const pointField& points
|
||||
)
|
||||
{
|
||||
scalarField weights(0);
|
||||
|
||||
return decompose(points, weights);
|
||||
}
|
||||
|
||||
|
||||
Foam::labelList Foam::decompositionMethod::decompose
|
||||
(
|
||||
const labelList& fineToCoarse,
|
||||
const pointField& coarsePoints
|
||||
const pointField& coarsePoints,
|
||||
const scalarField& coarseWeights
|
||||
)
|
||||
{
|
||||
// Decompose based on agglomerated points
|
||||
labelList coarseDistribution(decompose(coarsePoints));
|
||||
labelList coarseDistribution(decompose(coarsePoints, coarseWeights));
|
||||
|
||||
// Rework back into decomposition for original mesh_
|
||||
labelList fineDistribution(fineToCoarse.size());
|
||||
@ -125,6 +135,18 @@ Foam::labelList Foam::decompositionMethod::decompose
|
||||
}
|
||||
|
||||
|
||||
Foam::labelList Foam::decompositionMethod::decompose
|
||||
(
|
||||
const labelList& fineToCoarse,
|
||||
const pointField& coarsePoints
|
||||
)
|
||||
{
|
||||
scalarField coarseWeights(0);
|
||||
|
||||
return decompose(fineToCoarse, coarsePoints, coarseWeights);
|
||||
}
|
||||
|
||||
|
||||
void Foam::decompositionMethod::calcCellCells
|
||||
(
|
||||
const polyMesh& mesh,
|
||||
@ -170,4 +192,16 @@ void Foam::decompositionMethod::calcCellCells
|
||||
}
|
||||
|
||||
|
||||
Foam::labelList Foam::decompositionMethod::decompose
|
||||
(
|
||||
const labelListList& globalCellCells,
|
||||
const pointField& cc
|
||||
)
|
||||
{
|
||||
scalarField cWeights(0);
|
||||
|
||||
return decompose(globalCellCells, cc, cWeights);
|
||||
}
|
||||
|
||||
|
||||
// ************************************************************************* //
|
||||
|
||||
@ -150,20 +150,37 @@ public:
|
||||
|
||||
//- Return for every coordinate the wanted processor number. Use the
|
||||
// mesh connectivity (if needed)
|
||||
virtual labelList decompose(const pointField&) = 0;
|
||||
virtual labelList decompose
|
||||
(
|
||||
const pointField& points,
|
||||
const scalarField& pointWeights
|
||||
) = 0;
|
||||
|
||||
//- Like decompose but with uniform weights on the points
|
||||
virtual labelList decompose(const pointField&);
|
||||
|
||||
|
||||
//- Return for every coordinate the wanted processor number. Gets
|
||||
// passed agglomeration map (from fine to coarse cells) and coarse cell
|
||||
// location. Can be overridden by decomposers that provide this
|
||||
// functionality natively. Coarse cells are local to the processor
|
||||
// (if in parallel). If you want to have coarse cells spanning
|
||||
// processors use the next function below instead.
|
||||
// processors use the globalCellCells instead.
|
||||
virtual labelList decompose
|
||||
(
|
||||
const labelList& cellToRegion,
|
||||
const pointField& regionPoints,
|
||||
const scalarField& regionWeights
|
||||
);
|
||||
|
||||
//- Like decompose but with uniform weights on the regions
|
||||
virtual labelList decompose
|
||||
(
|
||||
const labelList& cellToRegion,
|
||||
const pointField& regionPoints
|
||||
);
|
||||
|
||||
|
||||
//- Return for every coordinate the wanted processor number. Explicitly
|
||||
// provided connectivity - does not use mesh_.
|
||||
// The connectivity is equal to mesh.cellCells() except for
|
||||
@ -174,9 +191,17 @@ public:
|
||||
virtual labelList decompose
|
||||
(
|
||||
const labelListList& globalCellCells,
|
||||
const pointField& cc
|
||||
const pointField& cc,
|
||||
const scalarField& cWeights
|
||||
) = 0;
|
||||
|
||||
//- Like decompose but with uniform weights on the cells
|
||||
virtual labelList decompose
|
||||
(
|
||||
const labelListList& globalCellCells,
|
||||
const pointField& cc
|
||||
);
|
||||
|
||||
};
|
||||
|
||||
|
||||
|
||||
@ -142,6 +142,38 @@ Foam::label Foam::hierarchGeomDecomp::findLower
|
||||
}
|
||||
|
||||
|
||||
// Create a mapping between the index and the weighted size.
|
||||
// For convenience, sortedWeightedSize is one size bigger than current. This
|
||||
// avoids extra tests.
|
||||
void Foam::hierarchGeomDecomp::calculateSortedWeightedSizes
|
||||
(
|
||||
const labelList& current,
|
||||
const labelList& indices,
|
||||
const scalarField& weights,
|
||||
const label globalCurrentSize,
|
||||
|
||||
scalarField& sortedWeightedSizes
|
||||
)
|
||||
{
|
||||
// Evaluate cumulative weights.
|
||||
sortedWeightedSizes[0] = 0;
|
||||
forAll(current, i)
|
||||
{
|
||||
label pointI = current[indices[i]];
|
||||
sortedWeightedSizes[i + 1] = sortedWeightedSizes[i] + weights[pointI];
|
||||
}
|
||||
// Non-dimensionalise and multiply by size.
|
||||
scalar globalCurrentLength = returnReduce
|
||||
(
|
||||
sortedWeightedSizes[current.size()],
|
||||
sumOp<scalar>()
|
||||
);
|
||||
// Normalise weights by global sum of weights and multiply through
|
||||
// by global size.
|
||||
sortedWeightedSizes *= (globalCurrentSize/globalCurrentLength);
|
||||
}
|
||||
|
||||
|
||||
// Find position in values so between minIndex and this position there
|
||||
// are wantedSize elements.
|
||||
void Foam::hierarchGeomDecomp::findBinary
|
||||
@ -170,7 +202,6 @@ void Foam::hierarchGeomDecomp::findBinary
|
||||
label midPrev = -1;
|
||||
label highPrev = -1;
|
||||
|
||||
//while (low <= high)
|
||||
while (true)
|
||||
{
|
||||
label size = returnReduce(mid-minIndex, sumOp<label>());
|
||||
@ -204,7 +235,98 @@ void Foam::hierarchGeomDecomp::findBinary
|
||||
mid = findLower(values, midValue, low, high);
|
||||
|
||||
// Safeguard if same as previous.
|
||||
bool hasNotChanged = (mid == midPrev) && (low == lowPrev) && (high == highPrev);
|
||||
bool hasNotChanged =
|
||||
(mid == midPrev)
|
||||
&& (low == lowPrev)
|
||||
&& (high == highPrev);
|
||||
|
||||
if (returnReduce(hasNotChanged, andOp<bool>()))
|
||||
{
|
||||
WarningIn("hierarchGeomDecomp::findBinary(..)")
|
||||
<< "unable to find desired deomposition split, making do!"
|
||||
<< endl;
|
||||
break;
|
||||
}
|
||||
|
||||
midPrev = mid;
|
||||
lowPrev = low;
|
||||
highPrev = high;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Find position in values so between minIndex and this position there
|
||||
// are wantedSize elements.
|
||||
void Foam::hierarchGeomDecomp::findBinary
|
||||
(
|
||||
const label sizeTol,
|
||||
const List<scalar>& sortedWeightedSizes,
|
||||
const List<scalar>& values,
|
||||
const label minIndex, // index of previous value
|
||||
const scalar minValue, // value at minIndex
|
||||
const scalar maxValue, // global max of values
|
||||
const scalar wantedSize, // wanted size
|
||||
|
||||
label& mid, // index where size of bin is
|
||||
// wantedSize (to within sizeTol)
|
||||
scalar& midValue // value at mid
|
||||
)
|
||||
{
|
||||
label low = minIndex;
|
||||
scalar lowValue = minValue;
|
||||
|
||||
scalar highValue = maxValue;
|
||||
// (one beyond) index of highValue
|
||||
label high = values.size();
|
||||
|
||||
// Safeguards to avoid infinite loop.
|
||||
label lowPrev = -1;
|
||||
label midPrev = -1;
|
||||
label highPrev = -1;
|
||||
|
||||
while (true)
|
||||
{
|
||||
label weightedSize = returnReduce
|
||||
(
|
||||
sortedWeightedSizes[mid] - sortedWeightedSizes[minIndex],
|
||||
sumOp<label>()
|
||||
);
|
||||
|
||||
if (debug)
|
||||
{
|
||||
Pout<< "low:" << low << " lowValue:" << lowValue
|
||||
<< " high:" << high << " highValue:" << highValue
|
||||
<< " mid:" << mid << " midValue:" << midValue << nl
|
||||
<< "globalSize:" << weightedSize
|
||||
<< " wantedSize:" << wantedSize
|
||||
<< " sizeTol:" << sizeTol << endl;
|
||||
}
|
||||
|
||||
if (wantedSize < weightedSize - sizeTol)
|
||||
{
|
||||
high = mid;
|
||||
highValue = midValue;
|
||||
}
|
||||
else if (wantedSize > weightedSize + sizeTol)
|
||||
{
|
||||
low = mid;
|
||||
lowValue = midValue;
|
||||
}
|
||||
else
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
// Update mid, midValue
|
||||
midValue = 0.5*(lowValue+highValue);
|
||||
mid = findLower(values, midValue, low, high);
|
||||
|
||||
// Safeguard if same as previous.
|
||||
bool hasNotChanged =
|
||||
(mid == midPrev)
|
||||
&& (low == lowPrev)
|
||||
&& (high == highPrev);
|
||||
|
||||
if (returnReduce(hasNotChanged, andOp<bool>()))
|
||||
{
|
||||
WarningIn("hierarchGeomDecomp::findBinary(..)")
|
||||
@ -394,6 +516,190 @@ void Foam::hierarchGeomDecomp::sortComponent
|
||||
}
|
||||
|
||||
|
||||
// Sort points into bins according to one component. Recurses to next component.
|
||||
void Foam::hierarchGeomDecomp::sortComponent
|
||||
(
|
||||
const label sizeTol,
|
||||
const scalarField& weights,
|
||||
const pointField& points,
|
||||
const labelList& current, // slice of points to decompose
|
||||
const direction componentIndex, // index in decompOrder_
|
||||
const label mult, // multiplication factor for finalDecomp
|
||||
labelList& finalDecomp
|
||||
)
|
||||
{
|
||||
// Current component
|
||||
label compI = decompOrder_[componentIndex];
|
||||
|
||||
if (debug)
|
||||
{
|
||||
Pout<< "sortComponent : Sorting slice of size " << current.size()
|
||||
<< " in component " << compI << endl;
|
||||
}
|
||||
|
||||
// Storage for sorted component compI
|
||||
SortableList<scalar> sortedCoord(current.size());
|
||||
|
||||
forAll(current, i)
|
||||
{
|
||||
label pointI = current[i];
|
||||
|
||||
sortedCoord[i] = points[pointI][compI];
|
||||
}
|
||||
sortedCoord.sort();
|
||||
|
||||
label globalCurrentSize = returnReduce(current.size(), sumOp<label>());
|
||||
|
||||
// Now evaluate local cumulative weights, based on the sorting.
|
||||
// Make one bigger than the nodes.
|
||||
scalarField sortedWeightedSizes(current.size()+1, 0);
|
||||
calculateSortedWeightedSizes
|
||||
(
|
||||
current,
|
||||
sortedCoord.indices(),
|
||||
weights,
|
||||
globalCurrentSize,
|
||||
sortedWeightedSizes
|
||||
);
|
||||
|
||||
scalar minCoord = returnReduce
|
||||
(
|
||||
(
|
||||
sortedCoord.size()
|
||||
? sortedCoord[0]
|
||||
: GREAT
|
||||
),
|
||||
minOp<scalar>()
|
||||
);
|
||||
|
||||
scalar maxCoord = returnReduce
|
||||
(
|
||||
(
|
||||
sortedCoord.size()
|
||||
? sortedCoord[sortedCoord.size()-1]
|
||||
: -GREAT
|
||||
),
|
||||
maxOp<scalar>()
|
||||
);
|
||||
|
||||
if (debug)
|
||||
{
|
||||
Pout<< "sortComponent : minCoord:" << minCoord
|
||||
<< " maxCoord:" << maxCoord << endl;
|
||||
}
|
||||
|
||||
// starting index (in sortedCoord) of bin (= local)
|
||||
label leftIndex = 0;
|
||||
// starting value of bin (= global since coordinate)
|
||||
scalar leftCoord = minCoord;
|
||||
|
||||
// Sort bins of size n
|
||||
for (label bin = 0; bin < n_[compI]; bin++)
|
||||
{
|
||||
// Now we need to determine the size of the bin (dx). This is
|
||||
// determined by the 'pivot' values - everything to the left of this
|
||||
// value goes in the current bin, everything to the right into the next
|
||||
// bins.
|
||||
|
||||
// Local number of elements
|
||||
label localSize = -1; // offset from leftOffset
|
||||
|
||||
// Value at right of bin (leftIndex+localSize-1)
|
||||
scalar rightCoord = -GREAT;
|
||||
|
||||
if (bin == n_[compI]-1)
|
||||
{
|
||||
// Last bin. Copy all.
|
||||
localSize = current.size()-leftIndex;
|
||||
rightCoord = maxCoord; // note: not used anymore
|
||||
}
|
||||
else
|
||||
{
|
||||
// For the current bin (starting at leftCoord) we want a rightCoord
|
||||
// such that the sum of all weighted sizes are
|
||||
// globalCurrentLength/n_[compI].
|
||||
// We have to iterate to obtain this.
|
||||
|
||||
label rightIndex = current.size();
|
||||
rightCoord = maxCoord;
|
||||
|
||||
// Calculate rightIndex/rightCoord to have wanted size
|
||||
findBinary
|
||||
(
|
||||
sizeTol,
|
||||
sortedWeightedSizes,
|
||||
sortedCoord,
|
||||
leftIndex,
|
||||
leftCoord,
|
||||
maxCoord,
|
||||
globalCurrentSize/n_[compI], // wanted size
|
||||
|
||||
rightIndex,
|
||||
rightCoord
|
||||
);
|
||||
localSize = rightIndex - leftIndex;
|
||||
}
|
||||
|
||||
if (debug)
|
||||
{
|
||||
Pout<< "For component " << compI << ", bin " << bin
|
||||
<< " copying" << nl
|
||||
<< "from " << leftCoord << " at local index "
|
||||
<< leftIndex << nl
|
||||
<< "to " << rightCoord << " localSize:"
|
||||
<< localSize << nl
|
||||
<< endl;
|
||||
}
|
||||
|
||||
|
||||
// Copy localSize elements starting from leftIndex.
|
||||
labelList slice(localSize);
|
||||
|
||||
forAll(slice, i)
|
||||
{
|
||||
label pointI = current[sortedCoord.indices()[leftIndex+i]];
|
||||
|
||||
// Mark point into correct bin
|
||||
finalDecomp[pointI] += bin*mult;
|
||||
|
||||
// And collect for next sorting action
|
||||
slice[i] = pointI;
|
||||
}
|
||||
|
||||
// Sort slice in next component
|
||||
if (componentIndex < 2)
|
||||
{
|
||||
string oldPrefix;
|
||||
if (debug)
|
||||
{
|
||||
oldPrefix = Pout.prefix();
|
||||
Pout.prefix() = " " + oldPrefix;
|
||||
}
|
||||
|
||||
sortComponent
|
||||
(
|
||||
sizeTol,
|
||||
weights,
|
||||
points,
|
||||
slice,
|
||||
componentIndex+1,
|
||||
mult*n_[compI], // Multiplier to apply to decomposition.
|
||||
finalDecomp
|
||||
);
|
||||
|
||||
if (debug)
|
||||
{
|
||||
Pout.prefix() = oldPrefix;
|
||||
}
|
||||
}
|
||||
|
||||
// Step to next bin.
|
||||
leftIndex += localSize;
|
||||
leftCoord = rightCoord;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
|
||||
|
||||
Foam::hierarchGeomDecomp::hierarchGeomDecomp
|
||||
@ -464,4 +770,47 @@ Foam::labelList Foam::hierarchGeomDecomp::decompose
|
||||
}
|
||||
|
||||
|
||||
Foam::labelList Foam::hierarchGeomDecomp::decompose
|
||||
(
|
||||
const pointField& points,
|
||||
const scalarField& weights
|
||||
)
|
||||
{
|
||||
// construct a list for the final result
|
||||
labelList finalDecomp(points.size(), 0);
|
||||
|
||||
// Start off with every point sorted onto itself.
|
||||
labelList slice(points.size());
|
||||
forAll(slice, i)
|
||||
{
|
||||
slice[i] = i;
|
||||
}
|
||||
|
||||
pointField rotatedPoints = rotDelta_ & points;
|
||||
|
||||
|
||||
// Calculate tolerance of cell distribution. For large cases finding
|
||||
// distibution to the cell exact would cause too many iterations so allow
|
||||
// some slack.
|
||||
label allSize = points.size();
|
||||
reduce(allSize, sumOp<label>());
|
||||
|
||||
const label sizeTol = max(1, label(1E-3*allSize/nProcessors_));
|
||||
|
||||
// Sort recursive
|
||||
sortComponent
|
||||
(
|
||||
sizeTol,
|
||||
weights,
|
||||
rotatedPoints,
|
||||
slice,
|
||||
0, // Sort first component in decompOrder.
|
||||
1, // Offset for different x bins.
|
||||
finalDecomp
|
||||
);
|
||||
|
||||
return finalDecomp;
|
||||
}
|
||||
|
||||
|
||||
// ************************************************************************* //
|
||||
|
||||
@ -80,6 +80,17 @@ class hierarchGeomDecomp
|
||||
//- Convert ordering string ("xyz") into list of components.
|
||||
void setDecompOrder();
|
||||
|
||||
//- Evaluates the weighted sizes for each sorted point.
|
||||
static void calculateSortedWeightedSizes
|
||||
(
|
||||
const labelList& current,
|
||||
const labelList& indices,
|
||||
const scalarField& weights,
|
||||
const label globalCurrentSize,
|
||||
|
||||
scalarField& sortedWeightedSizes
|
||||
);
|
||||
|
||||
//- Find index of t in list inbetween indices left and right
|
||||
static label findLower
|
||||
(
|
||||
@ -104,6 +115,22 @@ class hierarchGeomDecomp
|
||||
scalar& midValue // value at mid
|
||||
);
|
||||
|
||||
//- Find midValue (at local index mid) such that the number of
|
||||
// elements between mid and leftIndex are (globally summed) the
|
||||
// wantedSize. Binary search.
|
||||
static void findBinary
|
||||
(
|
||||
const label sizeTol, // size difference considered acceptible
|
||||
const List<scalar>& sortedWeightedSizes,
|
||||
const List<scalar>&,
|
||||
const label leftIndex, // index of previous value
|
||||
const scalar leftValue, // value at leftIndex
|
||||
const scalar maxValue, // global max of values
|
||||
const scalar wantedSize, // wanted size
|
||||
label& mid, // index where size of bin is wantedSize
|
||||
scalar& midValue // value at mid
|
||||
);
|
||||
|
||||
//- Recursively sort in x,y,z (or rather acc. to decompOrder_)
|
||||
void sortComponent
|
||||
(
|
||||
@ -115,6 +142,19 @@ class hierarchGeomDecomp
|
||||
labelList& finalDecomp // overall decomposition
|
||||
);
|
||||
|
||||
//- Recursively sort in x,y,z (or rather acc. to decompOrder_)
|
||||
//- using weighted points.
|
||||
void sortComponent
|
||||
(
|
||||
const label sizeTol,
|
||||
const scalarField& weights,
|
||||
const pointField&,
|
||||
const labelList& slice, // slice of points to decompose
|
||||
const direction componentIndex, // index in decompOrder_
|
||||
const label prevMult, // multiplication factor
|
||||
labelList& finalDecomp // overall decomposition
|
||||
);
|
||||
|
||||
|
||||
//- Disallow default bitwise copy construct and assignment
|
||||
void operator=(const hierarchGeomDecomp&);
|
||||
@ -156,6 +196,14 @@ public:
|
||||
|
||||
//- Return for every coordinate the wanted processor number. Use the
|
||||
// mesh connectivity (if needed)
|
||||
virtual labelList decompose
|
||||
(
|
||||
const pointField&,
|
||||
const scalarField& weights
|
||||
);
|
||||
|
||||
//- Without weights. Code for weighted decomposition is a bit complex
|
||||
// so kept separate for now.
|
||||
virtual labelList decompose(const pointField&);
|
||||
|
||||
//- Return for every coordinate the wanted processor number. Explicitly
|
||||
@ -165,6 +213,16 @@ public:
|
||||
// from 0 at processor0 and then incrementing all through the
|
||||
// processors)
|
||||
// - the connections are across coupled patches
|
||||
virtual labelList decompose
|
||||
(
|
||||
const labelListList& globalCellCells,
|
||||
const pointField& cc,
|
||||
const scalarField& cWeights
|
||||
)
|
||||
{
|
||||
return decompose(cc, cWeights);
|
||||
}
|
||||
|
||||
virtual labelList decompose
|
||||
(
|
||||
const labelListList& globalCellCells,
|
||||
|
||||
@ -82,7 +82,11 @@ Foam::manualDecomp::manualDecomp
|
||||
|
||||
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
|
||||
|
||||
Foam::labelList Foam::manualDecomp::decompose(const pointField& points)
|
||||
Foam::labelList Foam::manualDecomp::decompose
|
||||
(
|
||||
const pointField& points,
|
||||
const scalarField& pointWeights
|
||||
)
|
||||
{
|
||||
const polyMesh& mesh = *meshPtr_;
|
||||
|
||||
@ -103,8 +107,10 @@ Foam::labelList Foam::manualDecomp::decompose(const pointField& points)
|
||||
|
||||
if (finalDecomp.size() != points.size())
|
||||
{
|
||||
FatalErrorIn("manualDecomp::decompose(const pointField&)")
|
||||
<< "Size of decomposition list does not correspond "
|
||||
FatalErrorIn
|
||||
(
|
||||
"manualDecomp::decompose(const pointField&, const scalarField&)"
|
||||
) << "Size of decomposition list does not correspond "
|
||||
<< "to the number of points. Size: "
|
||||
<< finalDecomp.size() << " Number of points: "
|
||||
<< points.size()
|
||||
@ -115,8 +121,10 @@ Foam::labelList Foam::manualDecomp::decompose(const pointField& points)
|
||||
|
||||
if (min(finalDecomp) < 0 || max(finalDecomp) > nProcessors_ - 1)
|
||||
{
|
||||
FatalErrorIn("labelList manualDecomp::decompose(const pointField&)")
|
||||
<< "According to the decomposition, cells assigned to "
|
||||
FatalErrorIn
|
||||
(
|
||||
"manualDecomp::decompose(const pointField&, const scalarField&)"
|
||||
) << "According to the decomposition, cells assigned to "
|
||||
<< "impossible processor numbers. Min processor = "
|
||||
<< min(finalDecomp) << " Max processor = " << max(finalDecomp)
|
||||
<< ".\n" << "Manual decomposition data read from file "
|
||||
|
||||
@ -99,7 +99,11 @@ public:
|
||||
|
||||
//- Return for every coordinate the wanted processor number. Use the
|
||||
// mesh connectivity (if needed)
|
||||
virtual labelList decompose(const pointField&);
|
||||
virtual labelList decompose
|
||||
(
|
||||
const pointField& points,
|
||||
const scalarField& pointWeights
|
||||
);
|
||||
|
||||
//- Return for every coordinate the wanted processor number. Explicitly
|
||||
// provided connectivity - does not use mesh_.
|
||||
@ -111,10 +115,11 @@ public:
|
||||
virtual labelList decompose
|
||||
(
|
||||
const labelListList& globalCellCells,
|
||||
const pointField& cc
|
||||
const pointField& cc,
|
||||
const scalarField& cWeights
|
||||
)
|
||||
{
|
||||
return decompose(cc);
|
||||
return decompose(cc, cWeights);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@ -60,6 +60,7 @@ Foam::label Foam::metisDecomp::decompose
|
||||
(
|
||||
const List<int>& adjncy,
|
||||
const List<int>& xadj,
|
||||
const scalarField& cWeights,
|
||||
|
||||
List<int>& finalDecomp
|
||||
)
|
||||
@ -72,6 +73,8 @@ Foam::label Foam::metisDecomp::decompose
|
||||
// k-way: multi-level k-way
|
||||
word method("k-way");
|
||||
|
||||
int numCells = xadj.size()-1;
|
||||
|
||||
// decomposition options. 0 = use defaults
|
||||
List<int> options(5, 0);
|
||||
|
||||
@ -85,6 +88,30 @@ Foam::label Foam::metisDecomp::decompose
|
||||
// face weights (so on the edges of the dual)
|
||||
List<int> faceWeights;
|
||||
|
||||
|
||||
// Check for externally provided cellweights and if so initialise weights
|
||||
scalar maxWeights = gMax(cWeights);
|
||||
if (cWeights.size() > 0)
|
||||
{
|
||||
if (cWeights.size() != numCells)
|
||||
{
|
||||
FatalErrorIn
|
||||
(
|
||||
"metisDecomp::decompose"
|
||||
"(const pointField&, const scalarField&)"
|
||||
) << "Number of cell weights " << cWeights.size()
|
||||
<< " does not equal number of cells " << numCells
|
||||
<< exit(FatalError);
|
||||
}
|
||||
// Convert to integers.
|
||||
cellWeights.setSize(cWeights.size());
|
||||
forAll(cellWeights, i)
|
||||
{
|
||||
cellWeights[i] = int(1000000*cWeights[i]/maxWeights);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Check for user supplied weights and decomp options
|
||||
if (decompositionDict_.found("metisCoeffs"))
|
||||
{
|
||||
@ -161,52 +188,8 @@ Foam::label Foam::metisDecomp::decompose
|
||||
<< exit(FatalError);
|
||||
}
|
||||
}
|
||||
|
||||
//- faceWeights disabled. Only makes sense for cellCells from mesh.
|
||||
//if (metisCoeffs.readIfPresent("faceWeightsFile", weightsFile))
|
||||
//{
|
||||
// Info<< "metisDecomp : Using face-based weights." << endl;
|
||||
//
|
||||
// IOList<int> weights
|
||||
// (
|
||||
// IOobject
|
||||
// (
|
||||
// weightsFile,
|
||||
// mesh_.time().timeName(),
|
||||
// mesh_,
|
||||
// IOobject::MUST_READ,
|
||||
// IOobject::AUTO_WRITE
|
||||
// )
|
||||
// );
|
||||
//
|
||||
// if (weights.size() != adjncy.size()/2)
|
||||
// {
|
||||
// FatalErrorIn("metisDecomp::decompose(const pointField&)")
|
||||
// << "Number of face weights " << weights.size()
|
||||
// << " does not equal number of internal faces "
|
||||
// << adjncy.size()/2
|
||||
// << exit(FatalError);
|
||||
// }
|
||||
//
|
||||
// // Assume symmetric weights. Keep same ordering as adjncy.
|
||||
// faceWeights.setSize(adjncy.size());
|
||||
//
|
||||
// labelList nFacesPerCell(mesh_.nCells(), 0);
|
||||
//
|
||||
// for (label faceI = 0; faceI < mesh_.nInternalFaces(); faceI++)
|
||||
// {
|
||||
// label w = weights[faceI];
|
||||
//
|
||||
// label own = mesh_.faceOwner()[faceI];
|
||||
// label nei = mesh_.faceNeighbour()[faceI];
|
||||
//
|
||||
// faceWeights[xadj[own] + nFacesPerCell[own]++] = w;
|
||||
// faceWeights[xadj[nei] + nFacesPerCell[nei]++] = w;
|
||||
// }
|
||||
//}
|
||||
}
|
||||
|
||||
int numCells = xadj.size()-1;
|
||||
int nProcs = nProcessors_;
|
||||
|
||||
// output: cell -> processor addressing
|
||||
@ -327,12 +310,18 @@ Foam::metisDecomp::metisDecomp
|
||||
|
||||
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
|
||||
|
||||
Foam::labelList Foam::metisDecomp::decompose(const pointField& points)
|
||||
Foam::labelList Foam::metisDecomp::decompose
|
||||
(
|
||||
const pointField& points,
|
||||
const scalarField& pointWeights
|
||||
)
|
||||
{
|
||||
if (points.size() != mesh_.nCells())
|
||||
{
|
||||
FatalErrorIn("metisDecomp::decompose(const pointField&)")
|
||||
<< "Can use this decomposition method only for the whole mesh"
|
||||
FatalErrorIn
|
||||
(
|
||||
"metisDecomp::decompose(const pointField&,const scalarField&)"
|
||||
) << "Can use this decomposition method only for the whole mesh"
|
||||
<< endl
|
||||
<< "and supply one coordinate (cellCentre) for every cell." << endl
|
||||
<< "The number of coordinates " << points.size() << endl
|
||||
@ -340,19 +329,49 @@ Foam::labelList Foam::metisDecomp::decompose(const pointField& points)
|
||||
<< exit(FatalError);
|
||||
}
|
||||
|
||||
List<int> adjncy;
|
||||
List<int> xadj;
|
||||
calcMetisCSR
|
||||
(
|
||||
mesh_,
|
||||
adjncy,
|
||||
xadj
|
||||
);
|
||||
|
||||
// Decompose using default weights
|
||||
List<int> finalDecomp;
|
||||
decompose(adjncy, xadj, pointWeights, finalDecomp);
|
||||
|
||||
// Copy back to labelList
|
||||
labelList decomp(finalDecomp.size());
|
||||
forAll(decomp, i)
|
||||
{
|
||||
decomp[i] = finalDecomp[i];
|
||||
}
|
||||
return decomp;
|
||||
}
|
||||
|
||||
|
||||
void Foam::metisDecomp::calcMetisCSR
|
||||
(
|
||||
const polyMesh& mesh,
|
||||
List<int>& adjncy,
|
||||
List<int>& xadj
|
||||
)
|
||||
{
|
||||
// Make Metis CSR (Compressed Storage Format) storage
|
||||
// adjncy : contains neighbours (= edges in graph)
|
||||
// xadj(celli) : start of information in adjncy for celli
|
||||
|
||||
List<int> xadj(mesh_.nCells()+1);
|
||||
xadj.setSize(mesh.nCells()+1);
|
||||
|
||||
// Initialise the number of internal faces of the cells to twice the
|
||||
// number of internal faces
|
||||
label nInternalFaces = 2*mesh_.nInternalFaces();
|
||||
label nInternalFaces = 2*mesh.nInternalFaces();
|
||||
|
||||
// Check the boundary for coupled patches and add to the number of
|
||||
// internal faces
|
||||
const polyBoundaryMesh& pbm = mesh_.boundaryMesh();
const polyBoundaryMesh& pbm = mesh.boundaryMesh();

forAll(pbm, patchi)
{
@ -364,17 +383,17 @@ Foam::labelList Foam::metisDecomp::decompose(const pointField& points)

// Create the adjncy array the size of the total number of internal and
// coupled faces
List<int> adjncy(nInternalFaces);
adjncy.setSize(nInternalFaces);

// Fill in xadj
// ~~~~~~~~~~~~
label freeAdj = 0;

for (label cellI = 0; cellI < mesh_.nCells(); cellI++)
for (label cellI = 0; cellI < mesh.nCells(); cellI++)
{
xadj[cellI] = freeAdj;

const labelList& cFaces = mesh_.cells()[cellI];
const labelList& cFaces = mesh.cells()[cellI];

forAll(cFaces, i)
{
@ -382,7 +401,7 @@ Foam::labelList Foam::metisDecomp::decompose(const pointField& points)

if
(
mesh_.isInternalFace(faceI)
mesh.isInternalFace(faceI)
|| isA<cyclicPolyPatch>(pbm[pbm.whichPatch(faceI)])
)
{
@ -390,19 +409,19 @@ Foam::labelList Foam::metisDecomp::decompose(const pointField& points)
}
}
}
xadj[mesh_.nCells()] = freeAdj;
xadj[mesh.nCells()] = freeAdj;


// Fill in adjncy
// ~~~~~~~~~~~~~~

labelList nFacesPerCell(mesh_.nCells(), 0);
labelList nFacesPerCell(mesh.nCells(), 0);

// Internal faces
for (label faceI = 0; faceI < mesh_.nInternalFaces(); faceI++)
for (label faceI = 0; faceI < mesh.nInternalFaces(); faceI++)
{
label own = mesh_.faceOwner()[faceI];
label nei = mesh_.faceNeighbour()[faceI];
label own = mesh.faceOwner()[faceI];
label nei = mesh.faceNeighbour()[faceI];

adjncy[xadj[own] + nFacesPerCell[own]++] = nei;
adjncy[xadj[nei] + nFacesPerCell[nei]++] = own;
@ -427,18 +446,6 @@ Foam::labelList Foam::metisDecomp::decompose(const pointField& points)
}
}
}

// Decompose using default weights
List<int> finalDecomp;
decompose(adjncy, xadj, finalDecomp);

// Copy back to labelList
labelList decomp(finalDecomp.size());
forAll(decomp, i)
{
decomp[i] = finalDecomp[i];
}
return decomp;
}


@ -487,14 +494,16 @@ void Foam::metisDecomp::calcMetisCSR
Foam::labelList Foam::metisDecomp::decompose
(
const labelList& agglom,
const pointField& agglomPoints
const pointField& agglomPoints,
const scalarField& agglomWeights
)
{
if (agglom.size() != mesh_.nCells())
{
FatalErrorIn
(
"parMetisDecomp::decompose(const labelList&, const pointField&)"
"metisDecomp::decompose"
"(const labelList&, const pointField&, const scalarField&)"
) << "Size of cell-to-coarse map " << agglom.size()
<< " differs from number of cells in mesh " << mesh_.nCells()
<< exit(FatalError);
@ -521,7 +530,7 @@ Foam::labelList Foam::metisDecomp::decompose

// Decompose using default weights
List<int> finalDecomp;
decompose(adjncy, xadj, finalDecomp);
decompose(adjncy, xadj, agglomWeights, finalDecomp);


// Rework back into decomposition for original mesh_
@ -539,14 +548,16 @@ Foam::labelList Foam::metisDecomp::decompose
Foam::labelList Foam::metisDecomp::decompose
(
const labelListList& globalCellCells,
const pointField& cellCentres
const pointField& cellCentres,
const scalarField& cellWeights
)
{
if (cellCentres.size() != globalCellCells.size())
{
FatalErrorIn
(
"metisDecomp::decompose(const pointField&, const labelListList&)"
"metisDecomp::decompose"
"(const pointField&, const labelListList&, const scalarField&)"
) << "Inconsistent number of cells (" << globalCellCells.size()
<< ") and number of cell centres (" << cellCentres.size()
<< ")." << exit(FatalError);
@ -564,7 +575,7 @@ Foam::labelList Foam::metisDecomp::decompose

// Decompose using default weights
List<int> finalDecomp;
decompose(adjncy, xadj, finalDecomp);
decompose(adjncy, xadj, cellWeights, finalDecomp);

// Copy back to labelList
labelList decomp(finalDecomp.size());

@ -60,6 +60,7 @@ class metisDecomp
(
const List<int>& adjncy,
const List<int>& xadj,
const scalarField& cellWeights,
List<int>& finalDecomp
);

@ -100,7 +101,11 @@ public:

//- Return for every coordinate the wanted processor number. Use the
//  mesh connectivity (if needed)
virtual labelList decompose(const pointField&);
virtual labelList decompose
(
const pointField& points,
const scalarField& pointWeights
);

//- Return for every coordinate the wanted processor number. Gets
//  passed agglomeration map (from fine to coarse cells) and coarse cell
@ -109,7 +114,8 @@ public:
virtual labelList decompose
(
const labelList& agglom,
const pointField&
const pointField& regionPoints,
const scalarField& regionWeights
);

//- Return for every coordinate the wanted processor number. Explicitly
@ -122,10 +128,21 @@ public:
virtual labelList decompose
(
const labelListList& globalCellCells,
const pointField& cc
const pointField& cc,
const scalarField& cWeights
);

//- Helper to convert cellcells into Metis storage
//- Helper to convert connectivity (supplied as owner,neighbour) into
//  Metis storage
static void calcMetisCSR
(
const polyMesh& mesh,
List<int>& adjncy,
List<int>& xadj
);

//- Helper to convert connectivity (supplied as cellcells) into
//  Metis storage
static void calcMetisCSR
(
const labelListList& globalCellCells,

@ -65,6 +65,7 @@ License
#include "Time.H"
#include "cyclicPolyPatch.H"
#include "OFstream.H"
#include "metisDecomp.H"

extern "C"
{
@ -125,6 +126,7 @@ Foam::label Foam::scotchDecomp::decompose
(
const List<int>& adjncy,
const List<int>& xadj,
const scalarField& cWeights,

List<int>& finalDecomp
)
@ -160,6 +162,33 @@ Foam::label Foam::scotchDecomp::decompose
// Graph
// ~~~~~

List<int> velotab;


// Check for externally provided cellweights and if so initialise weights
scalar maxWeights = gMax(cWeights);
if (cWeights.size() > 0)
{
if (cWeights.size() != xadj.size()-1)
{
FatalErrorIn
(
"parMetisDecomp::decompose"
"(const pointField&, const scalarField&)"
) << "Number of cell weights " << cWeights.size()
<< " does not equal number of cells " << xadj.size()-1
<< exit(FatalError);
}
// Convert to integers.
velotab.setSize(cWeights.size());
forAll(velotab, i)
{
velotab[i] = int(1000000*cWeights[i]/maxWeights);
}
}



SCOTCH_Graph grafdat;
check(SCOTCH_graphInit(&grafdat), "SCOTCH_graphInit");
check
@ -171,7 +200,7 @@ Foam::label Foam::scotchDecomp::decompose
xadj.size()-1, // vertnbr, nCells
xadj.begin(), // verttab, start index per cell into adjncy
&xadj[1], // vendtab, end index ,,
NULL, // velotab, vertex weights
velotab.begin(), // velotab, vertex weights
NULL, // vlbltab
adjncy.size(), // edgenbr, number of arcs
adjncy.begin(), // edgetab
@ -340,7 +369,11 @@ Foam::scotchDecomp::scotchDecomp

// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //

Foam::labelList Foam::scotchDecomp::decompose(const pointField& points)
Foam::labelList Foam::scotchDecomp::decompose
(
const pointField& points,
const scalarField& pointWeights
)
{
if (points.size() != mesh_.nCells())
{
@ -356,94 +389,18 @@ Foam::labelList Foam::scotchDecomp::decompose(const pointField& points)
// Make Metis CSR (Compressed Storage Format) storage
// adjncy : contains neighbours (= edges in graph)
// xadj(celli) : start of information in adjncy for celli

List<int> xadj(mesh_.nCells()+1);

// Initialise the number of internal faces of the cells to twice the
// number of internal faces
label nInternalFaces = 2*mesh_.nInternalFaces();

// Check the boundary for coupled patches and add to the number of
// internal faces
const polyBoundaryMesh& pbm = mesh_.boundaryMesh();

forAll(pbm, patchi)
{
if (isA<cyclicPolyPatch>(pbm[patchi]))
{
nInternalFaces += pbm[patchi].size();
}
}

// Create the adjncy array the size of the total number of internal and
// coupled faces
List<int> adjncy(nInternalFaces);

// Fill in xadj
// ~~~~~~~~~~~~
label freeAdj = 0;

for (label cellI = 0; cellI < mesh_.nCells(); cellI++)
{
xadj[cellI] = freeAdj;

const labelList& cFaces = mesh_.cells()[cellI];

forAll(cFaces, i)
{
label faceI = cFaces[i];

if
(
mesh_.isInternalFace(faceI)
|| isA<cyclicPolyPatch>(pbm[pbm.whichPatch(faceI)])
)
{
freeAdj++;
}
}
}
xadj[mesh_.nCells()] = freeAdj;


// Fill in adjncy
// ~~~~~~~~~~~~~~

labelList nFacesPerCell(mesh_.nCells(), 0);

// Internal faces
for (label faceI = 0; faceI < mesh_.nInternalFaces(); faceI++)
{
label own = mesh_.faceOwner()[faceI];
label nei = mesh_.faceNeighbour()[faceI];

adjncy[xadj[own] + nFacesPerCell[own]++] = nei;
adjncy[xadj[nei] + nFacesPerCell[nei]++] = own;
}

// Coupled faces. Only cyclics done.
forAll(pbm, patchi)
{
if (isA<cyclicPolyPatch>(pbm[patchi]))
{
const unallocLabelList& faceCells = pbm[patchi].faceCells();

label sizeby2 = faceCells.size()/2;

for (label facei=0; facei<sizeby2; facei++)
{
label own = faceCells[facei];
label nei = faceCells[facei + sizeby2];

adjncy[xadj[own] + nFacesPerCell[own]++] = nei;
adjncy[xadj[nei] + nFacesPerCell[nei]++] = own;
}
}
}
List<int> adjncy;
List<int> xadj;
metisDecomp::calcMetisCSR
(
mesh_,
adjncy,
xadj
);

// Decompose using default weights
List<int> finalDecomp;
decompose(adjncy, xadj, finalDecomp);
decompose(adjncy, xadj, pointWeights, finalDecomp);

// Copy back to labelList
labelList decomp(finalDecomp.size());
@ -455,52 +412,11 @@ Foam::labelList Foam::scotchDecomp::decompose(const pointField& points)
}


// From cell-cell connections to Metis format (like CompactListList)
void Foam::scotchDecomp::calcMetisCSR
(
const labelListList& cellCells,
List<int>& adjncy,
List<int>& xadj
)
{
// Count number of internal faces
label nConnections = 0;

forAll(cellCells, coarseI)
{
nConnections += cellCells[coarseI].size();
}

// Create the adjncy array as twice the size of the total number of
// internal faces
adjncy.setSize(nConnections);

xadj.setSize(cellCells.size()+1);


// Fill in xadj
// ~~~~~~~~~~~~
label freeAdj = 0;

forAll(cellCells, coarseI)
{
xadj[coarseI] = freeAdj;

const labelList& cCells = cellCells[coarseI];

forAll(cCells, i)
{
adjncy[freeAdj++] = cCells[i];
}
}
xadj[cellCells.size()] = freeAdj;
}


Foam::labelList Foam::scotchDecomp::decompose
(
const labelList& agglom,
const pointField& agglomPoints
const pointField& agglomPoints,
const scalarField& pointWeights
)
{
if (agglom.size() != mesh_.nCells())
@ -529,13 +445,12 @@ Foam::labelList Foam::scotchDecomp::decompose
cellCells
);

calcMetisCSR(cellCells, adjncy, xadj);
metisDecomp::calcMetisCSR(cellCells, adjncy, xadj);
}

// Decompose using default weights
// Decompose using weights
List<int> finalDecomp;
decompose(adjncy, xadj, finalDecomp);

decompose(adjncy, xadj, pointWeights, finalDecomp);

// Rework back into decomposition for original mesh_
labelList fineDistribution(agglom.size());
@ -552,7 +467,8 @@ Foam::labelList Foam::scotchDecomp::decompose
Foam::labelList Foam::scotchDecomp::decompose
(
const labelListList& globalCellCells,
const pointField& cellCentres
const pointField& cellCentres,
const scalarField& cWeights
)
{
if (cellCentres.size() != globalCellCells.size())
@ -572,12 +488,11 @@ Foam::labelList Foam::scotchDecomp::decompose

List<int> adjncy;
List<int> xadj;
calcMetisCSR(globalCellCells, adjncy, xadj);
metisDecomp::calcMetisCSR(globalCellCells, adjncy, xadj);


// Decompose using default weights
// Decompose using weights
List<int> finalDecomp;
decompose(adjncy, xadj, finalDecomp);
decompose(adjncy, xadj, cWeights, finalDecomp);

// Copy back to labelList
labelList decomp(finalDecomp.size());

@ -63,6 +63,7 @@ class scotchDecomp
(
const List<int>& adjncy,
const List<int>& xadj,
const scalarField& cWeights,
List<int>& finalDecomp
);

@ -103,7 +104,11 @@ public:

//- Return for every coordinate the wanted processor number. Use the
//  mesh connectivity (if needed)
virtual labelList decompose(const pointField&);
virtual labelList decompose
(
const pointField& points,
const scalarField& pointWeights
);

//- Return for every coordinate the wanted processor number. Gets
//  passed agglomeration map (from fine to coarse cells) and coarse cell
@ -112,7 +117,8 @@ public:
virtual labelList decompose
(
const labelList& agglom,
const pointField&
const pointField& regionPoints,
const scalarField& regionWeights
);

//- Return for every coordinate the wanted processor number. Explicitly
@ -125,15 +131,8 @@ public:
virtual labelList decompose
(
const labelListList& globalCellCells,
const pointField& cc
);

//- Helper to convert cellcells into compact row storage
static void calcMetisCSR
(
const labelListList& globalCellCells,
List<int>& adjncy,
List<int>& xadj
const pointField& cc,
const scalarField& cWeights
);

};

@ -22,8 +22,6 @@ License
along with OpenFOAM; if not, write to the Free Software Foundation,
Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

Description

\*---------------------------------------------------------------------------*/

#include "simpleGeomDecomp.H"
@ -93,6 +91,49 @@ void Foam::simpleGeomDecomp::assignToProcessorGroup
}


void Foam::simpleGeomDecomp::assignToProcessorGroup
(
labelList& processorGroup,
const label nProcGroup,
const labelList& indices,
const scalarField& weights,
const scalar summedWeights
)
{
// This routine gets the sorted points.
// Easiest to explain with an example.
// E.g. 400 points, sum of weights : 513.
// Now with number of divisions in this direction (nProcGroup) : 4
// gives the split at 513/4 = 128
// So summed weight from 0..128 goes into bin 0,
//                   ,, 128..256 goes into bin 1
// etc.
// Finally any remaining ones go into the last bin (3).

const scalar jump = summedWeights/nProcGroup;
const label nProcGroupM1 = nProcGroup - 1;
scalar sumWeights = 0;
label ind = 0;
label j = 0;

// assign cells to all except last group.
for (j=0; j<nProcGroupM1; j++)
{
const scalar limit = jump*scalar(j + 1);
while (sumWeights < limit)
{
sumWeights += weights[indices[ind]];
processorGroup[ind++] = j;
}
}
// Ensure last included.
while (ind < processorGroup.size())
{
processorGroup[ind++] = nProcGroupM1;
}
}


// * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //

Foam::simpleGeomDecomp::simpleGeomDecomp(const dictionary& decompositionDict)
@ -121,7 +162,10 @@ Foam::labelList Foam::simpleGeomDecomp::decompose(const pointField& points)
labelList processorGroups(points.size());

labelList pointIndices(points.size());
forAll(pointIndices, i) {pointIndices[i] = i;}
forAll(pointIndices, i)
{
pointIndices[i] = i;
}

pointField rotatedPoints = rotDelta_ & points;

@ -175,9 +219,101 @@ Foam::labelList Foam::simpleGeomDecomp::decompose(const pointField& points)
finalDecomp[pointIndices[i]] += n_.x()*n_.y()*processorGroups[i];
}


return finalDecomp;
}


Foam::labelList Foam::simpleGeomDecomp::decompose
(
const pointField& points,
const scalarField& weights
)
{
// construct a list for the final result
labelList finalDecomp(points.size());

labelList processorGroups(points.size());

labelList pointIndices(points.size());
forAll(pointIndices, i)
{
pointIndices[i] = i;
}

pointField rotatedPoints = rotDelta_ & points;

// and one to take the processor group id's. For each direction.
// we assign the processors to groups of processors labelled
// 0..nX to give a banded structure on the mesh. Then we
// construct the actual processor number by treating this as
// the units part of the processor number.
sort
(
pointIndices,
UList<scalar>::less(rotatedPoints.component(vector::X))
);

const scalar summedWeights = sum(weights);
assignToProcessorGroup
(
processorGroups,
n_.x(),
pointIndices,
weights,
summedWeights
);

forAll(points, i)
{
finalDecomp[pointIndices[i]] = processorGroups[i];
}


// now do the same thing in the Y direction. These processor group
// numbers add multiples of nX to the proc. number (columns)
sort
(
pointIndices,
UList<scalar>::less(rotatedPoints.component(vector::Y))
);

assignToProcessorGroup
(
processorGroups,
n_.y(),
pointIndices,
weights,
summedWeights
);

forAll(points, i)
{
finalDecomp[pointIndices[i]] += n_.x()*processorGroups[i];
}


// finally in the Z direction. Now we add multiples of nX*nY to give
// layers
sort
(
pointIndices,
UList<scalar>::less(rotatedPoints.component(vector::Z))
);

assignToProcessorGroup
(
processorGroups,
n_.z(),
pointIndices,
weights,
summedWeights
);

forAll(points, i)
{
finalDecomp[pointIndices[i]] += n_.x()*n_.y()*processorGroups[i];
}

return finalDecomp;
}
// ************************************************************************* //

@ -52,6 +52,15 @@ class simpleGeomDecomp

void assignToProcessorGroup(labelList& processorGroup, const label);

void assignToProcessorGroup
(
labelList& processorGroup,
const label nProcGroup,
const labelList& indices,
const scalarField& weights,
const scalar summedWeights
);

//- Disallow default bitwise copy construct and assignment
void operator=(const simpleGeomDecomp&);
simpleGeomDecomp(const simpleGeomDecomp&);
@ -86,21 +95,31 @@ public:

virtual bool parallelAware() const
{
// simple decomp does not attempt to do anything across proc
// simpleDecomp does not attempt to do anything across proc
// boundaries
return false;
}

virtual labelList decompose(const pointField&);
virtual labelList decompose
(
const pointField& points
);

virtual labelList decompose
(
const pointField& points,
const scalarField& pointWeights
);

//- Explicitly provided connectivity
virtual labelList decompose
(
const labelListList& globalCellCells,
const pointField& cc
const pointField& cc,
const scalarField& cWeights
)
{
return decompose(cc);
return decompose(cc, cWeights);
}
};


@ -24,9 +24,9 @@ License

\*---------------------------------------------------------------------------*/

#include "syncTools.H"
#include "parMetisDecomp.H"
#include "metisDecomp.H"
#include "syncTools.H"
#include "addToRunTimeSelectionTable.H"
#include "floatScalar.H"
#include "polyMesh.H"
@ -370,15 +370,22 @@ Foam::parMetisDecomp::parMetisDecomp

// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //

Foam::labelList Foam::parMetisDecomp::decompose(const pointField& points)
Foam::labelList Foam::parMetisDecomp::decompose
(
const pointField& cc,
const scalarField& cWeights
)
{
if (points.size() != mesh_.nCells())
if (cc.size() != mesh_.nCells())
{
FatalErrorIn("parMetisDecomp::decompose(const pointField&)")
<< "Can use this decomposition method only for the whole mesh"
FatalErrorIn
(
"parMetisDecomp::decompose"
"(const pointField&, const scalarField&)"
) << "Can use this decomposition method only for the whole mesh"
<< endl
<< "and supply one coordinate (cellCentre) for every cell." << endl
<< "The number of coordinates " << points.size() << endl
<< "The number of coordinates " << cc.size() << endl
<< "The number of cells in the mesh " << mesh_.nCells()
<< exit(FatalError);
}
@ -386,156 +393,24 @@ Foam::labelList Foam::parMetisDecomp::decompose(const pointField& points)
// For running sequential ...
if (Pstream::nProcs() <= 1)
{
return metisDecomp(decompositionDict_, mesh_).decompose(points);
}

// Create global cell numbers
// ~~~~~~~~~~~~~~~~~~~~~~~~~~

// Get number of cells on all processors
List<int> nLocalCells(Pstream::nProcs());
nLocalCells[Pstream::myProcNo()] = mesh_.nCells();
Pstream::gatherList(nLocalCells);
Pstream::scatterList(nLocalCells);

// Get cell offsets.
List<int> cellOffsets(Pstream::nProcs()+1);
int nGlobalCells = 0;
forAll(nLocalCells, procI)
{
cellOffsets[procI] = nGlobalCells;
nGlobalCells += nLocalCells[procI];
}
cellOffsets[Pstream::nProcs()] = nGlobalCells;

int myOffset = cellOffsets[Pstream::myProcNo()];


//
// Make Metis Distributed CSR (Compressed Storage Format) storage
// adjncy      : contains cellCells (= edges in graph)
// xadj(celli) : start of information in adjncy for celli
//


const labelList& faceOwner = mesh_.faceOwner();
const labelList& faceNeighbour = mesh_.faceNeighbour();
const polyBoundaryMesh& patches = mesh_.boundaryMesh();


// Get renumbered owner on other side of coupled faces
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

List<int> globalNeighbour(mesh_.nFaces()-mesh_.nInternalFaces());

forAll(patches, patchI)
{
const polyPatch& pp = patches[patchI];

if (pp.coupled())
{
label faceI = pp.start();
label bFaceI = pp.start() - mesh_.nInternalFaces();

forAll(pp, i)
{
globalNeighbour[bFaceI++] = faceOwner[faceI++] + myOffset;
}
}
}

// Get the cell on the other side of coupled patches
syncTools::swapBoundaryFaceList(mesh_, globalNeighbour, false);


// Count number of faces (internal + coupled)
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

// Number of faces per cell
List<int> nFacesPerCell(mesh_.nCells(), 0);

// Number of coupled faces
label nCoupledFaces = 0;

for (label faceI = 0; faceI < mesh_.nInternalFaces(); faceI++)
{
nFacesPerCell[faceOwner[faceI]]++;
nFacesPerCell[faceNeighbour[faceI]]++;
}
// Handle coupled faces
forAll(patches, patchI)
{
const polyPatch& pp = patches[patchI];

if (pp.coupled())
{
label faceI = pp.start();

forAll(pp, i)
{
nCoupledFaces++;
nFacesPerCell[faceOwner[faceI++]]++;
}
}
return metisDecomp(decompositionDict_, mesh_).decompose
(
cc,
cWeights
);
}

// Fill in xadj
// ~~~~~~~~~~~~

Field<int> xadj(mesh_.nCells()+1, -1);

int freeAdj = 0;

for (label cellI = 0; cellI < mesh_.nCells(); cellI++)
{
xadj[cellI] = freeAdj;

freeAdj += nFacesPerCell[cellI];
}
xadj[mesh_.nCells()] = freeAdj;



// Fill in adjncy
// ~~~~~~~~~~~~~~

Field<int> adjncy(2*mesh_.nInternalFaces() + nCoupledFaces, -1);

nFacesPerCell = 0;

// For internal faces is just offsetted owner and neighbour
for (label faceI = 0; faceI < mesh_.nInternalFaces(); faceI++)
{
label own = faceOwner[faceI];
label nei = faceNeighbour[faceI];

adjncy[xadj[own] + nFacesPerCell[own]++] = nei + myOffset;
adjncy[xadj[nei] + nFacesPerCell[nei]++] = own + myOffset;
}
// For boundary faces is offsetted coupled neighbour
forAll(patches, patchI)
{
const polyPatch& pp = patches[patchI];

if (pp.coupled())
{
label faceI = pp.start();
label bFaceI = pp.start()-mesh_.nInternalFaces();

forAll(pp, i)
{
label own = faceOwner[faceI];
adjncy[xadj[own] + nFacesPerCell[own]++] =
globalNeighbour[bFaceI];

faceI++;
bFaceI++;
}
}
}

// Connections
Field<int> adjncy;
// Offsets into adjncy
Field<int> xadj;
calcMetisDistributedCSR
(
mesh_,
adjncy,
xadj
);


// decomposition options. 0 = use defaults
@ -550,6 +425,30 @@ Foam::labelList Foam::parMetisDecomp::decompose(const pointField& points)
// face weights (so on the edges of the dual)
Field<int> faceWeights;


// Check for externally provided cellweights and if so initialise weights
scalar maxWeights = gMax(cWeights);
if (cWeights.size() > 0)
{
if (cWeights.size() != mesh_.nCells())
{
FatalErrorIn
(
"metisDecomp::decompose"
"(const pointField&, const scalarField&)"
) << "Number of cell weights " << cWeights.size()
<< " does not equal number of cells " << mesh_.nCells()
<< exit(FatalError);
}
// Convert to integers.
cellWeights.setSize(cWeights.size());
forAll(cellWeights, i)
{
cellWeights[i] = int(1000000*cWeights[i]/maxWeights);
}
}


// Check for user supplied weights and decomp options
if (decompositionDict_.found("metisCoeffs"))
{
@ -577,8 +476,11 @@ Foam::labelList Foam::parMetisDecomp::decompose(const pointField& points)

if (cellWeights.size() != mesh_.nCells())
{
FatalErrorIn("parMetisDecomp::decompose(const pointField&)")
<< "Number of cell weights " << cellWeights.size()
FatalErrorIn
(
"parMetisDecomp::decompose"
"(const pointField&, const scalarField&)"
) << "Number of cell weights " << cellWeights.size()
<< " read from " << cellIOWeights.objectPath()
<< " does not equal number of cells " << mesh_.nCells()
<< exit(FatalError);
@ -604,30 +506,35 @@ Foam::labelList Foam::parMetisDecomp::decompose(const pointField& points)

if (weights.size() != mesh_.nFaces())
{
FatalErrorIn("parMetisDecomp::decompose(const pointField&)")
<< "Number of face weights " << weights.size()
FatalErrorIn
(
"parMetisDecomp::decompose"
"(const pointField&, const scalarField&)"
) << "Number of face weights " << weights.size()
<< " does not equal number of internal and boundary faces "
<< mesh_.nFaces()
<< exit(FatalError);
}

faceWeights.setSize(2*mesh_.nInternalFaces()+nCoupledFaces);
faceWeights.setSize(adjncy.size());

// Assume symmetric weights. Keep same ordering as adjncy.
nFacesPerCell = 0;
List<int> nFacesPerCell(mesh_.nCells(), 0);

// Handle internal faces
for (label faceI = 0; faceI < mesh_.nInternalFaces(); faceI++)
{
label w = weights[faceI];

label own = faceOwner[faceI];
label nei = faceNeighbour[faceI];
label own = mesh_.faceOwner()[faceI];
label nei = mesh_.faceNeighbour()[faceI];

faceWeights[xadj[own] + nFacesPerCell[own]++] = w;
faceWeights[xadj[nei] + nFacesPerCell[nei]++] = w;
}
// Coupled boundary faces
const polyBoundaryMesh& patches = mesh_.boundaryMesh();

forAll(patches, patchI)
{
const polyPatch& pp = patches[patchI];
@ -639,8 +546,8 @@ Foam::labelList Foam::parMetisDecomp::decompose(const pointField& points)
forAll(pp, i)
{
label w = weights[faceI];
label own = faceOwner[faceI];
adjncy[xadj[own] + nFacesPerCell[own]++] = w;
label own = mesh_.faceOwner()[faceI];
faceWeights[xadj[own] + nFacesPerCell[own]++] = w;
faceI++;
}
}
@ -654,8 +561,11 @@ Foam::labelList Foam::parMetisDecomp::decompose(const pointField& points)

if (options.size() != 3)
{
FatalErrorIn("parMetisDecomp::decompose(const pointField&)")
<< "Number of options " << options.size()
FatalErrorIn
(
"parMetisDecomp::decompose"
"(const pointField&, const scalarField&)"
) << "Number of options " << options.size()
<< " should be three." << exit(FatalError);
}
}
@ -668,7 +578,7 @@ Foam::labelList Foam::parMetisDecomp::decompose(const pointField& points)
(
xadj,
adjncy,
points,
cc,
cellWeights,
faceWeights,
options,
@ -689,7 +599,8 @@ Foam::labelList Foam::parMetisDecomp::decompose(const pointField& points)
Foam::labelList Foam::parMetisDecomp::decompose
(
const labelList& cellToRegion,
const pointField& regionPoints
const pointField& regionPoints,
const scalarField& regionWeights
)
{
const labelList& faceOwner = mesh_.faceOwner();
@ -798,7 +709,15 @@ Foam::labelList Foam::parMetisDecomp::decompose
}
}

labelList regionDecomp(decompose(globalRegionRegions, regionPoints));
labelList regionDecomp
(
decompose
(
globalRegionRegions,
regionPoints,
regionWeights
)
);

// Rework back into decomposition for original mesh_
labelList cellDistribution(cellToRegion.size());
@ -814,24 +733,26 @@ Foam::labelList Foam::parMetisDecomp::decompose
Foam::labelList Foam::parMetisDecomp::decompose
(
const labelListList& globalCellCells,
const pointField& cellCentres
const pointField& cellCentres,
const scalarField& cWeights
)
{
if (cellCentres.size() != globalCellCells.size())
{
FatalErrorIn
(
"parMetisDecomp::decompose(const labelListList&, const pointField&)"
"parMetisDecomp::decompose(const labelListList&"
", const pointField&, const scalarField&)"
) << "Inconsistent number of cells (" << globalCellCells.size()
<< ") and number of cell centres (" << cellCentres.size()
<< ")." << exit(FatalError);
<< ") or weights (" << cWeights.size() << ")." << exit(FatalError);
}

// For running sequential ...
if (Pstream::nProcs() <= 1)
{
return metisDecomp(decompositionDict_, mesh_)
.decompose(globalCellCells, cellCentres);
.decompose(globalCellCells, cellCentres, cWeights);
}


@ -855,106 +776,35 @@ Foam::labelList Foam::parMetisDecomp::decompose
// face weights (so on the edges of the dual)
Field<int> faceWeights;


// Check for externally provided cellweights and if so initialise weights
scalar maxWeights = gMax(cWeights);
if (cWeights.size() > 0)
{
if (cWeights.size() != globalCellCells.size())
{
FatalErrorIn
(
"parMetisDecomp::decompose(const labelListList&"
", const pointField&, const scalarField&)"
) << "Number of cell weights " << cWeights.size()
<< " does not equal number of cells " << globalCellCells.size()
<< exit(FatalError);
}
// Convert to integers.
cellWeights.setSize(cWeights.size());
forAll(cellWeights, i)
{
cellWeights[i] = int(1000000*cWeights[i]/maxWeights);
}
}


// Check for user supplied weights and decomp options
if (decompositionDict_.found("metisCoeffs"))
{
const dictionary& metisCoeffs =
decompositionDict_.subDict("metisCoeffs");
word weightsFile;

if (metisCoeffs.readIfPresent("cellWeightsFile", weightsFile))
{
Info<< "parMetisDecomp : Using cell-based weights read from "
<< weightsFile << endl;

labelIOField cellIOWeights
(
IOobject
(
weightsFile,
mesh_.time().timeName(),
mesh_,
IOobject::MUST_READ,
IOobject::AUTO_WRITE
)
);
cellWeights.transfer(cellIOWeights);

if (cellWeights.size() != cellCentres.size())
{
FatalErrorIn
(
"parMetisDecomp::decompose"
"(const labelListList&, const pointField&)"
) << "Number of cell weights " << cellWeights.size()
<< " read from " << cellIOWeights.objectPath()
<< " does not equal number of cells " << cellCentres.size()
<< exit(FatalError);
}
}

//- faceWeights disabled. Only makes sense for cellCells from mesh.
//if (metisCoeffs.readIfPresent("faceWeightsFile", weightsFile))
//{
//    Info<< "parMetisDecomp : Using face-based weights read from "
//        << weightsFile << endl;
//
//    labelIOField weights
//    (
//        IOobject
//        (
//            weightsFile,
//            mesh_.time().timeName(),
//            mesh_,
//            IOobject::MUST_READ,
//            IOobject::AUTO_WRITE
//        )
//    );
//
//    if (weights.size() != mesh_.nFaces())
//    {
//        FatalErrorIn("parMetisDecomp::decompose(const pointField&)")
//            << "Number of face weights " << weights.size()
//            << " does not equal number of internal and boundary faces "
//            << mesh_.nFaces()
//            << exit(FatalError);
//    }
//
//    faceWeights.setSize(2*mesh_.nInternalFaces()+nCoupledFaces);
//
//    // Assume symmetric weights. Keep same ordering as adjncy.
//    nFacesPerCell = 0;
//
//    // Handle internal faces
//    for (label faceI = 0; faceI < mesh_.nInternalFaces(); faceI++)
//    {
//        label w = weights[faceI];
//
//        label own = faceOwner[faceI];
//        label nei = faceNeighbour[faceI];
//
//        faceWeights[xadj[own] + nFacesPerCell[own]++] = w;
//        faceWeights[xadj[nei] + nFacesPerCell[nei]++] = w;
//    }
//    // Coupled boundary faces
//    forAll(patches, patchI)
//    {
//        const polyPatch& pp = patches[patchI];
//
//        if (pp.coupled())
//        {
//            label faceI = pp.start();
//
//            forAll(pp, i)
//            {
//                label w = weights[faceI];
//                label own = faceOwner[faceI];
//                adjncy[xadj[own] + nFacesPerCell[own]++] = w;
//                faceI++;
//            }
//        }
//    }
//}

if (metisCoeffs.readIfPresent("options", options))
{
@ -965,8 +815,8 @@ Foam::labelList Foam::parMetisDecomp::decompose
{
FatalErrorIn
(
"parMetisDecomp::decompose"
"(const labelListList&, const pointField&)"
"parMetisDecomp::decompose(const labelListList&"
", const pointField&, const scalarField&)"
) << "Number of options " << options.size()
<< " should be three." << exit(FatalError);
}
@ -998,4 +848,146 @@ Foam::labelList Foam::parMetisDecomp::decompose
}

void Foam::parMetisDecomp::calcMetisDistributedCSR
(
const polyMesh& mesh,
List<int>& adjncy,
List<int>& xadj
)
{
// Create global cell numbers
// ~~~~~~~~~~~~~~~~~~~~~~~~~~

globalIndex globalCells(mesh.nCells());


//
// Make Metis Distributed CSR (Compressed Storage Format) storage
// adjncy      : contains cellCells (= edges in graph)
// xadj(celli) : start of information in adjncy for celli
//


const labelList& faceOwner = mesh.faceOwner();
const labelList& faceNeighbour = mesh.faceNeighbour();
const polyBoundaryMesh& patches = mesh.boundaryMesh();


// Get renumbered owner on other side of coupled faces
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

List<int> globalNeighbour(mesh.nFaces()-mesh.nInternalFaces());

forAll(patches, patchI)
{
const polyPatch& pp = patches[patchI];

if (pp.coupled())
{
label faceI = pp.start();
label bFaceI = pp.start() - mesh.nInternalFaces();

forAll(pp, i)
{
globalNeighbour[bFaceI++] = globalCells.toGlobal
(
faceOwner[faceI++]
);
}
}
}

// Get the cell on the other side of coupled patches
syncTools::swapBoundaryFaceList(mesh, globalNeighbour, false);


// Count number of faces (internal + coupled)
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

// Number of faces per cell
List<int> nFacesPerCell(mesh.nCells(), 0);

// Number of coupled faces
label nCoupledFaces = 0;

for (label faceI = 0; faceI < mesh.nInternalFaces(); faceI++)
{
nFacesPerCell[faceOwner[faceI]]++;
nFacesPerCell[faceNeighbour[faceI]]++;
}
// Handle coupled faces
forAll(patches, patchI)
{
const polyPatch& pp = patches[patchI];

if (pp.coupled())
{
label faceI = pp.start();

forAll(pp, i)
{
nCoupledFaces++;
nFacesPerCell[faceOwner[faceI++]]++;
}
}
}


// Fill in xadj
// ~~~~~~~~~~~~

xadj.setSize(mesh.nCells()+1);

int freeAdj = 0;

for (label cellI = 0; cellI < mesh.nCells(); cellI++)
{
xadj[cellI] = freeAdj;

freeAdj += nFacesPerCell[cellI];
}
xadj[mesh.nCells()] = freeAdj;



// Fill in adjncy
// ~~~~~~~~~~~~~~

adjncy.setSize(2*mesh.nInternalFaces() + nCoupledFaces);

nFacesPerCell = 0;

// For internal faces is just offsetted owner and neighbour
for (label faceI = 0; faceI < mesh.nInternalFaces(); faceI++)
{
label own = faceOwner[faceI];
label nei = faceNeighbour[faceI];

adjncy[xadj[own] + nFacesPerCell[own]++] = globalCells.toGlobal(nei);
adjncy[xadj[nei] + nFacesPerCell[nei]++] = globalCells.toGlobal(own);
}
// For boundary faces is offsetted coupled neighbour
forAll(patches, patchI)
{
const polyPatch& pp = patches[patchI];

if (pp.coupled())
{
label faceI = pp.start();
label bFaceI = pp.start()-mesh.nInternalFaces();

forAll(pp, i)
{
label own = faceOwner[faceI];
adjncy[xadj[own] + nFacesPerCell[own]++] =
globalNeighbour[bFaceI];

faceI++;
bFaceI++;
}
}
}
}


// ************************************************************************* //

@ -112,7 +112,11 @@ public:

//- Return for every coordinate the wanted processor number. Use the
//  mesh connectivity (if needed)
virtual labelList decompose(const pointField&);
virtual labelList decompose
(
const pointField& points,
const scalarField& pointWeights
);

//- Return for every coordinate the wanted processor number. Gets
//  passed agglomeration map (from fine to coarse cells) and coarse cell
@ -120,8 +124,9 @@ public:
//  functionality natively.
virtual labelList decompose
(
const labelList& agglom,
const pointField&
const labelList& cellToRegion,
const pointField& regionPoints,
const scalarField& regionWeights
);

//- Return for every coordinate the wanted processor number. Explicitly
@ -134,7 +139,16 @@ public:
virtual labelList decompose
(
const labelListList& globalCellCells,
const pointField& cc
const pointField& cc,
const scalarField& cWeights
);

//- Helper to convert mesh connectivity into distributed CSR
static void calcMetisDistributedCSR
(
const polyMesh&,
List<int>& adjncy,
List<int>& xadj
);
};


@ -79,7 +79,7 @@ Foam::autoPtr<Foam::dynamicFvMesh> Foam::dynamicFvMesh::New(const IOobject& io)
) << "Unknown dynamicFvMesh type " << dynamicFvMeshTypeName
<< endl << endl
<< "Valid dynamicFvMesh types are :" << endl
<< IOobjectConstructorTablePtr_->toc()
<< IOobjectConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}


@ -55,7 +55,7 @@ Foam::autoPtr<Foam::solidBodyMotionFunction> Foam::solidBodyMotionFunction::New
) << "Unknown solidBodyMotionFunction type "
<< solidBodyMotionFunctionTypeName << endl << endl
<< "Valid solidBodyMotionFunctions are : " << endl
<< dictionaryConstructorTablePtr_->toc()
<< dictionaryConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}


@ -16,16 +16,13 @@ $(enrichedPatch)/enrichedPatchPointPoints.C
$(enrichedPatch)/enrichedPatchCutFaces.C
$(enrichedPatch)/enrichedPatchMasterPoints.C

/* polyTopoChange/polyTopoChange/polyTopoChange.C */
polyTopoChange/polyTopoChange/topoAction/topoActions.C

polyMeshModifier = polyTopoChange/polyMeshModifier
$(polyMeshModifier)/polyMeshModifier.C
$(polyMeshModifier)/newPolyMeshModifier.C

polyTopoChange/polyTopoChange/topoAction/topoActions.C
polyTopoChange/polyTopoChanger/polyTopoChanger.C
/* polyTopoChange/polyTopoChanger/polyTopoChangerChangeMesh.C */

polyTopoChange/polyTopoChange/polyTopoChange.C
polyTopoChange/polyTopoChange/addPatchCellLayer.C
polyTopoChange/polyTopoChange/edgeCollapser.C

@ -62,7 +62,7 @@ autoPtr<cellLooper> cellLooper::New
) << "Unknown set type " << type
<< endl << endl
<< "Valid cellLooper types : " << endl
<< wordConstructorTablePtr_->toc()
<< wordConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}


@ -74,7 +74,7 @@ public:
{
TAN1,
TAN2,
NORMAL,
NORMAL
};

private:

@ -107,7 +107,7 @@ Foam::autoPtr<Foam::motionSolver> Foam::motionSolver::New(const polyMesh& mesh)
) << "Unknown solver type " << solverTypeName
<< endl << endl
<< "Valid solver types are: " << endl
<< dictionaryConstructorTablePtr_->toc()
<< dictionaryConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}


@ -64,7 +64,7 @@ autoPtr<polyMeshModifier> polyMeshModifier::New
dict
) << "Unknown polyMeshModifier type " << patchType << endl << endl
<< "Valid polyMeshModifier types are :" << endl
<< dictionaryConstructorTablePtr_->toc()
<< dictionaryConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -350,6 +350,18 @@ void Foam::hexRef8::modFace
// Bit complex way to determine the unrefined edge length.
Foam::scalar Foam::hexRef8::getLevel0EdgeLength() const
{
if (cellLevel_.size() != mesh_.nCells())
{
FatalErrorIn
(
"hexRef8::getLevel0EdgeLength() const"
) << "Number of cells in mesh:" << mesh_.nCells()
<< " does not equal size of cellLevel:" << cellLevel_.size()
<< endl
<< "This might be because of a restart with inconsistent cellLevel."
<< abort(FatalError);
}

// Determine minimum edge length per refinement level
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


@ -69,7 +69,7 @@ Foam::autoPtr<Foam::engineMesh> Foam::engineMesh::New
) << "Unknown engineMesh type " << engineMeshTypeName
<< endl << endl
<< "Valid engineMesh types are :" << endl
<< IOobjectConstructorTablePtr_->toc()
<< IOobjectConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}


@ -86,7 +86,6 @@ $(basicFvPatchFields)/coupled/coupledFvPatchFields.C
$(basicFvPatchFields)/directionMixed/directionMixedFvPatchFields.C
$(basicFvPatchFields)/fixedGradient/fixedGradientFvPatchFields.C
$(basicFvPatchFields)/fixedValue/fixedValueFvPatchFields.C
$(basicFvPatchFields)/generic/genericFvPatchFields.C
$(basicFvPatchFields)/mixed/mixedFvPatchFields.C
$(basicFvPatchFields)/sliced/slicedFvPatchFields.C
$(basicFvPatchFields)/transform/transformFvPatchFields.C

@ -76,7 +76,7 @@ autoPtr<SRFModel> SRFModel::New
) << "Unknown SRFModel type " << SRFModelTypeName
<< nl << nl
<< "Valid SRFModel types are :" << nl
<< dictionaryConstructorTablePtr_->toc()
<< dictionaryConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}


@ -114,12 +114,12 @@ directMappedFixedValueFvPatchField<Type>::directMappedFixedValueFvPatchField
<< exit(FatalError);
}

// Force calculation of schedule (uses parallel comms)
const directMappedPatchBase& mpp = refCast<const directMappedPatchBase>
(
this->patch().patch()
);
(void)mpp.map().schedule();
//// Force calculation of schedule (uses parallel comms)
//const directMappedPatchBase& mpp = refCast<const directMappedPatchBase>
//(
//    this->patch().patch()
//);
//(void)mpp.map().schedule();
}


@ -166,6 +166,10 @@ void directMappedFixedValueFvPatchField<Type>::updateCoeffs()
directMappedFixedValueFvPatchField<Type>::patch().patch()
);
const mapDistribute& distMap = mpp.map();

// Force recalculation of schedule
(void)distMap.schedule();

const fvMesh& nbrMesh = refCast<const fvMesh>(mpp.sampleMesh());
const word& fldName = this->dimensionedInternalField().name();


@ -54,7 +54,7 @@ Foam::tmp<Foam::fvPatchField<Type> > Foam::fvPatchField<Type>::New
) << "Unknown patchTypefield type " << patchFieldType
<< endl << endl
<< "Valid patchField types are :" << endl
<< patchConstructorTablePtr_->toc()
<< patchConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}

@ -111,7 +111,7 @@ Foam::tmp<Foam::fvPatchField<Type> > Foam::fvPatchField<Type>::New
) << "Unknown patchField type " << patchFieldType
<< " for patch type " << p.type() << endl << endl
<< "Valid patchField types are :" << endl
<< dictionaryConstructorTablePtr_->toc()
<< dictionaryConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}
}
@ -178,7 +178,7 @@ Foam::tmp<Foam::fvPatchField<Type> > Foam::fvPatchField<Type>::New
"const fvPatchFieldMapper&)"
) << "unknown patchTypefield type " << ptf.type() << endl << endl
<< "Valid patchField types are :" << endl
<< patchMapperConstructorTablePtr_->toc()
<< patchMapperConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}


@ -59,7 +59,7 @@ tmp<fvsPatchField<Type> > fvsPatchField<Type>::New
) << "Unknown patchTypefield type " << patchFieldType
<< endl << endl
<< "Valid patchField types are :" << endl
<< patchConstructorTablePtr_->toc()
<< patchConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}

@ -115,7 +115,7 @@ tmp<fvsPatchField<Type> > fvsPatchField<Type>::New
) << "Unknown patchField type " << patchFieldType
<< " for patch type " << p.type() << endl << endl
<< "Valid patchField types are :" << endl
<< dictionaryConstructorTablePtr_->toc()
<< dictionaryConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}
}
@ -183,7 +183,7 @@ tmp<fvsPatchField<Type> > fvsPatchField<Type>::New
"const fvPatchFieldMapper&)"
) << "unknown patchTypefield type " << ptf.type() << endl << endl
<< "Valid patchField types are :" << endl
<< patchMapperConstructorTablePtr_->toc()
<< patchMapperConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}


@ -79,7 +79,7 @@ tmp<convectionScheme<Type> > convectionScheme<Type>::New
schemeData
) << "Convection scheme not specified" << endl << endl
<< "Valid convection schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -97,7 +97,7 @@ tmp<convectionScheme<Type> > convectionScheme<Type>::New
schemeData
) << "unknown convection scheme " << schemeName << endl << endl
<< "Valid convection schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -136,7 +136,7 @@ tmp<convectionScheme<Type> > convectionScheme<Type>::New
schemeData
) << "Convection scheme not specified" << endl << endl
<< "Valid convection schemes are :" << endl
<< MultivariateConstructorTablePtr_->toc()
<< MultivariateConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -156,7 +156,7 @@ tmp<convectionScheme<Type> > convectionScheme<Type>::New
schemeData
) << "unknown convection scheme " << schemeName << endl << endl
<< "Valid convection schemes are :" << endl
<< MultivariateConstructorTablePtr_->toc()
<< MultivariateConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -64,7 +64,7 @@ tmp<d2dt2Scheme<Type> > d2dt2Scheme<Type>::New
schemeData
) << "D2dt2 scheme not specified" << endl << endl
<< "Valid d2dt2 schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -81,7 +81,7 @@ tmp<d2dt2Scheme<Type> > d2dt2Scheme<Type>::New
schemeData
) << "unknown d2dt2 scheme " << schemeName << endl << endl
<< "Valid d2dt2 schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -62,7 +62,7 @@ tmp<ddtScheme<Type> > ddtScheme<Type>::New
schemeData
) << "Ddt scheme not specified" << endl << endl
<< "Valid ddt schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -79,7 +79,7 @@ tmp<ddtScheme<Type> > ddtScheme<Type>::New
schemeData
) << "unknown ddt scheme " << schemeName << endl << endl
<< "Valid ddt schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -65,7 +65,7 @@ tmp<divScheme<Type> > divScheme<Type>::New
schemeData
) << "Div scheme not specified" << endl << endl
<< "Valid div schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -82,7 +82,7 @@ tmp<divScheme<Type> > divScheme<Type>::New
schemeData
) << "unknown div scheme " << schemeName << endl << endl
<< "Valid div schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -64,7 +64,7 @@ tmp<gradScheme<Type> > gradScheme<Type>::New
schemeData
) << "Grad scheme not specified" << endl << endl
<< "Valid grad schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -81,7 +81,7 @@ tmp<gradScheme<Type> > gradScheme<Type>::New
schemeData
) << "unknown grad scheme " << schemeName << endl << endl
<< "Valid grad schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -63,7 +63,7 @@ tmp<laplacianScheme<Type, GType> > laplacianScheme<Type, GType>::New
schemeData
) << "Laplacian scheme not specified" << endl << endl
<< "Valid laplacian schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -80,7 +80,7 @@ tmp<laplacianScheme<Type, GType> > laplacianScheme<Type, GType>::New
schemeData
) << "unknown laplacian scheme " << schemeName << endl << endl
<< "Valid laplacian schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -65,7 +65,7 @@ tmp<snGradScheme<Type> > snGradScheme<Type>::New
) << "Discretisation scheme not specified"
<< endl << endl
<< "Valid schemes are :" << endl
<< MeshConstructorTablePtr_->toc()
<< MeshConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -83,7 +83,7 @@ tmp<snGradScheme<Type> > snGradScheme<Type>::New
) << "Unknown discretisation scheme " << schemeName
<< endl << endl
<< "Valid schemes are :" << endl
<< MeshConstructorTablePtr_->toc()
<< MeshConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -50,7 +50,7 @@ Foam::autoPtr<Foam::fvPatch> Foam::fvPatch::New
FatalErrorIn("fvPatch::New(const polyPatch&, const fvBoundaryMesh&)")
<< "Unknown fvPatch type " << patch.type() << ".\n"
<< "Valid fvPatch types are :"
<< polyPatchConstructorTablePtr_->toc()
<< polyPatchConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}


@ -50,7 +50,7 @@ Foam::interpolation<Type>::New
) << "Unknown interpolation type " << interpolationType
<< " for field " << psi.name() << nl << nl
<< "Valid interpolation types : " << endl
<< dictionaryConstructorTablePtr_->toc()
<< dictionaryConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}


@ -62,7 +62,7 @@ limitedSurfaceInterpolationScheme<Type>::New
) << "Discretisation scheme not specified"
<< endl << endl
<< "Valid schemes are :" << endl
<< MeshConstructorTablePtr_->toc()
<< MeshConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -81,7 +81,7 @@ limitedSurfaceInterpolationScheme<Type>::New
) << "Unknown discretisation scheme " << schemeName
<< endl << endl
<< "Valid schemes are :" << endl
<< MeshConstructorTablePtr_->toc()
<< MeshConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -117,7 +117,7 @@ limitedSurfaceInterpolationScheme<Type>::New
) << "Discretisation scheme not specified"
<< endl << endl
<< "Valid schemes are :" << endl
<< MeshConstructorTablePtr_->toc()
<< MeshConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -136,7 +136,7 @@ limitedSurfaceInterpolationScheme<Type>::New
) << "Unknown discretisation scheme " << schemeName
<< endl << endl
<< "Valid schemes are :" << endl
<< MeshFluxConstructorTablePtr_->toc()
<< MeshFluxConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -92,7 +92,7 @@ multivariateSurfaceInterpolationScheme<Type>::New
schemeData
) << "unknown discretisation scheme " << schemeName << endl << endl
<< "Valid schemes are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -56,7 +56,7 @@ tmp<surfaceInterpolationScheme<Type> > surfaceInterpolationScheme<Type>::New
) << "Discretisation scheme not specified"
<< endl << endl
<< "Valid schemes are :" << endl
<< MeshConstructorTablePtr_->toc()
<< MeshConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -83,7 +83,7 @@ tmp<surfaceInterpolationScheme<Type> > surfaceInterpolationScheme<Type>::New
) << "Unknown discretisation scheme " << schemeName
<< endl << endl
<< "Valid schemes are :" << endl
<< MeshConstructorTablePtr_->toc()
<< MeshConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -110,7 +110,7 @@ tmp<surfaceInterpolationScheme<Type> > surfaceInterpolationScheme<Type>::New
) << "Discretisation scheme not specified"
<< endl << endl
<< "Valid schemes are :" << endl
<< MeshConstructorTablePtr_->toc()
<< MeshConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}

@ -138,7 +138,7 @@ tmp<surfaceInterpolationScheme<Type> > surfaceInterpolationScheme<Type>::New
) << "Unknown discretisation scheme " << schemeName
<< endl << endl
<< "Valid schemes are :" << endl
<< MeshFluxConstructorTablePtr_->toc()
<< MeshFluxConstructorTablePtr_->sortedToc()
<< exit(FatalIOError);
}


@ -68,7 +68,7 @@ Foam::autoPtr<Foam::motionDiffusivity> Foam::motionDiffusivity::New
) << "Unknown diffusion type " << diffTypeName
<< endl << endl
<< "Valid diffusion types are :" << endl
<< IstreamConstructorTablePtr_->toc()
<< IstreamConstructorTablePtr_->sortedToc()
<< exit(FatalError);
}



src/genericPatchFields/Make/files
Normal file

@ -0,0 +1,4 @@
genericFvPatchField/genericFvPatchFields.C
genericPointPatchField/genericPointPatchFields.C

LIB = $(FOAM_LIBBIN)/libgenericPatchFields

src/genericPatchFields/Make/options
Normal file

@ -0,0 +1,5 @@
EXE_INC = \
-I$(LIB_SRC)/finiteVolume/lnInclude

LIB_LIBS = \
-lfiniteVolume
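
With the generic patch fields moved into their own libgenericPatchFields library (built by the two new files above), an application or utility that should tolerate unknown boundary-condition types presumably has to load that library at run time. A minimal sketch, assuming the usual controlDict "libs" mechanism; the entry is illustrative only and not part of this commit:

libs
(
    "libgenericPatchFields.so"    // run-time load of the new library
);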

@ -26,7 +26,8 @@ Class
Foam::genericFvPatchField

Description
Foam::genericFvPatchField
A generic version of calculatedFvPatchField, useful as a fallback for
handling unknown patch types.

SourceFiles
genericFvPatchField.C
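
The fallback described above matters mostly when a case is read by a tool that does not link the concrete boundary-condition type. A sketch of such a boundaryField entry, assuming a hypothetical type name unknown to the reading application; with the generic fallback loaded, the entries would be expected to be carried through and written back unchanged rather than rejected:

inlet
{
    type            myCustomInletVelocity;   // hypothetical BC type, not compiled into the reader
    value           uniform (0 0 0);
}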

@ -26,7 +26,8 @@ Class
Foam::genericPointPatchField

Description
Foam::genericPointPatchField
A generic version of calculatedPointPatchField, useful as a fallback for
handling unknown patch types.

SourceFiles
genericPointPatchField.C

@ -74,7 +74,7 @@ Foam::autoPtr<Foam::injectorType> Foam::injectorType::New
<< injectorTypeName
<< ", constructor not in hash table" << endl << endl
<< " Valid injector types are :" << endl;
Info<< dictionaryConstructorTablePtr_->toc() << abort(FatalError);
Info<< dictionaryConstructorTablePtr_->sortedToc() << abort(FatalError);
}

return autoPtr<injectorType>(cstrIter()(t, dict));

@ -61,7 +61,7 @@ autoPtr<atomizationModel> atomizationModel::New
<< atomizationModelType
<< ", constructor not in hash table" << endl << endl
<< " Valid atomizationModel types are :" << endl;
Info<< dictionaryConstructorTablePtr_->toc() << abort(FatalError);
Info<< dictionaryConstructorTablePtr_->sortedToc() << abort(FatalError);
}

return autoPtr<atomizationModel>(cstrIter()(dict, sm));

@ -62,7 +62,7 @@ autoPtr<breakupModel> breakupModel::New
<< breakupModelType
<< ", constructor not in hash table" << endl << endl
<< " Valid breakupModel types are :" << endl;
Info<< dictionaryConstructorTablePtr_->toc() << abort(FatalError);
Info<< dictionaryConstructorTablePtr_->sortedToc() << abort(FatalError);
}

return autoPtr<breakupModel>(cstrIter()(dict, sm));

@ -67,7 +67,7 @@ autoPtr<collisionModel> collisionModel::New
<< ", constructor not in hash table" << endl << endl
<< " Valid collisionModel types are :"
<< endl;
Info<< dictionaryConstructorTablePtr_->toc()
Info<< dictionaryConstructorTablePtr_->sortedToc()
<< abort(FatalError);
}

@ -62,7 +62,7 @@ autoPtr<dispersionModel> dispersionModel::New
<< dispersionModelType
<< ", constructor not in hash table" << endl << endl
<< " Valid dispersionModel types are :" << endl;
Info<< dictionaryConstructorTablePtr_->toc() << abort(FatalError);
Info<< dictionaryConstructorTablePtr_->sortedToc() << abort(FatalError);
}

return autoPtr<dispersionModel>(cstrIter()(dict, sm));

@ -60,7 +60,7 @@ autoPtr<dragModel> dragModel::New
<< dragModelType
<< ", constructor not in hash table" << endl << endl
<< " Valid dragModel types are :" << endl;
Info<< dictionaryConstructorTablePtr_->toc() << abort(FatalError);
Info<< dictionaryConstructorTablePtr_->sortedToc() << abort(FatalError);
}

return autoPtr<dragModel>(cstrIter()(dict));

@ -62,7 +62,7 @@ autoPtr<evaporationModel> evaporationModel::New
<< evaporationModelType
<< ", constructor not in hash table" << endl << endl
<< " Valid evaporationModel types are :" << endl;
Info<< dictionaryConstructorTablePtr_->toc() << abort(FatalError);
Info<< dictionaryConstructorTablePtr_->sortedToc() << abort(FatalError);
}

return autoPtr<evaporationModel>(cstrIter()(dict));

Some files were not shown because too many files have changed in this diff.