Compare commits

3 Commits: feature-st... → OpenFOAM-v...

| Author | SHA1 | Date |
|---|---|---|
|  | a6e826bd55 |  |
|  | fbf00d6bf2 |  |
|  | 70874860b9 |  |
@@ -18,6 +18,6 @@ dimensionedScalar rho("rho", dimDensity, transportProperties);

scalar MaxCo =
    max(mesh.surfaceInterpolation::deltaCoeffs()*c0).value()
   *runTime.deltaTValue();
   *runTime.deltaT().value();

Info<< "Max acoustic Courant Number = " << MaxCo << endl;

@@ -1,5 +1,5 @@
if (adjustTimeStep)
{
    runTime.setDeltaT(min(dtChem, maxDeltaT));
    Info<< "deltaT = " << runTime.deltaTValue() << endl;
    Info<< "deltaT = " << runTime.deltaT().value() << endl;
}

@@ -1,3 +1,3 @@
dtChem = chemistry.solve(runTime.deltaTValue());
dtChem = chemistry.solve(runTime.deltaT().value());
scalar Qdot = chemistry.Qdot()()[0]/rho[0];
integratedHeat += Qdot*runTime.deltaTValue();
integratedHeat += Qdot*runTime.deltaT().value();

@@ -17,7 +17,7 @@ tmp<GeometricField<Type, fvsPatchField, surfaceMesh>> interpolate
        vf,
        dir,
        "reconstruct("
      + (reconFieldName.empty() ? vf.name() : reconFieldName)
      + (reconFieldName != word::null ? reconFieldName : vf.name())
      + ')'
    )
);

@@ -7,7 +7,7 @@
       *mag(aMesh.edgeInterpolation::deltaCoeffs())
       /rhol
        )
    ).value()*runTime.deltaTValue();
    ).value()*runTime.deltaT().value();

    Info<< "Max Capillary Courant Number = " << CoNumSigma << '\n' << endl;
}

@@ -47,10 +47,10 @@ if (aMesh.nInternalEdges())
    );

    CoNum = max(SfUfbyDelta/aMesh.magLe())
        .value()*runTime.deltaTValue();
        .value()*runTime.deltaT().value();

    meanCoNum = (sum(SfUfbyDelta)/sum(aMesh.magLe()))
        .value()*runTime.deltaTValue();
        .value()*runTime.deltaT().value();

    velMag = max(mag(phis)/aMesh.magLe()).value();
}

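The hunks above differ only in how the scalar time step is obtained. As a minimal sketch, assuming just the Time and field names already used in these snippets, the two expressions being swapped read the same value:

// Illustrative sketch only (not part of the diff): both forms are assumed to
// return the current time step as a plain scalar, as used in the hunks above.
const scalar dt1 = runTime.deltaTValue();      // direct scalar accessor
const scalar dt2 = runTime.deltaT().value();   // dimensionedScalar, then its raw value
// e.g. an acoustic Courant-style number, as in the first hunk:
scalar MaxCo = max(mesh.surfaceInterpolation::deltaCoeffs()*c0).value()*dt1;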
@@ -292,7 +292,8 @@ updateCoeffs()

    // Since we're inside initEvaluate/evaluate there might be processor
    // comms underway. Change the tag we use.
    const int oldTag = UPstream::incrMsgType();
    int oldTag = UPstream::msgType();
    UPstream::msgType() = oldTag+1;

    // Get the coupling information from the mappedPatchBase
    const label patchi = patch().index();
@@ -470,9 +471,10 @@ updateCoeffs()
            << regionTypeNames_ << nl << exit(FatalError);
    }

    UPstream::msgType(oldTag);  // Restore tag

    mixedFvPatchScalarField::updateCoeffs();

    // Restore tag
    UPstream::msgType() = oldTag;
}

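A minimal sketch of the message-tag save/restore pattern that the two updateCoeffs() hunks above rearrange, using only the UPstream calls that appear in the diff (whether the one-line incrMsgType()/msgType(oldTag) variants are available depends on the OpenFOAM version being compared):

// Illustrative sketch only: bump the parallel message tag for the coupled
// exchange, then put the previous tag back afterwards.
int oldTag = UPstream::msgType();   // remember the tag currently in use
UPstream::msgType() = oldTag + 1;   // use a fresh tag while comms may be underway

// ... exchange data with the coupled patch ...

UPstream::msgType() = oldTag;       // restore the previous tag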
@@ -49,11 +49,11 @@ if (adjustTimeStep)
        (
            min
            (
                min(maxCo/CoNum, maxDi/DiNum)*runTime.deltaTValue(),
                min(maxCo/CoNum, maxDi/DiNum)*runTime.deltaT().value(),
                min(runTime.deltaTValue(), maxDeltaT)
            )
        );
        Info<< "deltaT = " << runTime.deltaTValue() << endl;
        Info<< "deltaT = " << runTime.deltaT().value() << endl;
    }
}

@@ -59,12 +59,12 @@ if (adjustTimeStep)
        (
            min
            (
                min(deltaTFluid, maxDeltaTSolid)*runTime.deltaTValue(),
                min(deltaTFluid, maxDeltaTSolid)*runTime.deltaT().value(),
                maxDeltaT
            )
        );

        Info<< "deltaT = " << runTime.deltaTValue() << endl;
        Info<< "deltaT = " << runTime.deltaT().value() << endl;
    }

// ************************************************************************* //

@@ -86,7 +86,6 @@ VoFPatchTransfer::VoFPatchTransfer
    wordRes patchNames;
    if (coeffDict_.readIfPresent("patches", patchNames))
    {
        // Can also use pbm.indices(), but no warnings...
        patchIDs_ = pbm.patchSet(patchNames).sortedToc();

        Info<< " applying to " << patchIDs_.size() << " patches:" << nl;

@@ -1075,7 +1075,7 @@ void Foam::multiphaseMixtureThermo::solveAlphas

        MULES::limit
        (
            1.0/mesh_.time().deltaTValue(),
            1.0/mesh_.time().deltaT().value(),
            geometricOneField(),
            alpha,
            phi_,

@@ -699,7 +699,7 @@ void Foam::radiation::laserDTRM::calculate()
    scalar totalQ = gSum(Q_.primitiveFieldRef()*mesh_.V());
    Info << "Total energy absorbed [W]: " << totalQ << endl;

    if (mesh_.time().writeTime())
    if (mesh_.time().outputTime())
    {
        reflectingCellsVol.write();
        nHat.write();

@@ -52,9 +52,7 @@ namespace Foam
const Foam::volScalarField&
Foam::radiation::localDensityAbsorptionEmission::alpha(word alphaName) const
{
    const volScalarField* ptr = mesh_.cfindObject<volScalarField>(alphaName);

    if (!ptr)
    if (!mesh_.foundObject<volScalarField>(alphaName))
    {
        FatalErrorInFunction
            << "Unable to retrieve density field " << alphaName << " from "
@@ -62,7 +60,7 @@ Foam::radiation::localDensityAbsorptionEmission::alpha(word alphaName) const
            << exit(FatalError);
    }

    return *ptr;
    return mesh_.lookupObject<volScalarField>(alphaName);
}

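The hunk above swaps a single pointer-returning registry lookup for a found-then-lookup pair. A minimal sketch of the two styles, using only the objectRegistry calls shown in the diff (the local name fld is illustrative):

// Illustrative sketch only: two ways to fetch a registered volScalarField.

// Style 1: one lookup, then test the returned pointer.
const volScalarField* ptr = mesh_.cfindObject<volScalarField>(alphaName);
if (!ptr)
{
    // not registered: issue FatalErrorInFunction, as in the hunk above
}

// Style 2: test for existence first, then look the field up again by name.
if (!mesh_.foundObject<volScalarField>(alphaName))
{
    // not registered: issue FatalErrorInFunction
}
const volScalarField& fld = mesh_.lookupObject<volScalarField>(alphaName);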
@@ -124,7 +124,7 @@ Foam::temperaturePhaseChangeTwoPhaseMixtures::constant::mDot() const
       * max(TSat - T, T0)
    );

    if (mesh_.time().writeTime())
    if (mesh_.time().outputTime())
    {
        mDotC.write();
        mDotE.write();

@@ -96,7 +96,7 @@
    MULES::limiter
    (
        allLambda,
        1.0/runTime.deltaTValue(),
        1.0/runTime.deltaT().value(),
        geometricOneField(),
        alpha1,
        alphaPhi1BD,
@@ -164,7 +164,7 @@
    MULES::limiter
    (
        allLambda,
        1.0/runTime.deltaTValue(),
        1.0/runTime.deltaT().value(),
        geometricOneField(),
        alpha2,
        alphaPhi2BD,

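In the MULES hunks above and below, the numeric argument whose spelling changes is the reciprocal of the current time step handed to the limiter. A minimal sketch (the local name rDeltaT is illustrative, not from the diff):

// Illustrative sketch only: the 1.0/deltaT argument passed to
// MULES::limiter / MULES::limit in the surrounding hunks.
const scalar rDeltaT = 1.0/runTime.deltaTValue();
// equivalently: const scalar rDeltaT = 1.0/runTime.deltaT().value();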
@@ -628,7 +628,7 @@ void Foam::multiphaseMixture::solveAlphas

        MULES::limit
        (
            1.0/mesh_.time().deltaTValue(),
            1.0/mesh_.time().deltaT().value(),
            geometricOneField(),
            alpha,
            phi_,

@@ -20,7 +20,7 @@
    IOobject rhoIO
    (
        "rho",
        Time::timeName(0),
        runTime.timeName(0),
        mesh,
        IOobject::NO_READ,
        IOobject::NO_WRITE
@@ -75,7 +75,7 @@
    IOobject EHeader
    (
        "E",
        Time::timeName(0),
        runTime.timeName(0),
        mesh,
        IOobject::NO_READ,
        IOobject::NO_WRITE
@@ -127,7 +127,7 @@
    IOobject nuIO
    (
        "nu",
        Time::timeName(0),
        runTime.timeName(0),
        mesh,
        IOobject::NO_READ,
        IOobject::NO_WRITE

@@ -51,7 +51,7 @@ if (thermalStress)
    IOobject CIO
    (
        "C",
        Time::timeName(0),
        runTime.timeName(0),
        mesh,
        IOobject::NO_READ,
        IOobject::NO_WRITE
@@ -106,7 +106,7 @@ if (thermalStress)
    IOobject rhoKIO
    (
        "k",
        Time::timeName(0),
        runTime.timeName(0),
        mesh,
        IOobject::NO_READ,
        IOobject::NO_WRITE
@@ -161,7 +161,7 @@ if (thermalStress)
    IOobject alphaIO
    (
        "alpha",
        Time::timeName(0),
        runTime.timeName(0),
        mesh,
        IOobject::NO_READ,
        IOobject::NO_WRITE

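The six IOobject hunks above differ only in how the instance name for time 0 is produced: one side uses the class-qualified Time::timeName(0), the other calls timeName(0) on the runTime object. A minimal sketch of the header being built, with both instance expressions shown and everything else copied from the hunks:

// Illustrative sketch only: an IOobject for a field registered at time 0.
IOobject rhoIO
(
    "rho",
    Time::timeName(0),       // class-qualified form, as on one side of the diff
    // runTime.timeName(0),  // object-qualified form, as on the other side
    mesh,
    IOobject::NO_READ,
    IOobject::NO_WRITE
);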
@@ -92,10 +92,7 @@ int main(int argc, char *argv[])
    }
    report(buf1);

    buf1.push_back(identity(5));
    buf1.emplace_front(-1000);
    buf1.emplace_back(1000);
    report(buf1);
    buf1.push_back(identity(5)); report(buf1);

    buf1.info(Info);
    Info<< buf1 << nl;

@ -55,7 +55,10 @@ public:
|
||||
i_(i)
|
||||
{}
|
||||
|
||||
const word& keyword() const noexcept { return keyword_; }
|
||||
const word& keyword() const
|
||||
{
|
||||
return keyword_;
|
||||
}
|
||||
|
||||
friend Ostream& operator<<(Ostream& os, const ent& e)
|
||||
{
|
||||
@ -71,27 +74,28 @@ class Scalar
|
||||
|
||||
public:
|
||||
|
||||
static bool verbose;
|
||||
Scalar()
|
||||
:
|
||||
data_(0)
|
||||
{}
|
||||
|
||||
constexpr Scalar() noexcept : data_(0) {}
|
||||
Scalar(scalar val) noexcept : data_(val) {}
|
||||
Scalar(scalar val)
|
||||
:
|
||||
data_(val)
|
||||
{}
|
||||
|
||||
~Scalar()
|
||||
{
|
||||
if (verbose) Info<< "delete Scalar: " << data_ << endl;
|
||||
Info<<"delete Scalar: " << data_ << endl;
|
||||
}
|
||||
|
||||
scalar value() const noexcept { return data_; }
|
||||
scalar& value() noexcept { return data_; }
|
||||
|
||||
friend Ostream& operator<<(Ostream& os, const Scalar& item)
|
||||
friend Ostream& operator<<(Ostream& os, const Scalar& val)
|
||||
{
|
||||
os << item.value();
|
||||
os << val.data_;
|
||||
return os;
|
||||
}
|
||||
};
|
||||
|
||||
bool Scalar::verbose = true;
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
@ -113,7 +117,7 @@ int main(int argc, char *argv[])
|
||||
|
||||
dict.swapDown(dict.first());
|
||||
|
||||
forAllConstIters(dict, iter)
|
||||
forAllConstIter(Dictionary<ent>, dict, iter)
|
||||
{
|
||||
Info<< "element : " << *iter;
|
||||
}
|
||||
@ -153,9 +157,9 @@ int main(int argc, char *argv[])
|
||||
}
|
||||
|
||||
Info<< nl << "scalarDict1: " << endl;
|
||||
forAllConstIters(scalarDict, iter)
|
||||
forAllConstIter(PtrDictionary<Scalar>, scalarDict, iter)
|
||||
{
|
||||
Info<< " = " << *iter << endl;
|
||||
Info<< " = " << iter() << endl;
|
||||
}
|
||||
|
||||
PtrDictionary<Scalar> scalarDict2;
|
||||
@ -165,7 +169,7 @@ int main(int argc, char *argv[])
|
||||
scalarDict2.insert(key, new Scalar(1.3*i));
|
||||
}
|
||||
Info<< nl << "scalarDict2: " << endl;
|
||||
forAllConstIters(scalarDict2, iter)
|
||||
forAllConstIter(PtrDictionary<Scalar>, scalarDict2, iter)
|
||||
{
|
||||
std::cout<< "iter: " << typeid(*iter).name() << '\n';
|
||||
|
||||
|
||||
@ -85,7 +85,7 @@ Description
|
||||
|
||||
fileNameList procDirs
|
||||
(
|
||||
DirLister::dirs(".").csorted<fileName>(matchProcs)
|
||||
DirLister::dirs(".").sorted<fileName>(matchProcs)
|
||||
);
|
||||
}
|
||||
\endcode
|
||||
@ -206,11 +206,11 @@ public:
|
||||
|
||||
//- Return a complete list of names, sorted in natural order
|
||||
template<class StringType=Foam::word>
|
||||
List<StringType> csorted() const;
|
||||
List<StringType> sorted() const;
|
||||
|
||||
//- Return complete list of names, sorted in natural order
|
||||
template<class StringType=Foam::word, class UnaryPredicate>
|
||||
List<StringType> csorted
|
||||
List<StringType> sorted
|
||||
(
|
||||
const UnaryPredicate& pred,
|
||||
const bool prune = false
|
||||
|
||||
@ -70,23 +70,23 @@ Foam::List<StringType> Foam::DirLister::list() const
|
||||
|
||||
|
||||
template<class StringType, class UnaryPredicate>
|
||||
Foam::List<StringType> Foam::DirLister::csorted
|
||||
Foam::List<StringType> Foam::DirLister::sorted
|
||||
(
|
||||
const UnaryPredicate& pred,
|
||||
const bool prune
|
||||
) const
|
||||
{
|
||||
List<StringType> list(list<StringType>(pred, prune));
|
||||
Foam::sort(list, stringOps::natural_sort());
|
||||
List<StringType> lst(list<StringType>(pred, prune));
|
||||
sort(lst, stringOps::natural_sort());
|
||||
|
||||
return list;
|
||||
return lst;
|
||||
}
|
||||
|
||||
|
||||
template<class StringType>
|
||||
Foam::List<StringType> Foam::DirLister::csorted() const
|
||||
Foam::List<StringType> Foam::DirLister::sorted() const
|
||||
{
|
||||
return csorted<StringType>(predicates::always());
|
||||
return sorted<StringType>(predicates::always());
|
||||
}
|
||||
|
||||
|
||||
|
||||
@ -162,7 +162,7 @@ int main(int argc, char *argv[])
|
||||
Info<< "dirList: "
|
||||
<< flatOutput
|
||||
(
|
||||
DirLister::dirs(".").csorted<fileName>(relist)
|
||||
DirLister::dirs(".").sorted<fileName>(relist)
|
||||
) << nl;
|
||||
}
|
||||
|
||||
|
||||
@ -202,31 +202,14 @@ int main(int argc, char *argv[])
|
||||
Info<< "get<3>: " << list1.get<3>() << nl;
|
||||
// Will not compile: Info<< "get<4>: " << list1.get<4>() << nl;
|
||||
|
||||
// Test deprecated form
|
||||
label array2[4] = {0, 1, 2, 3};
|
||||
FixedList<label, 4> list2(array2);
|
||||
label a[4] = {0, 1, 2, 3};
|
||||
FixedList<label, 4> list2(a);
|
||||
|
||||
Info<< "list2:" << list2
|
||||
<< " hash:" << FixedList<label, 4>::hasher()(list2) << nl
|
||||
<< " hash:" << Hash<FixedList<label, 4>>()(list2) << nl;
|
||||
|
||||
|
||||
// Test deprecated form
|
||||
SLList<label> sllist3;
|
||||
{
|
||||
sllist3.push_back(0);
|
||||
sllist3.push_back(1);
|
||||
sllist3.push_back(2);
|
||||
sllist3.push_back(3);
|
||||
}
|
||||
FixedList<label, 4> list3(sllist3);
|
||||
|
||||
Info<< "list3:" << list3 << nl;
|
||||
// Test deprecated forms
|
||||
list3 = array2;
|
||||
list2 = sllist3;
|
||||
|
||||
|
||||
// Using FixedList for content too
|
||||
{
|
||||
List<FixedList<label, 4>> twolists{list1, list2};
|
||||
@ -254,8 +237,8 @@ int main(int argc, char *argv[])
|
||||
Info<< "mem: "
|
||||
<< name(list1.data()) << " " << name(list2.data()) << nl;
|
||||
|
||||
Foam::Swap(list1, list2);
|
||||
Info<< "Foam::Swap() function" << nl;
|
||||
Swap(list1, list2);
|
||||
Info<< "The Swap() function" << nl;
|
||||
Info<< "list1: " << list1 << nl
|
||||
<< "list2: " << list2 << nl;
|
||||
|
||||
|
||||
@ -68,7 +68,7 @@ void runSwapTest
|
||||
|
||||
for (label iLoop = 0; iLoop < nLoops; ++iLoop)
|
||||
{
|
||||
Foam::Swap(list1, list2);
|
||||
Swap(list1, list2);
|
||||
}
|
||||
|
||||
Info<< "output 1: " << list1.first() << nl;
|
||||
|
||||
@ -44,28 +44,38 @@ class Scalar
|
||||
|
||||
public:
|
||||
|
||||
static bool verbose;
|
||||
Scalar()
|
||||
:
|
||||
data_(0)
|
||||
{}
|
||||
|
||||
constexpr Scalar() noexcept : data_(0) {}
|
||||
Scalar(scalar val) noexcept : data_(val) {}
|
||||
Scalar(scalar val)
|
||||
:
|
||||
data_(val)
|
||||
{}
|
||||
|
||||
~Scalar()
|
||||
{
|
||||
if (verbose) Info<< "delete Scalar: " << data_ << endl;
|
||||
Info<<"delete Scalar: " << data_ << endl;
|
||||
}
|
||||
|
||||
const scalar& value() const noexcept { return data_; }
|
||||
scalar& value() noexcept { return data_; }
|
||||
|
||||
friend Ostream& operator<<(Ostream& os, const Scalar& item)
|
||||
const scalar& value() const
|
||||
{
|
||||
os << item.value();
|
||||
return data_;
|
||||
}
|
||||
|
||||
scalar& value()
|
||||
{
|
||||
return data_;
|
||||
}
|
||||
|
||||
friend Ostream& operator<<(Ostream& os, const Scalar& val)
|
||||
{
|
||||
os << val.data_;
|
||||
return os;
|
||||
}
|
||||
};
|
||||
|
||||
bool Scalar::verbose = true;
|
||||
|
||||
|
||||
template<class T>
|
||||
void printTable(const HashPtrTable<T>& table)
|
||||
@ -119,9 +129,6 @@ int main()
|
||||
myTable.set("natlog", new double(2.718282));
|
||||
myTable.insert("sqrt2", autoPtr<double>::New(1.414214));
|
||||
myTable.insert("euler", autoPtr<double>::New(0.577216));
|
||||
myTable.set("def_0", nullptr);
|
||||
myTable.emplace_set("def_1", 123);
|
||||
myTable.emplace_set("def_2", 456);
|
||||
|
||||
HashTable<std::unique_ptr<double>> myTable1;
|
||||
|
||||
@ -139,14 +146,6 @@ int main()
|
||||
Info<< "Initial table" << nl;
|
||||
printTable(myTable);
|
||||
|
||||
myTable.try_emplace("def_0", 1000); // was nullptr, now value
|
||||
myTable.try_emplace("def_1", 1001); // no-op
|
||||
myTable.try_emplace("def_2", 1002); // no-op;
|
||||
myTable.try_emplace("def_3", 1003); // was non-existent, now value
|
||||
|
||||
Info<< "after try_emplace" << nl;
|
||||
printTable(myTable);
|
||||
|
||||
Info<< "print" << nl;
|
||||
Info<< myTable2 << nl;
|
||||
|
||||
|
||||
@ -75,8 +75,8 @@ int main()
|
||||
}
|
||||
|
||||
|
||||
Info<< "\ntable1 csorted() :" << endl;
|
||||
for (const auto& iter : table1.csorted())
|
||||
Info<< "\ntable1 sorted() :" << endl;
|
||||
for (const auto& iter : table1.sorted())
|
||||
{
|
||||
Info<< " " << iter.key() << " => " << iter.val() << nl;
|
||||
}
|
||||
@ -100,7 +100,7 @@ int main()
|
||||
}
|
||||
|
||||
Info<< "\nInplace modified - via sorted() access :" << endl;
|
||||
for (const auto& iter : table1.csorted())
|
||||
for (const auto& iter : table1.sorted())
|
||||
{
|
||||
Info<< " " << iter.key() << " => " << iter.val() << nl;
|
||||
}
|
||||
@ -366,8 +366,8 @@ int main()
|
||||
Info<< nl << "input values" << nl;
|
||||
Info<<"table1 = " << table1 << nl <<"table2 = " << table2 << nl;
|
||||
|
||||
Info<<"std::swap function" << nl;
|
||||
std::swap(table1, table2);
|
||||
Info<<"global Swap function" << nl;
|
||||
Swap(table1, table2);
|
||||
Info<<"table1 = " << table1 << nl <<"table2 = " << table2 << nl;
|
||||
|
||||
Info<<"swap method" << nl;
|
||||
|
||||
@ -5,7 +5,7 @@
|
||||
\\ / A nd | www.openfoam.com
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2017-2023 OpenCFD Ltd.
|
||||
Copyright (C) 2017-2022 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM.
|
||||
@ -290,14 +290,14 @@ int main(int argc, char *argv[])
|
||||
Info<< "Time: " << runTime.timeName() << nl;
|
||||
|
||||
report(objects);
|
||||
report(objects.csorted());
|
||||
report(objects.sorted());
|
||||
|
||||
report(objects.csorted<volScalarField>());
|
||||
report(objects.csorted<volVectorField>());
|
||||
report(objects.sorted<volScalarField>());
|
||||
report(objects.sorted<volVectorField>());
|
||||
|
||||
// Extra checks
|
||||
report<volScalarField>(objects.csorted<volScalarField>());
|
||||
report<volScalarField>(objects.csorted<volVectorField>());
|
||||
report<volScalarField>(objects.sorted<volScalarField>());
|
||||
report<volScalarField>(objects.sorted<volVectorField>());
|
||||
|
||||
|
||||
findObjectTest(objects);
|
||||
|
||||
@ -46,26 +46,25 @@ class Scalar
|
||||
{
|
||||
public:
|
||||
|
||||
// static bool verbose;
|
||||
|
||||
scalar data_;
|
||||
|
||||
Scalar() : data_(0) {}
|
||||
Scalar(scalar val) : data_(val) {}
|
||||
Scalar()
|
||||
:
|
||||
data_(0)
|
||||
{}
|
||||
|
||||
// ~Scalar() {}
|
||||
Scalar(scalar s)
|
||||
:
|
||||
data_(s)
|
||||
{}
|
||||
|
||||
scalar value() const noexcept { return data_; }
|
||||
scalar& value() noexcept { return data_; }
|
||||
|
||||
friend Ostream& operator<<(Ostream& os, const Scalar& item)
|
||||
friend Ostream& operator<<(Ostream& os, const Scalar& s)
|
||||
{
|
||||
os << item.value();
|
||||
os << s.data_;
|
||||
return os;
|
||||
}
|
||||
};
|
||||
|
||||
// bool Scalar::verbose = true;
|
||||
};
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
|
||||
@ -5,7 +5,7 @@
|
||||
\\ / A nd | www.openfoam.com
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2017-2023 OpenCFD Ltd.
|
||||
Copyright (C) 2017-2021 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM.
|
||||
@ -27,7 +27,6 @@ Description
|
||||
|
||||
\*---------------------------------------------------------------------------*/
|
||||
|
||||
#include "argList.H"
|
||||
#include "ListStream.H"
|
||||
#include "UListStream.H"
|
||||
#include "wordList.H"
|
||||
@ -156,378 +155,35 @@ void doTest
|
||||
}
|
||||
|
||||
|
||||
void printToken(const label index, const token& tok)
|
||||
{
|
||||
Info<< " " << index << " " << tok.name();
|
||||
if (tok.good())
|
||||
{
|
||||
Info<< " : " << tok;
|
||||
}
|
||||
Info<< nl;
|
||||
}
|
||||
|
||||
|
||||
template<class BUF>
|
||||
void testWalk1
|
||||
(
|
||||
const std::string& name,
|
||||
const BUF& input,
|
||||
const int verbose
|
||||
)
|
||||
{
|
||||
Info<< "tokenized " << name.c_str() << ":" << nl
|
||||
<< "====" << nl;
|
||||
toString(Info, input)
|
||||
<< nl
|
||||
<< "====" << endl;
|
||||
|
||||
ITstream is(input);
|
||||
Info<< is.size() << " tokens" << endl;
|
||||
for (is.rewind(); !is.eof(); is.skip())
|
||||
{
|
||||
printToken(is.tokenIndex(), is.currentToken());
|
||||
}
|
||||
Info<< nl;
|
||||
|
||||
Info<< "every other token:" << nl;
|
||||
for (is.seek(1); is.nRemainingTokens(); is.skip(2))
|
||||
{
|
||||
printToken(is.tokenIndex(), is.currentToken());
|
||||
}
|
||||
|
||||
for (int i : { 3, 7, 11, 20 })
|
||||
{
|
||||
Info<< "peekToken: ";
|
||||
printToken(i, is.peekToken(i));
|
||||
}
|
||||
|
||||
labelRange range(is.size()-2, 2);
|
||||
Info<< nl
|
||||
<< "remove: " << range << " of 0/" << is.size() << " tokens" << endl;
|
||||
is.remove(range);
|
||||
|
||||
Info<< "Now " << is.size() << " tokens" << endl;
|
||||
for (is.rewind(); !is.eof(); is.skip())
|
||||
{
|
||||
printToken(is.tokenIndex(), is.currentToken());
|
||||
}
|
||||
|
||||
range.reset(10, 3);
|
||||
Info<< nl
|
||||
<< "remove: " << range << " of 0/" << is.size() << " tokens" << endl;
|
||||
is.remove(range);
|
||||
|
||||
Info<< "Now " << is.size() << " tokens" << endl;
|
||||
for (is.rewind(); !is.eof(); is.skip())
|
||||
{
|
||||
printToken(is.tokenIndex(), is.currentToken());
|
||||
}
|
||||
|
||||
Info<< nl;
|
||||
}
|
||||
|
||||
|
||||
void testRewrite(const std::string& input, const int verbose)
|
||||
{
|
||||
Info<< "tokens" << nl
|
||||
<< "====" << nl;
|
||||
toString(Info, input)
|
||||
<< nl
|
||||
<< "====" << endl;
|
||||
|
||||
ITstream is(input);
|
||||
Info<< is.size() << " tokens" << endl;
|
||||
|
||||
if (verbose)
|
||||
{
|
||||
for (is.rewind(); !is.eof(); is.skip())
|
||||
{
|
||||
printToken(is.tokenIndex(), is.currentToken());
|
||||
}
|
||||
Info<< nl;
|
||||
}
|
||||
else
|
||||
{
|
||||
Info<< "==>";
|
||||
for (const token& tok : is)
|
||||
{
|
||||
Info<< ' ' << tok;
|
||||
}
|
||||
Info<< nl;
|
||||
}
|
||||
|
||||
Info<< nl
|
||||
<< "removing sub-dictionary tokens" << nl;
|
||||
|
||||
for (is.rewind(); !is.eof(); is.skip())
|
||||
{
|
||||
if (is.currentToken().isPunctuation(token::BEGIN_BLOCK))
|
||||
{
|
||||
labelRange slice(is.tokenIndex(), 0);
|
||||
|
||||
#if 0
|
||||
// This is a bad way to remove things since we lose the parse
|
||||
// point!
|
||||
for (/*nil*/; !is.eof(); is.skip())
|
||||
{
|
||||
if (is.currentToken().isPunctuation(token::END_BLOCK))
|
||||
{
|
||||
slice.size() = (is.tokenIndex() - slice.start()) + 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
#else
|
||||
for (label toki = is.tokenIndex()+1; toki < is.size(); ++toki)
|
||||
{
|
||||
if (is.peekToken(toki).isPunctuation(token::END_BLOCK))
|
||||
{
|
||||
slice.size() = (toki - slice.start()) + 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
Info<< "remove range: " << slice
|
||||
<< " currentIndex: " << is.tokenIndex() << '/' << is.size()
|
||||
// NB peekToken handles out-of-range
|
||||
<< " token: " << is.peekToken(is.tokenIndex()) << nl;
|
||||
|
||||
const label nRemoved = is.remove(slice);
|
||||
|
||||
Info<< "remove " << nRemoved
|
||||
<< " new current: " << is.tokenIndex() << '/' << is.size()
|
||||
// NB peekToken handles out-of-range
|
||||
<< " token: " << is.peekToken(is.tokenIndex()) << nl;
|
||||
|
||||
Info<< "==>";
|
||||
for (const token& tok : is)
|
||||
{
|
||||
Info<< ' ' << tok;
|
||||
}
|
||||
Info<< nl << nl;
|
||||
}
|
||||
}
|
||||
Info<< nl;
|
||||
}
|
||||
|
||||
|
||||
void testRemoveDict(const std::string& input, const int verbose)
|
||||
{
|
||||
Info<< "tokens" << nl
|
||||
<< "====" << nl;
|
||||
toString(Info, input)
|
||||
<< nl
|
||||
<< "====" << endl;
|
||||
|
||||
ITstream is(input);
|
||||
Info<< is.size() << " tokens" << endl;
|
||||
|
||||
if (verbose)
|
||||
{
|
||||
for (is.rewind(); !is.eof(); is.skip())
|
||||
{
|
||||
printToken(is.tokenIndex(), is.currentToken());
|
||||
}
|
||||
Info<< nl;
|
||||
}
|
||||
else
|
||||
{
|
||||
Info<< "==>";
|
||||
for (const token& tok : is)
|
||||
{
|
||||
Info<< ' ' << tok;
|
||||
}
|
||||
Info<< nl;
|
||||
}
|
||||
|
||||
for (label pos = 0; pos < is.size(); /*nil*/)
|
||||
{
|
||||
labelRange slice
|
||||
(
|
||||
is.find(token::BEGIN_BLOCK, token::END_BLOCK, pos)
|
||||
);
|
||||
|
||||
if (slice.good())
|
||||
{
|
||||
pos = slice.end_value();
|
||||
|
||||
tokenList::subList substream(is.slice(slice));
|
||||
|
||||
Info<< " dict " << slice << " ==>";
|
||||
for (const token& tok : substream)
|
||||
{
|
||||
Info<< ' ' << tok;
|
||||
}
|
||||
Info<< nl;
|
||||
}
|
||||
else
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Info<< nl
|
||||
<< "removing sub-dictionary tokens" << nl;
|
||||
|
||||
for (is.rewind(); !is.eof(); is.skip())
|
||||
{
|
||||
if (is.currentToken().isPunctuation(token::BEGIN_BLOCK))
|
||||
{
|
||||
labelRange slice
|
||||
(
|
||||
is.find(token::BEGIN_BLOCK, token::END_BLOCK, is.tokenIndex())
|
||||
);
|
||||
|
||||
if (slice.good())
|
||||
{
|
||||
ITstream substream(is.extract(slice));
|
||||
|
||||
Info<< "got " << slice << " ==>";
|
||||
for (const token& tok : substream)
|
||||
{
|
||||
Info<< ' ' << tok;
|
||||
}
|
||||
Info<< nl;
|
||||
|
||||
dictionary dict(substream);
|
||||
|
||||
Info<< "tokenIndex: " << is.tokenIndex() << nl;
|
||||
Info<< "sub-dict " << dict << nl;
|
||||
|
||||
Info<< "remove range: " << slice
|
||||
<< " currentIndex: " << is.tokenIndex() << '/' << is.size()
|
||||
<< " token: " << is.peekToken(is.tokenIndex()) << nl;
|
||||
|
||||
const label nRemoved = is.remove(slice);
|
||||
|
||||
Info<< "remove " << nRemoved
|
||||
<< " new current: " << is.tokenIndex() << '/' << is.size()
|
||||
<< " token: " << is.peekToken(is.tokenIndex()) << nl;
|
||||
|
||||
Info<< "==>";
|
||||
for (const token& tok : is)
|
||||
{
|
||||
Info<< ' ' << tok;
|
||||
}
|
||||
Info<< nl << nl;
|
||||
|
||||
// Reposition the parse point
|
||||
is.seek(slice.start());
|
||||
is.skip(-1);
|
||||
|
||||
Info<< "continue after " << is.tokenIndex()
|
||||
<< " : " << is.peekToken(is.tokenIndex()) << nl;
|
||||
}
|
||||
}
|
||||
}
|
||||
Info<< nl;
|
||||
}
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
// Main program:
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
argList::noBanner();
|
||||
argList::noParallel();
|
||||
argList::addVerboseOption("additional verbosity");
|
||||
argList::addBoolOption("basic", "basic tests");
|
||||
argList::addBoolOption("rewrite", "test rewriting only");
|
||||
argList::addBoolOption("remove-dict", "test rewriting only");
|
||||
const char* charInput =
|
||||
"( const char input \"string\" to tokenize )\n"
|
||||
"List<label> 5(0 1 2 3 4);";
|
||||
|
||||
argList args(argc, argv);
|
||||
string stringInput("( string ; input \"string\" to tokenize )");
|
||||
|
||||
if
|
||||
List<char> listInput
|
||||
(
|
||||
!args.found("basic")
|
||||
&& !args.found("rewrite")
|
||||
&& !args.found("remove-dict")
|
||||
)
|
||||
{
|
||||
Info<< "No test options specified!" << nl << nl;
|
||||
}
|
||||
|
||||
if (args.found("basic"))
|
||||
{
|
||||
const char* charInput =
|
||||
"( const char input \"string\" to tokenize )\n"
|
||||
"List<label> 5(0 1 2 3 4);";
|
||||
|
||||
string stringInput("( string ; input \"string\" to tokenize )");
|
||||
|
||||
List<char> listInput
|
||||
ListOps::create<char>
|
||||
(
|
||||
ListOps::create<char>
|
||||
(
|
||||
stringInput.cbegin(),
|
||||
stringInput.cend(),
|
||||
Foam::identityOp{}
|
||||
)
|
||||
);
|
||||
stringInput.cbegin(),
|
||||
stringInput.cend(),
|
||||
Foam::identityOp{}
|
||||
)
|
||||
);
|
||||
|
||||
doTest("empty", "", true, true);
|
||||
doTest("empty", "", true, true);
|
||||
|
||||
doTest("char*", charInput, true, true);
|
||||
doTest("string", stringInput, true);
|
||||
doTest("List<char>", listInput, true);
|
||||
doTest("char*", charInput, true, true);
|
||||
doTest("string", stringInput, true);
|
||||
doTest("List<char>", listInput, true);
|
||||
|
||||
reverse(listInput);
|
||||
doTest("List<char>", listInput, true);
|
||||
}
|
||||
|
||||
if (args.found("rewrite"))
|
||||
{
|
||||
testWalk1
|
||||
(
|
||||
"std::string",
|
||||
"( string ; input \"string\" to tokenize )"
|
||||
"{ other entry; value 100; value2 200; }"
|
||||
, args.verbose()
|
||||
);
|
||||
|
||||
testRewrite
|
||||
(
|
||||
"some entry ( string1 ; )"
|
||||
"{ sub dict1; value 100; value2 200; }"
|
||||
"other entry ( string2 ; )"
|
||||
"{ sub dict2; value 100; value2 200; }"
|
||||
"{ sub dict3; value 100; value2 200; }"
|
||||
"trailing entry"
|
||||
, args.verbose()
|
||||
);
|
||||
}
|
||||
|
||||
if (args.found("remove-dict"))
|
||||
{
|
||||
testRemoveDict
|
||||
(
|
||||
"some entry ( string1 ; )"
|
||||
"{ sub dict1; value 100; value2 200; }"
|
||||
"other entry ( string2 ; )"
|
||||
"{ sub dict2; value 100; value2 200; }"
|
||||
"{ sub dict3; value 100; value2 200; }"
|
||||
"trailing entry"
|
||||
, args.verbose()
|
||||
);
|
||||
|
||||
testRemoveDict
|
||||
(
|
||||
"some entry no dictionary"
|
||||
, args.verbose()
|
||||
);
|
||||
testRemoveDict
|
||||
(
|
||||
"{ leading dict; } last-stuff"
|
||||
, args.verbose()
|
||||
);
|
||||
testRemoveDict
|
||||
(
|
||||
"first-stuff { trailing dict; }"
|
||||
, args.verbose()
|
||||
);
|
||||
}
|
||||
reverse(listInput);
|
||||
doTest("List<char>", listInput, true);
|
||||
|
||||
Info<< "\nEnd\n" << endl;
|
||||
|
||||
|
||||
@ -183,7 +183,9 @@ int main(int argc, char *argv[])
|
||||
Pout<<"recv: " << flatOutput(recv) << endl;
|
||||
}
|
||||
|
||||
UPstream::barrier(UPstream::worldComm);
|
||||
// MPI barrier
|
||||
bool barrier = true;
|
||||
Pstream::broadcast(barrier);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@ -1,3 +0,0 @@
|
||||
Test-ListRead1.C
|
||||
|
||||
EXE = $(FOAM_USER_APPBIN)/Test-ListRead1
|
||||
@ -1,2 +0,0 @@
|
||||
/* EXE_INC = */
|
||||
/* EXE_LIBS = */
|
||||
@ -1,234 +0,0 @@
|
||||
/*---------------------------------------------------------------------------*\
|
||||
========= |
|
||||
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
|
||||
\\ / O peration |
|
||||
\\ / A nd | www.openfoam.com
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2023 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM.
|
||||
|
||||
OpenFOAM is free software: you can redistribute it and/or modify it
|
||||
under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
|
||||
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Application
|
||||
Test-ListRead1
|
||||
|
||||
Description
|
||||
List reading
|
||||
|
||||
\*---------------------------------------------------------------------------*/
|
||||
|
||||
#include "OSspecific.H"
|
||||
#include "argList.H"
|
||||
#include "wordRes.H"
|
||||
|
||||
#include "IOstreams.H"
|
||||
#include "Fstream.H"
|
||||
#include "StringStream.H"
|
||||
#include "scalar.H"
|
||||
#include "vector.H"
|
||||
|
||||
#include "labelRange.H"
|
||||
#include "scalarList.H"
|
||||
#include "HashOps.H"
|
||||
#include "ListOps.H"
|
||||
#include "IndirectList.H"
|
||||
#include "SubList.H"
|
||||
#include "SliceList.H"
|
||||
#include "ListPolicy.H"
|
||||
|
||||
#include <list>
|
||||
#include <numeric>
|
||||
#include <functional>
|
||||
|
||||
using namespace Foam;
|
||||
|
||||
label chunkSize = 128;
|
||||
|
||||
template<class T>
|
||||
bool readBracketList(List<T>& list, Istream& is)
|
||||
{
|
||||
is.fatalCheck(FUNCTION_NAME);
|
||||
|
||||
token tok(is);
|
||||
|
||||
is.fatalCheck
|
||||
(
|
||||
"List<T>::readBracketList(Istream&) : reading first token"
|
||||
);
|
||||
|
||||
if (!tok.isPunctuation(token::BEGIN_LIST))
|
||||
{
|
||||
is.putBack(tok);
|
||||
return false;
|
||||
}
|
||||
|
||||
{
|
||||
// "(...)" : read element-wise.
|
||||
// Uses chunk-wise reading to avoid too many re-allocations
|
||||
// and avoids relocation of contiguous memory until all of the reading
|
||||
// is completed. Chunks are wrapped as unique_ptr to ensure proper
|
||||
// cleanup on failure.
|
||||
|
||||
// The choice of chunk-size is somewhat arbitrary...
|
||||
// constexpr label chunkSize = 128;
|
||||
typedef std::unique_ptr<List<T>> chunkType;
|
||||
|
||||
is >> tok;
|
||||
is.fatalCheck(FUNCTION_NAME);
|
||||
|
||||
if (tok.isPunctuation(token::END_LIST))
|
||||
{
|
||||
// Trivial case, an empty list
|
||||
list.clear();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Use all storage
|
||||
//private:// list.resize(list.capacity());
|
||||
|
||||
// Start with a few slots, recover current memory where possible
|
||||
List<chunkType> chunks(16);
|
||||
if (list.empty())
|
||||
{
|
||||
chunks[0] = chunkType(new List<T>(chunkSize));
|
||||
}
|
||||
else
|
||||
{
|
||||
chunks[0] = chunkType(new List<T>(std::move(list)));
|
||||
}
|
||||
|
||||
label nChunks = 1; // Active number of chunks
|
||||
label totalCount = 0; // Total number of elements
|
||||
label localIndex = 0; // Chunk-local index
|
||||
|
||||
InfoErr
|
||||
<< nl << "initial chunk: " << chunks[0]->size() << endl;
|
||||
|
||||
while (!tok.isPunctuation(token::END_LIST))
|
||||
{
|
||||
is.putBack(tok);
|
||||
|
||||
if (chunks[nChunks-1]->size() <= localIndex)
|
||||
{
|
||||
// Increase number of slots (doubling)
|
||||
if (nChunks >= chunks.size())
|
||||
{
|
||||
chunks.resize(2*chunks.size());
|
||||
}
|
||||
|
||||
InfoErr<< "new chunk" << endl;
|
||||
chunks[nChunks] = chunkType(new List<T>(chunkSize));
|
||||
++nChunks;
|
||||
localIndex = 0;
|
||||
}
|
||||
|
||||
is >> chunks[nChunks-1]->operator[](localIndex);
|
||||
++localIndex;
|
||||
++totalCount;
|
||||
|
||||
InfoErr
|
||||
<< " chunk=" << nChunks
|
||||
<< " index=" << localIndex
|
||||
<< " total=" << totalCount << nl;
|
||||
|
||||
is.fatalCheck
|
||||
(
|
||||
"List<T>::readBracketList(Istream&) : "
|
||||
"reading entry"
|
||||
);
|
||||
|
||||
is >> tok;
|
||||
is.fatalCheck(FUNCTION_NAME);
|
||||
}
|
||||
|
||||
// Simple case
|
||||
if (nChunks == 1)
|
||||
{
|
||||
list = std::move(*(chunks[0]));
|
||||
list.resize(totalCount);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Destination
|
||||
//private:// list.setCapacity_nocopy(totalCount);
|
||||
list.resize_nocopy(totalCount);
|
||||
auto dest = list.begin();
|
||||
|
||||
for (label chunki = 0; chunki < nChunks; ++chunki)
|
||||
{
|
||||
List<T> currChunk(std::move(*(chunks[chunki])));
|
||||
chunks[chunki].reset(nullptr);
|
||||
|
||||
const label localLen = min(currChunk.size(), totalCount);
|
||||
|
||||
dest = std::move
|
||||
(
|
||||
currChunk.begin(),
|
||||
currChunk.begin(localLen),
|
||||
dest
|
||||
);
|
||||
|
||||
totalCount -= localLen;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
// Main program:
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
argList::noBanner();
|
||||
argList::noParallel();
|
||||
argList::noFunctionObjects();
|
||||
argList::addOption("chunk-size", "value", "change read chunk size");
|
||||
argList::addArgument("file1 .. fileN");
|
||||
|
||||
argList args(argc, argv, false, true);
|
||||
|
||||
args.readIfPresent("chunk-size", chunkSize);
|
||||
|
||||
Info<< "chunk-size: " << chunkSize << nl;
|
||||
|
||||
if (args.size() <= 1)
|
||||
{
|
||||
InfoErr<< "Provide a file or files to test" << nl;
|
||||
}
|
||||
else
|
||||
{
|
||||
for (label argi=1; argi < args.size(); ++argi)
|
||||
{
|
||||
const auto input = args.get<fileName>(argi);
|
||||
IFstream is(input);
|
||||
|
||||
while (!is.eof())
|
||||
{
|
||||
labelList list;
|
||||
|
||||
readBracketList(list, is);
|
||||
Info<< "read: " << flatOutput(list) << endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// ************************************************************************* //
|
||||
@ -1,17 +0,0 @@
|
||||
(
|
||||
0 1 2 3 4 5 6 7 8 9
|
||||
10 11 12 13 14 15 16 17 18 19
|
||||
20 21 22 23 24 25 26 27 28 29
|
||||
30 31 32 33 34 35 36 37 38 39
|
||||
40 41 42 43 44 45 46 47 48 49
|
||||
)
|
||||
|
||||
(
|
||||
0 1 2 3 4 5 6 7 8 9
|
||||
10 11 12 13 14 15 16 17 18 19
|
||||
20 21 22 23 24 25 26 27 28 29
|
||||
30 31 32 33 34 35 36 37 38 39
|
||||
40 41 42 43 44 45 46 47 48 49
|
||||
)
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
@ -47,29 +47,43 @@ class Scalar
|
||||
|
||||
public:
|
||||
|
||||
static bool verbose;
|
||||
Scalar()
|
||||
:
|
||||
data_(0)
|
||||
{}
|
||||
|
||||
constexpr Scalar() noexcept : data_(0) {}
|
||||
Scalar(scalar val) noexcept : data_(val) {}
|
||||
Scalar(scalar val)
|
||||
:
|
||||
data_(val)
|
||||
{}
|
||||
|
||||
~Scalar()
|
||||
{
|
||||
if (verbose) Info<< "delete Scalar: " << data_ << endl;
|
||||
Info<< "delete Scalar: " << data_ << endl;
|
||||
}
|
||||
|
||||
scalar value() const noexcept { return data_; }
|
||||
scalar& value() noexcept { return data_; }
|
||||
|
||||
autoPtr<Scalar> clone() const { return autoPtr<Scalar>::New(data_); }
|
||||
|
||||
friend Ostream& operator<<(Ostream& os, const Scalar& item)
|
||||
const scalar& value() const
|
||||
{
|
||||
os << item.value();
|
||||
return data_;
|
||||
}
|
||||
|
||||
scalar& value()
|
||||
{
|
||||
return data_;
|
||||
}
|
||||
|
||||
autoPtr<Scalar> clone() const
|
||||
{
|
||||
return autoPtr<Scalar>::New(data_);
|
||||
}
|
||||
|
||||
friend Ostream& operator<<(Ostream& os, const Scalar& val)
|
||||
{
|
||||
os << val.data_;
|
||||
return os;
|
||||
}
|
||||
};
|
||||
|
||||
bool Scalar::verbose = true;
|
||||
|
||||
|
||||
// As per
|
||||
@ -254,22 +268,6 @@ Ostream& report
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
#if 1
|
||||
{
|
||||
DLPtrList<Scalar> llist1;
|
||||
Info<< "emplace_front: " << llist1.emplace_front(100) << nl;
|
||||
Info<< "emplace_front: " << llist1.emplace_front(200) << nl;
|
||||
Info<< "emplace_front: " << llist1.emplace_front(300) << nl;
|
||||
Info<< "emplace_back: " << llist1.emplace_back(500) << nl;
|
||||
|
||||
Info<< "DLPtrList: " << llist1 << endl;
|
||||
|
||||
Scalar::verbose = false;
|
||||
llist1.clear();
|
||||
Scalar::verbose = true;
|
||||
}
|
||||
#endif
|
||||
|
||||
#if 0
|
||||
{
|
||||
DLPtrList<Scalar> llist1;
|
||||
@ -351,20 +349,6 @@ int main(int argc, char *argv[])
|
||||
list2.emplace(i, (10 + 1.3*i));
|
||||
}
|
||||
|
||||
list2.release(5);
|
||||
list2.release(10);
|
||||
|
||||
{
|
||||
// Memory error (with fulldebug): const label len = (list2.size()+2);
|
||||
const label len = list2.size();
|
||||
Info<< "try_emplace " << len << " values" << nl;
|
||||
|
||||
for (label i = 0; i < len; ++i)
|
||||
{
|
||||
list2.try_emplace(i, (50 + 1.3*i));
|
||||
}
|
||||
}
|
||||
|
||||
PtrList<Scalar> listApp;
|
||||
for (label i = 0; i < 5; ++i)
|
||||
{
|
||||
@ -655,7 +639,7 @@ int main(int argc, char *argv[])
|
||||
dynPlanes.set(6, new plane(vector(2,2,1), vector::one));
|
||||
dynPlanes.set(10, new plane(vector(4,5,6), vector::one));
|
||||
|
||||
Info<< "emplaced[12]: "
|
||||
Info<< "emplaced :"
|
||||
<< dynPlanes.emplace(12, vector(3,2,1), vector::one) << endl;
|
||||
|
||||
dynPlanes.emplace_back(Zero, vector::one);
|
||||
|
||||
@ -45,29 +45,28 @@ class Scalar
|
||||
|
||||
public:
|
||||
|
||||
static bool verbose;
|
||||
Scalar()
|
||||
:
|
||||
data_(0)
|
||||
{}
|
||||
|
||||
constexpr Scalar() noexcept : data_(0) {}
|
||||
Scalar(scalar val) noexcept : data_(val) {}
|
||||
Scalar(scalar val)
|
||||
:
|
||||
data_(val)
|
||||
{}
|
||||
|
||||
~Scalar()
|
||||
{
|
||||
if (verbose) Info<< "delete Scalar: " << data_ << endl;
|
||||
Info<<"delete Scalar: " << data_ << endl;
|
||||
}
|
||||
|
||||
const scalar& value() const noexcept { return data_; }
|
||||
scalar& value() noexcept { return data_; }
|
||||
|
||||
autoPtr<Scalar> clone() const { return autoPtr<Scalar>::New(data_); }
|
||||
|
||||
friend Ostream& operator<<(Ostream& os, const Scalar& item)
|
||||
friend Ostream& operator<<(Ostream& os, const Scalar& val)
|
||||
{
|
||||
os << item.value();
|
||||
os << val.data_;
|
||||
return os;
|
||||
}
|
||||
};
|
||||
|
||||
bool Scalar::verbose = true;
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
|
||||
@ -51,7 +51,10 @@ public:
|
||||
i_(i)
|
||||
{}
|
||||
|
||||
const word& keyword() const noexcept { return keyword_; }
|
||||
const word& keyword() const
|
||||
{
|
||||
return keyword_;
|
||||
}
|
||||
|
||||
friend Ostream& operator<<(Ostream& os, const ent& e)
|
||||
{
|
||||
@ -80,7 +83,7 @@ int main(int argc, char *argv[])
|
||||
|
||||
dict.swapDown(dict.first());
|
||||
|
||||
forAllConstIters(dict, iter)
|
||||
forAllConstIter(UDictionary<ent>, dict, iter)
|
||||
{
|
||||
Info<< "element : " << *iter;
|
||||
}
|
||||
|
||||
@ -5,7 +5,7 @@
|
||||
\\ / A nd | www.openfoam.com
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2018-2023 OpenCFD Ltd.
|
||||
Copyright (C) 2018-2022 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM.
|
||||
@ -84,20 +84,6 @@ struct DerivedList : public List<T>
|
||||
};
|
||||
|
||||
|
||||
template<class T>
|
||||
void printInfo(const autoPtr<T>& item, const bool verbose = false)
|
||||
{
|
||||
Info<< "autoPtr good:" << Switch::name(item.good())
|
||||
<< " addr: " << Foam::name(item.get());
|
||||
|
||||
if (verbose && item)
|
||||
{
|
||||
Info<< " content: " << item();
|
||||
}
|
||||
Info<< nl;
|
||||
}
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
// Main program:
|
||||
|
||||
@ -126,17 +112,6 @@ int main(int argc, char *argv[])
|
||||
|
||||
Info<<"move unique to autoPtr: " << *list3 << nl;
|
||||
Info<<"old is " << Switch(bool(list2)) << nl;
|
||||
|
||||
Info<< "before emplace: ";
|
||||
printInfo(list, true);
|
||||
|
||||
list.emplace(4, label(-2));
|
||||
Info<< "after emplace: ";
|
||||
printInfo(list, true);
|
||||
|
||||
list.emplace(2, label(-4));
|
||||
Info<< "after emplace: ";
|
||||
printInfo(list, true);
|
||||
}
|
||||
|
||||
// Confirm that forwarding with move construct actually works as expected
|
||||
|
||||
@ -58,8 +58,8 @@ int main(int argc, char *argv[])
|
||||
|
||||
#include "setRootCase.H"
|
||||
|
||||
// Info<< "Known compound tokens: "
|
||||
// << token::compound::emptyConstructorTablePtr_->sortedToc() << nl;
|
||||
Info<< "Known compound tokens: "
|
||||
<< token::compound::IstreamConstructorTablePtr_->sortedToc() << nl;
|
||||
|
||||
OStringStream ostr;
|
||||
|
||||
@ -79,13 +79,6 @@ int main(int argc, char *argv[])
|
||||
List<char> alphabet(istr);
|
||||
|
||||
Info<< "re-read: " << alphabet << nl;
|
||||
|
||||
// Can assign zero?
|
||||
//Fails: alphabet = char(Zero);
|
||||
alphabet = Foam::zero{};
|
||||
|
||||
// alphabet = '@';
|
||||
Info<< "blanked: " << alphabet << nl;
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
@ -1,3 +0,0 @@
|
||||
Test-compoundToken1.C
|
||||
|
||||
EXE = $(FOAM_USER_APPBIN)/Test-compoundToken1
|
||||
@ -1 +0,0 @@
|
||||
/* EXE_INC = */
|
||||
@ -1,307 +0,0 @@
|
||||
/*---------------------------------------------------------------------------*\
|
||||
========= |
|
||||
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
|
||||
\\ / O peration |
|
||||
\\ / A nd | www.openfoam.com
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2023 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM.
|
||||
|
||||
OpenFOAM is free software: you can redistribute it and/or modify it
|
||||
under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
|
||||
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Description
|
||||
Test token construct assign etc.
|
||||
\*---------------------------------------------------------------------------*/
|
||||
|
||||
#include "argList.H"
|
||||
#include "IOobject.H"
|
||||
#include "IOstreams.H"
|
||||
#include "IFstream.H"
|
||||
#include "StringStream.H"
|
||||
#include "cpuTime.H"
|
||||
#include "labelList.H"
|
||||
#include "DynamicList.H"
|
||||
|
||||
namespace Foam
|
||||
{
|
||||
|
||||
template<class OS>
|
||||
OS& printTypeCode(OS& os, char typeCode)
|
||||
{
|
||||
os << int(static_cast<unsigned char>(typeCode));
|
||||
return os;
|
||||
}
|
||||
|
||||
|
||||
/*---------------------------------------------------------------------------*\
|
||||
Class IFstream Declaration
|
||||
\*---------------------------------------------------------------------------*/
|
||||
|
||||
bool test_pending = false;
|
||||
|
||||
class IFstreamDelayed
|
||||
:
|
||||
public IFstream
|
||||
{
|
||||
virtual bool readCompoundToken(token& tok, const word& type)
|
||||
{
|
||||
auto& is = *this;
|
||||
|
||||
bool delay = true;
|
||||
|
||||
// Low-level: get next valid character (after comments)
|
||||
// and branch based on it being a '{' or not
|
||||
|
||||
char c = 0;
|
||||
if (is.read(c))
|
||||
{
|
||||
// Delay further reading?
|
||||
delay = (c == token::BEGIN_BLOCK);
|
||||
is.putback(c);
|
||||
|
||||
if (c)
|
||||
{
|
||||
cerr<< "nextChar:" << c << " : delay read: " << delay << nl;
|
||||
}
|
||||
}
|
||||
|
||||
// Caller already checked token::compound::isCompound(...)
|
||||
// but use readCompoundToken anyhow for convenience
|
||||
|
||||
if (tok.readCompoundToken(type, is, !delay))
|
||||
{
|
||||
cerr<< "readCompound(" << type << ")\n";
|
||||
cerr<< "typeCode: ";
|
||||
printTypeCode(cerr, tok.compoundToken().typeCode()) << nl;
|
||||
|
||||
if (test_pending && delay)
|
||||
{
|
||||
InfoErr<< "pending read "
|
||||
<< tok.compoundToken().type() << endl;
|
||||
|
||||
tok.refCompoundToken().pending(true);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
public:
|
||||
|
||||
// Constructors
|
||||
using IFstream::IFstream;
|
||||
|
||||
//- Destructor
|
||||
~IFstreamDelayed() = default;
|
||||
|
||||
// Testing deprecation warnings
|
||||
FOAM_DEPRECATED_STRICT(2023-08, "direct calling")
|
||||
Istream& operator()() const
|
||||
{
|
||||
return const_cast<IFstreamDelayed&>(*this);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
} // End namespace Foam
|
||||
|
||||
|
||||
using namespace Foam;
|
||||
|
||||
void populateCompound(token::compound& ct, const dictionary& dict)
|
||||
{
|
||||
Info<< "populateCompound: " << nl;
|
||||
|
||||
// This is where runTime dispatch, eg based on transport type
|
||||
// could be used...
|
||||
|
||||
switch (ct.typeCode())
|
||||
{
|
||||
#undef fillComponents
|
||||
#define fillComponents(Type, Variable, Value) \
|
||||
{ \
|
||||
ct.pending(false); \
|
||||
ct.resize(10); \
|
||||
UList<Type> Variable \
|
||||
( \
|
||||
reinterpret_cast<Type*>(ct.data_bytes()), \
|
||||
label(ct.size_bytes() / sizeof(Type)) \
|
||||
); \
|
||||
Variable = Value; \
|
||||
}
|
||||
|
||||
case token::tokenType::PUNCTUATION :
|
||||
{
|
||||
fillComponents(char, cmpts, '@');
|
||||
}
|
||||
break;
|
||||
|
||||
case token::tokenType::BOOL :
|
||||
{
|
||||
fillComponents(bool, cmpts, false);
|
||||
}
|
||||
break;
|
||||
|
||||
case token::tokenType::LABEL :
|
||||
{
|
||||
fillComponents(label, cmpts, 123);
|
||||
}
|
||||
break;
|
||||
|
||||
case token::tokenType::FLOAT :
|
||||
{
|
||||
fillComponents(float, cmpts, 2.7);
|
||||
}
|
||||
break;
|
||||
|
||||
case token::tokenType::DOUBLE :
|
||||
{
|
||||
fillComponents(double, cmpts, 3.1415);
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
|
||||
#undef fillComponents
|
||||
}
|
||||
|
||||
if (!ct.pending())
|
||||
{
|
||||
Info<< "assigned values:" << endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void rewriteCompounds(ITstream& is)
|
||||
{
|
||||
Info<< "rewrite: " << flatOutput(is) << endl;
|
||||
|
||||
for (label toki = 0; toki < is.size(); ++toki)
|
||||
{
|
||||
if (is[toki].isCompound() && is[toki].compoundToken().pending())
|
||||
{
|
||||
Info<< "replace : " << is[toki].info() << endl;
|
||||
|
||||
if (is.peekToken(toki+1).isPunctuation(token::BEGIN_BLOCK))
|
||||
{
|
||||
labelRange slice
|
||||
(
|
||||
is.find(token::BEGIN_BLOCK, token::END_BLOCK, toki+1)
|
||||
);
|
||||
|
||||
if (slice.good() && (slice.start() == toki+1))
|
||||
{
|
||||
Info<< "Compound at:" << toki
|
||||
<< " dict:" << slice << endl;
|
||||
|
||||
ITstream substream(is.extract(slice));
|
||||
|
||||
dictionary dict(substream);
|
||||
|
||||
populateCompound(is[toki].refCompoundToken(), dict);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void rewriteDict(dictionary& dict)
|
||||
{
|
||||
for (entry& e : dict)
|
||||
{
|
||||
if (e.isDict())
|
||||
{
|
||||
rewriteDict(e.dict());
|
||||
}
|
||||
else if (e.isStream())
|
||||
{
|
||||
rewriteCompounds(e.stream());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
// Main program:
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
argList::noBanner();
|
||||
argList::noParallel();
|
||||
argList::addBoolOption("std", "standard reading (no delayed compounds)");
|
||||
argList::addBoolOption("pending", "read with pending");
|
||||
|
||||
argList args(argc, argv, false, true);
|
||||
|
||||
Info<< "typeCodes:" << nl;
|
||||
Info<< " bool=";
|
||||
printTypeCode(Info, token::tokenType::BOOL) << nl;
|
||||
Info<< " label=";
|
||||
printTypeCode(Info, token::tokenType::LABEL) << nl;
|
||||
Info<< " float=";
|
||||
printTypeCode(Info, token::tokenType::FLOAT) << nl;
|
||||
Info<< " double=";
|
||||
printTypeCode(Info, token::tokenType::DOUBLE) << nl;
|
||||
Info<< nl;
|
||||
|
||||
if (args.found("pending"))
|
||||
{
|
||||
test_pending = true;
|
||||
}
|
||||
|
||||
if (args.found("std"))
|
||||
{
|
||||
for (label argi = 1; argi < args.size(); ++argi)
|
||||
{
|
||||
Info<< "Read: " << args[argi] << endl;
|
||||
IFstream is(args[argi]);
|
||||
|
||||
dictionary dict(is);
|
||||
|
||||
Info<< "read: " << dict << nl;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
for (label argi = 1; argi < args.size(); ++argi)
|
||||
{
|
||||
Info<< "Read delay: " << args[argi] << endl;
|
||||
|
||||
IFstreamDelayed is(args[argi]);
|
||||
|
||||
// Trigger strict warning?
|
||||
Info<< "stream: " << is().name() << nl;
|
||||
|
||||
dictionary dict(is);
|
||||
Info<< "read: " << dict << nl;
|
||||
|
||||
rewriteDict(dict);
|
||||
|
||||
Info<< "modified: " << dict << nl;
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
// ************************************************************************* //
|
||||
@ -1,24 +0,0 @@
|
||||
/*--------------------------------*- C++ -*----------------------------------*\
|
||||
| ========= | |
|
||||
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
|
||||
| \\ / O peration | Version: v2312 |
|
||||
| \\ / A nd | Website: www.openfoam.com |
|
||||
| \\/ M anipulation | |
|
||||
\*---------------------------------------------------------------------------*/
|
||||
FoamFile
|
||||
{
|
||||
version 2.0;
|
||||
format ascii;
|
||||
class dictionary;
|
||||
object dictionary;
|
||||
}
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
|
||||
internalField uniform 1;
|
||||
|
||||
temperature List<scalar> 10(270 271 272 273 274 275 276 277 278 279);
|
||||
|
||||
pressure 1e5;
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
@ -1,65 +0,0 @@
|
||||
/*--------------------------------*- C++ -*----------------------------------*\
|
||||
| ========= | |
|
||||
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
|
||||
| \\ / O peration | Version: v2312 |
|
||||
| \\ / A nd | Website: www.openfoam.com |
|
||||
| \\/ M anipulation | |
|
||||
\*---------------------------------------------------------------------------*/
|
||||
FoamFile
|
||||
{
|
||||
version 2.0;
|
||||
format ascii;
|
||||
class dictionary;
|
||||
object dictionary;
|
||||
}
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
|
||||
internalField uniform 1;
|
||||
|
||||
// Regular syntax
|
||||
valuesT List<scalar> (123 456 890);
|
||||
|
||||
// Test some non-standard syntax
|
||||
temperature List<scalar>
|
||||
{
|
||||
transport adios;
|
||||
length 10;
|
||||
values (270 271 272 273 274 275 276 277 278 279);
|
||||
};
|
||||
|
||||
// Test some non-standard syntax
|
||||
velocity List<vector>
|
||||
{
|
||||
transport adios;
|
||||
length 10;
|
||||
values (270 271 272 273 274 275 276 277 278 279);
|
||||
};
|
||||
|
||||
// Test some non-standard syntax
|
||||
isGood List<bool>
|
||||
{
|
||||
transport adios;
|
||||
length 10;
|
||||
values (true false true);
|
||||
};
|
||||
|
||||
// Test some non-standard syntax
|
||||
master List<label>
|
||||
{
|
||||
transport adios;
|
||||
length 10;
|
||||
values (0 100 35 50);
|
||||
};
|
||||
|
||||
// Test some non-standard syntax
|
||||
edges List<edge>
|
||||
{
|
||||
transport adios;
|
||||
length 10;
|
||||
values ((0 1) (2 1));
|
||||
};
|
||||
|
||||
pressure 1e5;
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
@ -61,7 +61,7 @@ void basicTests(const coordinateSystem& cs)
|
||||
|
||||
if (const auto* cartptr = isA<coordSystem::cartesian>(cs))
|
||||
{
|
||||
if (!cartptr->good())
|
||||
if (!cartptr->valid())
|
||||
{
|
||||
Info<< "invalid cartesian = " << (*cartptr)
|
||||
<< " with: " << (*cartptr).R() << nl;
|
||||
|
||||
@ -5,7 +5,7 @@
|
||||
\\ / A nd | www.openfoam.com
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2021-2023 OpenCFD Ltd.
|
||||
Copyright (C) 2021 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM, distributed under GPL-3.0-or-later.
|
||||
@ -17,9 +17,8 @@ Description
|
||||
|
||||
#include "IOstreams.H"
|
||||
#include "ITstream.H"
|
||||
#include "uLabel.H"
|
||||
|
||||
#include "exprTraits.H"
|
||||
#include "uLabel.H"
|
||||
#include "error.H"
|
||||
#include "stringList.H"
|
||||
#include "exprScanToken.H"
|
||||
@ -28,18 +27,16 @@ using namespace Foam;
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
|
||||
template<class Type>
|
||||
template<class T>
|
||||
void printTraits()
|
||||
{
|
||||
const auto typeCode = exprTypeTraits<Type>::value;
|
||||
const auto typeCode = exprTypeTraits<T>::value;
|
||||
|
||||
Info<< "Type '" << pTraits<Type>::typeName
|
||||
<< "' = code:" << int(typeCode)
|
||||
<< " rank:" << exprTypeTraits<Type>::rank
|
||||
<< " cmpt:" << exprTypeTraits<Type>::nComponents
|
||||
<< " name:" << exprTypeTraits<Type>::name;
|
||||
Info<< "type " << pTraits<T>::typeName
|
||||
<< " code:" << int(typeCode)
|
||||
<< " name:" << exprTypeTraits<T>::name;
|
||||
|
||||
if (pTraits<Type>::typeName != word(exprTypeTraits<Type>::name))
|
||||
if (pTraits<T>::typeName != word(exprTypeTraits<T>::name))
|
||||
{
|
||||
Info<< " (UNSUPPORTED)";
|
||||
}
|
||||
@ -48,17 +45,6 @@ void printTraits()
|
||||
}
|
||||

void print(const expressions::scanToken& tok)
{
    Info<< " type:" << int(tok.type_);
    if (tok.is_pointer())
    {
        Info<< " ptr:" << Foam::name(tok.name_);
    }
    Info<< nl;
}


// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

int main()
@ -70,7 +56,6 @@ int main()
    printTraits<bool>();
    printTraits<label>();
    printTraits<scalar>();
    printTraits<complex>();
    printTraits<vector>();
    printTraits<tensor>();
    printTraits<symmTensor>();
@ -86,27 +71,33 @@ int main()
    Info<< "Name of typeCode: "
        << getName(expressions::valueTypeCode::type_bool) << nl;

    {
        expressions::scanToken tok(expressions::scanToken::null());
        expressions::scanToken tok2(expressions::scanToken::null());
        expressions::scanToken tok;
        expressions::scanToken tok2;

        Info<< nl << "sizeof(scanToken): "
            << sizeof(tok) << nl;

        print(tok);
        print(tok2);
        Info<< " type:" << int(tok.type_) << nl;
        Info<< " ptr:" << Foam::name(tok.name_) << nl;

        Info<< " type:" << int(tok2.type_) << nl;
        Info<< " ptr:" << Foam::name(tok2.name_) << nl;

        tok.setWord("hello");

        print(tok);
        Info<< " type:" << int(tok.type_) << nl;
        Info<< " ptr:" << Foam::name(tok.name_) << nl;

        tok2 = tok;
        print(tok2);
        Info<< " type:" << int(tok2.type_) << nl;
        Info<< " ptr:" << Foam::name(tok2.name_) << nl;

        tok2.destroy();

        print(tok); // Not a leak, but old rubbish
        print(tok2);
        Info<< " type:" << int(tok2.type_) << nl;
        Info<< " ptr:" << Foam::name(tok2.name_) << nl;
    }

    Info<< nl << "Done" << nl;
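The hunk above replaces null()-constructed tokens with default construction and folds the repeated type/ptr printing into print(). A condensed sketch of the same lifecycle, using only the scanToken operations that appear in the hunk (default construction, setWord, copy assignment, destroy):

#include "exprScanToken.H"
#include "IOstreams.H"

using namespace Foam;

int main()
{
    expressions::scanToken tok;     // default construction (replaces scanToken::null())
    expressions::scanToken tok2;

    tok.setWord("hello");           // tok now owns heap storage for the word
    tok2 = tok;                     // plain struct copy: both refer to that storage

    Info<< "copied type:" << int(tok2.type_) << nl;

    tok2.destroy();                 // frees the shared storage once;
                                    // tok is left dangling ("old rubbish", as noted above)
    return 0;
}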
@ -1,3 +0,0 @@
Test-exprValue.C

EXE = $(FOAM_USER_APPBIN)/Test-exprValue
@ -1,2 +0,0 @@
/* EXE_INC = */
/* EXE_LIBS = */
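The two removed files above are the usual Make/files and Make/options pair of a user test application. For orientation only, the same minimal skeleton for a hypothetical Test-myTool application (all names are placeholders, not part of this change):

Make/files:
    Test-myTool.C

    EXE = $(FOAM_USER_APPBIN)/Test-myTool

Make/options:
    /* EXE_INC  = -I$(LIB_SRC)/finiteVolume/lnInclude */
    /* EXE_LIBS = -lfiniteVolume */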
@ -1,141 +0,0 @@
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2021-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM, distributed under GPL-3.0-or-later.

Application
    Test-exprValue

Description
    Test low-level polymorphic value container (exprValue)

\*---------------------------------------------------------------------------*/

#include "argList.H"
#include "IOstreams.H"
#include "ITstream.H"
#include "exprValue.H"

using namespace Foam;

void printInfo(const expressions::exprValue& val)
{
    Info<< "Boxed type:" << int(val.typeCode())
        << " (" << val.valueTypeName() << ") good:"
        << val.good() << " => " << val << nl;
}


expressions::exprValue tryParse(const std::string& str)
{
    expressions::exprValue val, val2;

    ITstream is(str);

    const bool ok = val.read(is);

    Info<< "read " << Foam::name(val.typeCode()) << " from " << str;

    if (ok)
    {
        Info<< " trailing tokens:" << is.nRemainingTokens() << nl
            << "value: " << val << nl;
    }
    else
    {
        Info<< " FAILED" << nl;
    }

    if (ok)
    {
        Info<< "Direct from string: ";
        if (expressions::exprValue::read(str, val2))
        {
            Info<< "good" << nl;
        }
        else
        {
            Info<< "bad" << nl;
        }
    }
    return val;
}


// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

int main(int argc, char *argv[])
{
    argList::noBanner();
    argList::noParallel();

    #include "setRootCase.H"

    // Aborts
    // expressions::exprValue value(std::string(""));

    {
        expressions::exprValue value;

        // Nothing
        printInfo(value);

        value.set(scalar(100));
        printInfo(value);

        value.set(vector(1,2,3));
        printInfo(value);

        value = vector(4,5,6);
        printInfo(value);

        value = Zero;
        printInfo(value);

        value.clear();
        printInfo(value);

        value = 100 * vector(1,0,0);
        printInfo(value);
    }


    {
        Info<< nl << "Test parsing" << nl << nl;

        for
        (
            const auto& input :
            stringList
            ({
                "()",        // bad
                "( 1 2 ",    // also bad
                "( ",        // really bad
                "(1 16 12)",
                "(1 bad)",
                "(5)",
                "1.2345",
                "5.678 trailing",
                "true",
                "false",
                " 1 ",
                " yes no "
            })
        )
        {
            (void) tryParse(input);
        }
    }

    return 0;
}


// ************************************************************************* //
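The removed Test-exprValue.C above exercises the polymorphic exprValue container directly. A trimmed-down sketch of the same usage pattern, restricted to calls that appear in the removed file (set, assignment, clear, read from a string); the driver itself is illustrative:

#include "exprValue.H"
#include "IOstreams.H"

using namespace Foam;

int main()
{
    expressions::exprValue value;

    value.set(scalar(100));             // boxed scalar
    Info<< "scalar => " << value << nl;

    value = vector(4, 5, 6);            // re-box as a vector
    Info<< "vector => " << value << nl;

    value.clear();                      // back to an empty/none value

    // Parse directly from a string (bool result indicates success)
    if (expressions::exprValue::read("(1 16 12)", value))
    {
        Info<< "parsed => " << value << nl;
    }

    return 0;
}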
@ -38,6 +38,7 @@ Description

#include "OSspecific.H"
#include "Switch.H"

#include <csignal>
#include <cstdlib>
#include <iostream>


@ -5,7 +5,7 @@
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2022-2023 OpenCFD Ltd.
    Copyright (C) 2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
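The only code change in the first hunk above is the added Switch.H include. For context, Switch is the on/off text-to-bool helper; a tiny hedged sketch, illustrative only and not derived from the diff:

#include "Switch.H"
#include "IOstreams.H"

using namespace Foam;

int main()
{
    // Switch accepts the usual boolean spellings: on/off, yes/no, true/false
    const Switch verbose("on");

    if (verbose)
    {
        Info<< "verbose = " << verbose << nl;
    }

    return 0;
}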
@ -139,7 +139,7 @@ void printInfo(const ensightMesh& mesh, int verbose = 0)
    FixedList<label, 3> cellStats(Zero);
    FixedList<label, 3> faceStats(Zero);

    for (const auto& iter : mesh.cellZoneParts().csorted())
    for (const auto& iter : mesh.cellZoneParts().sorted())
    {
        FixedList<label, 3> stats = printPartInfo(iter.val(), verbose);

@ -149,7 +149,7 @@ void printInfo(const ensightMesh& mesh, int verbose = 0)
        }
    }

    for (const auto& iter : mesh.faceZoneParts().csorted())
    for (const auto& iter : mesh.faceZoneParts().sorted())
    {
        FixedList<label, 3> stats = printPartInfo(iter.val(), verbose);

@ -159,7 +159,7 @@ void printInfo(const ensightMesh& mesh, int verbose = 0)
        }
    }

    for (const auto& iter : mesh.boundaryParts().csorted())
    for (const auto& iter : mesh.boundaryParts().sorted())
    {
        FixedList<label, 3> stats = printPartInfo(iter.val(), verbose);


@ -204,7 +204,7 @@ int main(int argc, char *argv[])
        labelPair inOut;
        pointField allCcs(globalNumbering.gather(mesh.cellCentres()));
        inOut[0] = allCcs.size();
        Pstream::broadcastList(allCcs);
        Pstream::broadcast(allCcs);
        inOut[1] = allCcs.size();
        Pout<< " " << inOut << endl;
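The last hunk above only swaps the broadcast call used after gathering the cell centres (broadcastList on one side, broadcast on the other). A minimal sketch of that gather-then-replicate pattern in isolation; both the gather and broadcast calls are taken from the hunk, while the createTime/createMesh scaffolding is ordinary test-app boilerplate and not part of the diff:

#include "argList.H"
#include "Time.H"
#include "fvMesh.H"
#include "globalIndex.H"
#include "Pstream.H"

using namespace Foam;

int main(int argc, char *argv[])
{
    #include "setRootCase.H"
    #include "createTime.H"
    #include "createMesh.H"

    // Number cells globally and gather all cell centres onto the master ...
    const globalIndex globalNumbering(mesh.nCells());
    pointField allCcs(globalNumbering.gather(mesh.cellCentres()));

    // ... then replicate the merged field onto every rank
    Pstream::broadcast(allCcs);

    Pout<< "gathered+broadcast points: " << allCcs.size() << endl;
    return 0;
}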
@ -57,7 +57,7 @@ int main(int argc, char *argv[])

    Info<< "Found: " << objects << nl << endl;

    for (const IOobject& io : objects.csorted<uniformDimensionedVectorField>())
    for (const IOobject& io : objects.sorted<uniformDimensionedVectorField>())
    {
        if (io.name() == meshObjects::gravity::typeName)
        {
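The loop above scans an IOobjectList for uniformDimensionedVectorField headers and picks out the gravity field by name. A short sketch of building and filtering such a list; the sorted<Type>() accessor is the one used in this hunk, the surrounding scaffolding is illustrative:

#include "argList.H"
#include "Time.H"
#include "IOobjectList.H"
#include "uniformDimensionedFields.H"

using namespace Foam;

int main(int argc, char *argv[])
{
    #include "setRootCase.H"
    #include "createTime.H"

    // All IOobject headers in the current time directory
    IOobjectList objects(runTime, runTime.timeName());

    Info<< "Found: " << objects << nl;

    // Walk only the uniformDimensionedVectorField entries, sorted by name
    for (const IOobject& io : objects.sorted<uniformDimensionedVectorField>())
    {
        Info<< "    " << io.name() << nl;
    }

    return 0;
}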
@ -5,7 +5,7 @@
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2016-2023 OpenCFD Ltd.
    Copyright (C) 2016-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@ -78,8 +78,8 @@ void printRegistry
    Foam::label indent
)
{
    const UPtrList<const regIOobject> objects(obr.csorted());
    const wordList regNames(obr.sortedNames<objectRegistry>());
    UPtrList<const regIOobject> objects(obr.sorted());
    wordList regNames(obr.sortedNames<objectRegistry>());

    std::string prefix;
    for (label i=indent; i; --i)

@ -145,8 +145,8 @@ void printRegistry
    Foam::label indent
)
{
    const UPtrList<const regIOobject> objects(obr.csorted());
    const wordList regNames(obr.sortedNames<objectRegistry>());
    UPtrList<const regIOobject> objects(obr.sorted());
    wordList regNames(obr.sortedNames<objectRegistry>());

    std::string prefix;
    for (label i=indent; i; --i)
@ -315,7 +315,7 @@ int main(int argc, char *argv[])

    registryTests(mesh);

    report(mesh.csorted<volScalarField>());
    report(mesh.sorted<const volScalarField>());
    report(mesh.csorted<volVectorField>());

    Info<< nl;
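Several hunks in this comparison only exchange sorted() for csorted() when walking a registry. A minimal sketch of that access pattern on the mesh registry, assuming a case with at least one registered volScalarField; the csorted<Type>() accessor is taken from the hunks above, the rest is illustrative scaffolding:

#include "argList.H"
#include "Time.H"
#include "fvMesh.H"
#include "volFields.H"

using namespace Foam;

int main(int argc, char *argv[])
{
    #include "setRootCase.H"
    #include "createTime.H"
    #include "createMesh.H"

    // Walk all registered volScalarFields in name order (read-only view)
    for (const volScalarField& fld : mesh.csorted<volScalarField>())
    {
        Info<< fld.name() << " average: " << average(fld).value() << nl;
    }

    return 0;
}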
@ -6,7 +6,6 @@
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2011 OpenFOAM Foundation
|
||||
Copyright (C) 2023 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM.
|
||||
@ -30,12 +29,9 @@ Description
|
||||
|
||||
#include "IOstreams.H"
|
||||
#include "pTraits.H"
|
||||
#include "contiguous.H"
|
||||
#include "boolVector.H" // A FixedList pretending to be a vector
|
||||
#include "vector.H"
|
||||
#include "tensor.H"
|
||||
#include "uLabel.H"
|
||||
#include "Switch.H"
|
||||
|
||||
#include <type_traits>
|
||||
|
||||
@ -44,72 +40,14 @@ using namespace Foam;
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
// Main program:
|
||||
|
||||
//- Test if Type has typeName member
|
||||
template<class T, class = void>
|
||||
struct has_typeName : std::false_type {};
|
||||
|
||||
//- Test if Type has typeName member
|
||||
|
||||
template<class T>
|
||||
struct has_typeName<T, stdFoam::void_t<decltype(pTraits<T>::typeName)>>
|
||||
:
|
||||
std::true_type
|
||||
{};
|
||||
|
||||
|
||||
template<class T>
|
||||
typename std::enable_if<has_typeName<T>::value, void>::type
|
||||
printTypeName()
|
||||
{
|
||||
Info<< pTraits<T>::typeName;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
typename std::enable_if<!has_typeName<T>::value, void>::type
|
||||
printTypeName()
|
||||
{
|
||||
Info<< typeid(T).name();
|
||||
}
|
||||
|
||||
|
||||
template<class T, class = void>
|
||||
struct has_zero_one : std::false_type {};
|
||||
|
||||
template<class T>
|
||||
struct has_zero_one
|
||||
<
|
||||
T,
|
||||
stdFoam::void_t<decltype(pTraits<T>::zero), decltype(pTraits<T>::one)>
|
||||
> : std::true_type {};
|
||||
|
||||
|
||||
template<class T>
|
||||
typename std::enable_if<has_zero_one<T>::value, void>::type
|
||||
printMinMaxRange()
|
||||
{
|
||||
Info<< " zero=" << pTraits<T>::zero
|
||||
<< " one=" << pTraits<T>::one;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
typename std::enable_if<!has_zero_one<T>::value, void>::type
|
||||
printMinMaxRange()
|
||||
{}
|
||||
|
||||
|
||||
template<class T>
|
||||
void printTraits()
|
||||
{
|
||||
printTypeName<T>();
|
||||
printMinMaxRange<T>();
|
||||
|
||||
Info<< " integral=" << std::is_integral<T>::value
|
||||
Info<< pTraits<T>::typeName
|
||||
<< ": zero=" << pTraits<T>::zero
|
||||
<< " one=" << pTraits<T>::one
|
||||
<< " integral=" << std::is_integral<T>::value
|
||||
<< " floating=" << std::is_floating_point<T>::value
|
||||
<< " rank=" << pTraits_rank<T>::value
|
||||
<< " nComponents=" << pTraits_nComponents<T>::value
|
||||
<< " vector-space=" << Switch::name(is_vectorspace<T>::value)
|
||||
<< " is_label=" << Switch::name(is_contiguous_label<T>::value)
|
||||
<< " is_scalar=" << Switch::name(is_contiguous_scalar<T>::value)
|
||||
<< endl;
|
||||
}
|
||||
|
||||
@ -131,9 +69,6 @@ int main()
|
||||
printTraits<scalar>();
|
||||
printTraits<vector>();
|
||||
printTraits<tensor>();
|
||||
printTraits<boolVector>();
|
||||
printTraits<word>();
|
||||
printTraits<std::string>();
|
||||
|
||||
{
|
||||
pTraits<bool> b(true);
|
||||
|
||||
@ -39,7 +39,6 @@ Description
#include "Tuple2.H"
#include "IOstreams.H"
#include "PstreamReduceOps.H"
#include "bitSet.H"

using namespace Foam;

@ -88,10 +87,7 @@ int main(int argc, char *argv[])

    // Reductions (using MPI intrinsics)
    {
        const label myRank = UPstream::myProcNo(UPstream::commWorld());
        const label nProcs = UPstream::nProcs(UPstream::commWorld());

        label val = myRank;
        label val = Pstream::myProcNo(UPstream::commWorld());

        label worldVal = returnReduce
        (
@ -112,52 +108,6 @@ int main(int argc, char *argv[])
        Pout<< "value " << val
            << " (world) reduced " << worldVal
            << " (self) reduced " << selfVal << nl;

        // Identical size on all procs
        bitSet procUsed(nProcs);

        if ((myRank % 4) == 0)
        {
            procUsed.set(myRank);
        }

        Pout<< "local procUsed " << procUsed << nl;
        reduce
        (
            procUsed.data(),
            procUsed.size_data(),
            bitOrOp<unsigned int>()
        );
        Pout<< "reduce procUsed " << procUsed << nl;

        // Identical size on all procs
        // encode as 0:empty, 1:uniform, 2:nonuniform, 3:mixed
        PackedList<2> uniformity(10);

        if ((myRank % 2) == 0)
        {
            // Every second is uniform
            uniformity.set(2, 1);
            uniformity.set(4, 1);
            uniformity.set(6, 1);
            uniformity.set(8, 1);
        }
        else if ((myRank % 3) == 0)
        {
            // Every third is nonuniform
            uniformity.set(3, 2);
            uniformity.set(6, 2);
            uniformity.set(9, 2);
        }

        Pout<< "local uniform " << uniformity << nl;
        reduce
        (
            uniformity.data(),
            uniformity.size_data(),
            bitOrOp<unsigned int>()
        );
        Pout<< "reduce uniform " << uniformity << nl;
    }

    // Reductions (not using MPI intrinsics)
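The block above adds element-wise bitwise-OR reductions over the raw storage of a bitSet and a PackedList. A reduced sketch of the simpler value reduction that both sides share, plus the bitSet variant added in the hunk; data(), size_data() and bitOrOp are taken from the hunk itself, the rest is ordinary boilerplate:

#include "argList.H"
#include "PstreamReduceOps.H"
#include "bitSet.H"

using namespace Foam;

int main(int argc, char *argv[])
{
    argList::noCheckProcessorDirectories();
    #include "setRootCase.H"

    const label myRank = UPstream::myProcNo(UPstream::commWorld());
    const label nProcs = UPstream::nProcs(UPstream::commWorld());

    // Scalar-style reduction: sum of ranks across the world communicator
    const label total = returnReduce(myRank, sumOp<label>());
    Pout<< "rank " << myRank << " sum-of-ranks " << total << endl;

    // Bitwise-OR reduction over the raw words of a bitSet (same size everywhere)
    bitSet procUsed(nProcs);
    procUsed.set(myRank);
    reduce(procUsed.data(), procUsed.size_data(), bitOrOp<unsigned int>());
    Pout<< "procUsed " << procUsed << endl;

    return 0;
}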
@ -1,3 +0,0 @@
Test-parallel-waitSome.C

EXE = $(FOAM_USER_APPBIN)/Test-parallel-waitSome
@ -1,2 +0,0 @@
/* EXE_INC = */
/* EXE_LIBS = */
@ -1,328 +0,0 @@
|
||||
/*---------------------------------------------------------------------------*\
|
||||
========= |
|
||||
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
|
||||
\\ / O peration |
|
||||
\\ / A nd | www.openfoam.com
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2023 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM.
|
||||
|
||||
OpenFOAM is free software: you can redistribute it and/or modify it
|
||||
under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
|
||||
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Application
|
||||
Test-parallel-waitSome
|
||||
|
||||
Description
|
||||
Test polling versus wait-all for processing receive data.
|
||||
Will not see much difference between -wait-all and -no-polling though
|
||||
since the master doesn't have enough other work.
|
||||
|
||||
\*---------------------------------------------------------------------------*/
|
||||
|
||||
#include "List.H"
|
||||
#include "argList.H"
|
||||
#include "Time.H"
|
||||
#include "IPstream.H"
|
||||
#include "OPstream.H"
|
||||
#include "IOstreams.H"
|
||||
#include "Switch.H"
|
||||
#include "clockTime.H"
|
||||
|
||||
using namespace Foam;
|
||||
|
||||
|
||||
// The 'classic' waiting receive, but also only waiting for recv request
|
||||
template<class Type>
|
||||
void waitingReceive
|
||||
(
|
||||
const labelRange& recvRequests,
|
||||
const List<List<Type>>& recvBuffers,
|
||||
const bool waitAll = false
|
||||
)
|
||||
{
|
||||
clockTime waitTiming;
|
||||
|
||||
if (waitAll)
|
||||
{
|
||||
// Wait for send and recv (assumes recv followed by send)
|
||||
UPstream::waitRequests(recvRequests.start(), -1);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Wait for receives only
|
||||
UPstream::waitRequests(recvRequests.start(), recvRequests.size());
|
||||
}
|
||||
|
||||
double waited = waitTiming.timeIncrement();
|
||||
if (waited > 1e-3)
|
||||
{
|
||||
Pout<< "waited: " << waited << " before processing" << endl;
|
||||
}
|
||||
|
||||
forAll(recvBuffers, proci)
|
||||
{
|
||||
const auto& slice = recvBuffers[proci];
|
||||
|
||||
if (!slice.empty())
|
||||
{
|
||||
// Process data from proci
|
||||
Pout<< "proc:" << proci
|
||||
<< ' ' << flatOutput(slice) << nl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Polling receive
|
||||
template<class Type>
|
||||
void pollingReceive
|
||||
(
|
||||
const labelRange& recvRequests,
|
||||
const UList<int>& recvProcs,
|
||||
const List<List<Type>>& recvBuffers
|
||||
)
|
||||
{
|
||||
clockTime waitTiming;
|
||||
|
||||
DynamicList<int> indices(recvRequests.size());
|
||||
|
||||
if (!recvRequests.empty()) Pout<< "..." << endl;
|
||||
|
||||
for
|
||||
(
|
||||
label loop = 0;
|
||||
UPstream::waitSomeRequests
|
||||
(
|
||||
recvRequests.start(),
|
||||
recvRequests.size(),
|
||||
&indices
|
||||
);
|
||||
++loop
|
||||
)
|
||||
{
|
||||
double waited = waitTiming.timeIncrement();
|
||||
if (waited <= 1e-3)
|
||||
{
|
||||
waited = 0;
|
||||
}
|
||||
Pout<< "loop:" << loop
|
||||
<< " waited: " << waited
|
||||
<< " before processing" << endl;
|
||||
|
||||
for (const int idx : indices)
|
||||
{
|
||||
const int proci = recvProcs[idx];
|
||||
const auto& slice = recvBuffers[proci];
|
||||
|
||||
// Process data from proci
|
||||
Pout<< "loop:" << loop << " polled:" << indices.size()
|
||||
<< " proc:" << proci
|
||||
<< ' ' << flatOutput(slice) << endl;
|
||||
}
|
||||
Pout<< "..." << endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
argList::noCheckProcessorDirectories();
|
||||
argList::addVerboseOption("timings etc");
|
||||
argList::addBoolOption("no-polling", "wait all instead of polling");
|
||||
argList::addBoolOption("wait-all", "wait all instead of polling");
|
||||
argList::addOption("sleep", "s", "change sleep (default: 5)");
|
||||
argList::noCheckProcessorDirectories();
|
||||
|
||||
const label transferSize = 10;
|
||||
label sleepSeconds = 5;
|
||||
|
||||
#include "setRootCase.H"
|
||||
|
||||
args.readIfPresent("sleep", sleepSeconds);
|
||||
const bool waitAll = args.found("wait-all");
|
||||
const bool nonPolling = args.found("no-polling");
|
||||
|
||||
if (!Pstream::parRun())
|
||||
{
|
||||
Info<< "\nWarning: not parallel - skipping further tests\n" << endl;
|
||||
return 0;
|
||||
}
|
||||
|
||||
Info<< "Calling with sleep=" << sleepSeconds
|
||||
<< ", polling=" << Switch::name(!nonPolling)
|
||||
<< ", wait-all=" << Switch::name(waitAll) << nl;
|
||||
|
||||
labelList sendBuffer;
|
||||
List<labelList> recvBuffers;
|
||||
|
||||
if (UPstream::master())
|
||||
{
|
||||
recvBuffers.resize(UPstream::nProcs());
|
||||
}
|
||||
else
|
||||
{
|
||||
recvBuffers.resize(1);
|
||||
}
|
||||
|
||||
clockTime timing;
|
||||
|
||||
const label startOfRequests = UPstream::nRequests();
|
||||
|
||||
// Setup receives
|
||||
labelRange recvRequests(UPstream::nRequests(), 0);
|
||||
DynamicList<int> recvProcs(UPstream::nProcs());
|
||||
|
||||
if (UPstream::master())
|
||||
{
|
||||
for (const int proci : UPstream::subProcs())
|
||||
{
|
||||
// The rank corresponding to the request
|
||||
recvProcs.push_back(proci);
|
||||
auto& slice = recvBuffers[proci];
|
||||
slice.resize_nocopy(transferSize);
|
||||
|
||||
UIPstream::read
|
||||
(
|
||||
UPstream::commsTypes::nonBlocking,
|
||||
proci,
|
||||
slice
|
||||
);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
const int proci = UPstream::masterNo();
|
||||
|
||||
if ((UPstream::myProcNo() % 2) == 0)
|
||||
{
|
||||
recvProcs.push_back(proci);
|
||||
auto& slice = recvBuffers[proci];
|
||||
slice.resize_nocopy(transferSize);
|
||||
|
||||
UIPstream::read
|
||||
(
|
||||
UPstream::commsTypes::nonBlocking,
|
||||
proci,
|
||||
slice
|
||||
);
|
||||
}
|
||||
}
|
||||
// OR: recvRequests.size() = (UPstream::nRequests() - recvRequests.start());
|
||||
recvRequests += recvProcs.size();
|
||||
|
||||
|
||||
labelList overallRecvRequests
|
||||
(
|
||||
UPstream::listGatherValues<label>(recvRequests.size())
|
||||
);
|
||||
|
||||
Info<< "Number of recv requests: "
|
||||
<< flatOutput(overallRecvRequests) << nl << nl;
|
||||
|
||||
|
||||
// Setup sends
|
||||
sendBuffer.resize_nocopy(transferSize);
|
||||
sendBuffer = UPstream::myProcNo();
|
||||
|
||||
const auto startBufferSend = [&]() -> void
|
||||
{
|
||||
if (sleepSeconds > 0)
|
||||
{
|
||||
// Dispatch some immediately, others with a delay
|
||||
if ((UPstream::myProcNo() % 2) == 0)
|
||||
{
|
||||
sleep(sleepSeconds);
|
||||
}
|
||||
else if ((UPstream::myProcNo() % 3) == 0)
|
||||
{
|
||||
sleep(1.5*sleepSeconds);
|
||||
}
|
||||
}
|
||||
|
||||
UOPstream::write
|
||||
(
|
||||
UPstream::commsTypes::nonBlocking,
|
||||
UPstream::masterNo(),
|
||||
sendBuffer
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
if (UPstream::master())
|
||||
{
|
||||
for (const int proci : UPstream::subProcs())
|
||||
{
|
||||
if ((UPstream::myProcNo() % 2) == 0)
|
||||
{
|
||||
UOPstream::write
|
||||
(
|
||||
UPstream::commsTypes::nonBlocking,
|
||||
proci,
|
||||
sendBuffer
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (waitAll)
|
||||
{
|
||||
startBufferSend();
|
||||
}
|
||||
|
||||
|
||||
// Some skulduggery to get a differential in timings...
|
||||
|
||||
const int nloops = (UPstream::master() ? 1 : 2);
|
||||
|
||||
for (int loopi = 0; loopi < nloops; ++loopi)
|
||||
{
|
||||
if (waitAll || nonPolling)
|
||||
{
|
||||
waitingReceive(recvRequests, recvBuffers, waitAll);
|
||||
}
|
||||
else
|
||||
{
|
||||
pollingReceive(recvRequests, recvProcs, recvBuffers);
|
||||
}
|
||||
|
||||
// Timing for processing all the receives
|
||||
if (args.verbose())
|
||||
{
|
||||
Pout<< "receive: " << timing.timeIncrement() << 's' << endl;
|
||||
}
|
||||
|
||||
if (!UPstream::master() && loopi == 0 && !waitAll)
|
||||
{
|
||||
startBufferSend();
|
||||
}
|
||||
}
|
||||
|
||||
if (args.verbose())
|
||||
{
|
||||
Pout<< "timing: " << timing.elapsedTime() << 's' << endl;
|
||||
}
|
||||
|
||||
// Final
|
||||
UPstream::waitRequests(startOfRequests);
|
||||
|
||||
Info<< "End\n" << endl;
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
// ************************************************************************* //
|
||||
@ -5,7 +5,7 @@
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2020-2023 OpenCFD Ltd.
    Copyright (C) 2020-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM, distributed under GPL-3.0-or-later.
@ -105,21 +105,12 @@ int main()
        Info<< nl << "Construct from reference" << nl;

        scalarField f2(10, Foam::sqrt(2.0));
        refPtr<scalarField> tfld2(f2);
        printInfo(tfld2, true);

        Info<< nl << "emplaced:"<< nl;
        tfld2.emplace(25, scalar(1));
        printInfo(tfld2, true);
        printInfo(refPtr<scalarField>(f2), true);
    }

    {
        Info<< nl << "Construct from New (is_pointer)" << nl;
        auto tfld1 = refPtr<scalarField>::New(10, scalar(-1));
        printInfo(tfld1, true);

        Info<< nl << "emplaced:"<< nl;
        tfld1.emplace(15, scalar(1));
        auto tfld1 = refPtr<scalarField>::New(10, scalar(1));
        printInfo(tfld1, true);

        Info<< nl << "Dereferenced: " << *tfld1 << nl;

@ -5,7 +5,7 @@
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2018-2023 OpenCFD Ltd.
    Copyright (C) 2018-2021 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM, distributed under GPL-3.0-or-later.
@ -78,10 +78,7 @@ int main()
    }

    {
        auto tfld1 = tmp<scalarField>::New(10, Zero);
        printInfo(tfld1, true);

        tfld1.emplace(20, Zero);
        auto tfld1 = tmp<scalarField>::New(20, Zero);
        printInfo(tfld1, true);

        // Hold on to the old content for a bit
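Both hunks above replace a second ::New(...) construction with emplace(...), which rebuilds the managed object in place. A minimal sketch using just the two calls shown (tmp<scalarField>::New and emplace); the driver is illustrative:

#include "primitiveFields.H"
#include "tmp.H"
#include "IOstreams.H"

using namespace Foam;

int main()
{
    // Managed field of 10 zeros
    auto tfld = tmp<scalarField>::New(10, Zero);
    Info<< "size: " << tfld().size() << nl;

    // Re-create the payload in place as 20 zeros (replaces a second ::New)
    tfld.emplace(20, Zero);
    Info<< "size after emplace: " << tfld().size() << nl;

    return 0;
}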
@ -5,7 +5,7 @@
|
||||
\\ / A nd | www.openfoam.com
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2017-2023 OpenCFD Ltd.
|
||||
Copyright (C) 2017-2020 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM.
|
||||
@ -34,7 +34,6 @@ Description
|
||||
#include "StringStream.H"
|
||||
#include "cpuTime.H"
|
||||
#include "labelList.H"
|
||||
#include "scalarList.H"
|
||||
#include "DynamicList.H"
|
||||
|
||||
using namespace Foam;
|
||||
@ -50,7 +49,7 @@ int main(int argc, char *argv[])
|
||||
argList args(argc, argv, false, true);
|
||||
|
||||
token tok1;
|
||||
Info<< "default construct: " << tok1.info() << endl;
|
||||
Info<< "construct null: " << tok1.info() << endl;
|
||||
|
||||
tok1 = double(3.14159);
|
||||
Info<< "assign double: " << tok1.info() << endl;
|
||||
@ -82,8 +81,7 @@ int main(int argc, char *argv[])
|
||||
|
||||
token ctok1(new token::Compound<labelList>(identity(10)));
|
||||
|
||||
Info<< "compound from pointer: "
|
||||
<< ctok1.info() << nl << ctok1 << endl;
|
||||
Info<< "compound token: " << ctok1.info() << nl << ctok1 << endl;
|
||||
}
|
||||
|
||||
{
|
||||
@ -96,155 +94,26 @@ int main(int argc, char *argv[])
|
||||
|
||||
token ctok1(ptr.release()); // release() not get()!
|
||||
|
||||
Info<< "compound from autoPtr: "
|
||||
<< ctok1.info() << nl << ctok1 << endl;
|
||||
Info<< "compound token: " << ctok1.info() << nl << ctok1 << endl;
|
||||
}
|
||||
|
||||
#if 0
|
||||
{
|
||||
// Construct from pointer
|
||||
autoPtr<token::compound> ptr
|
||||
// This version will segfault.
|
||||
// The implicit pointer cast from autoPtr to pointer wracks havoc
|
||||
|
||||
autoPtr<token::Compound<labelList>> ptr
|
||||
(
|
||||
token::compound::New("List<label>")
|
||||
new token::Compound<labelList>(identity(10))
|
||||
);
|
||||
|
||||
token ctok1(ptr.release()); // release() not get()!
|
||||
token ctok1(ptr);
|
||||
|
||||
Info<< "compound from New (via pointer): "
|
||||
<< ctok1.info() << nl << ctok1 << endl;
|
||||
}
|
||||
|
||||
{
|
||||
// Construct from autoPtr
|
||||
autoPtr<token::compound> ptr
|
||||
(
|
||||
token::Compound<scalarList>::New(10, 1.0)
|
||||
);
|
||||
|
||||
token ctok1(std::move(ptr));
|
||||
Info<< "compound from autoPtr: "
|
||||
<< ctok1.info() << nl << ctok1 << endl;
|
||||
|
||||
// Shrink
|
||||
ctok1.refCompoundToken().resize(5);
|
||||
|
||||
Info<< "resized: "
|
||||
<< ctok1.info() << nl << ctok1 << endl;
|
||||
|
||||
const scalarList* listptr = ctok1.compoundToken().isA<scalarList>();
|
||||
if (listptr)
|
||||
{
|
||||
for (scalar& val : const_cast<scalarList&>(*listptr))
|
||||
{
|
||||
val *= 5;
|
||||
}
|
||||
|
||||
Info<< "multiplied List<scalar>: "
|
||||
<< ctok1.info() << nl << ctok1 << endl;
|
||||
}
|
||||
|
||||
listptr = ctok1.isCompound<scalarList>();
|
||||
if (listptr)
|
||||
{
|
||||
for (scalar& val : const_cast<scalarList&>(*listptr))
|
||||
{
|
||||
val /= 2;
|
||||
}
|
||||
|
||||
Info<< "divided List<scalar>: "
|
||||
<< ctok1.info() << nl << ctok1 << endl;
|
||||
}
|
||||
|
||||
const labelList* listptr2 = ctok1.isCompound<labelList>();
|
||||
if (listptr2)
|
||||
{
|
||||
for (label& val : const_cast<labelList&>(*listptr2))
|
||||
{
|
||||
val /= 2;
|
||||
}
|
||||
|
||||
Info<< "divided List<label>: "
|
||||
<< ctok1.info() << nl << ctok1 << endl;
|
||||
}
|
||||
else
|
||||
{
|
||||
Info<< "compound is not List<label>" << nl;
|
||||
}
|
||||
|
||||
Info<< "Before fill_zero: " << ctok1 << endl;
|
||||
|
||||
ctok1.refCompoundToken().fill_zero();
|
||||
|
||||
Info<< "After fill_zero: " << ctok1 << endl;
|
||||
|
||||
|
||||
if (ctok1.isCompound())
|
||||
{
|
||||
auto& ct = ctok1.refCompoundToken();
|
||||
|
||||
ct.resize(20);
|
||||
bool handled = true;
|
||||
|
||||
switch (ct.typeCode())
|
||||
{
|
||||
case token::tokenType::BOOL :
|
||||
{
|
||||
UList<bool> cmpts
|
||||
(
|
||||
reinterpret_cast<bool*>(ct.data_bytes()),
|
||||
label(ct.size_bytes() / sizeof(bool))
|
||||
);
|
||||
cmpts = false;
|
||||
}
|
||||
break;
|
||||
|
||||
case token::tokenType::LABEL :
|
||||
{
|
||||
UList<label> cmpts
|
||||
(
|
||||
reinterpret_cast<label*>(ct.data_bytes()),
|
||||
label(ct.size_bytes() / sizeof(label))
|
||||
);
|
||||
cmpts = 123;
|
||||
}
|
||||
break;
|
||||
|
||||
case token::tokenType::FLOAT :
|
||||
{
|
||||
UList<float> cmpts
|
||||
(
|
||||
reinterpret_cast<float*>(ct.data_bytes()),
|
||||
label(ct.size_bytes() / sizeof(float))
|
||||
);
|
||||
cmpts = 2.7;
|
||||
}
|
||||
break;
|
||||
|
||||
case token::tokenType::DOUBLE :
|
||||
{
|
||||
UList<double> cmpts
|
||||
(
|
||||
reinterpret_cast<double*>(ct.data_bytes()),
|
||||
label(ct.size_bytes() / sizeof(double))
|
||||
);
|
||||
cmpts = 3.1415;
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
handled = false;
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
if (handled)
|
||||
{
|
||||
Info<< "assigned: " << ctok1 << nl;
|
||||
}
|
||||
}
|
||||
Info<< "compound token: " << ctok1.info() << nl << ctok1 << endl;
|
||||
}
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
// ************************************************************************* //
|
||||
|
||||
@ -30,7 +30,12 @@ Input
|
||||
|
||||
const labelList patchIDs
|
||||
(
|
||||
pbm.indices(polyPatchNames, true) // useGroups
|
||||
pbm.patchSet
|
||||
(
|
||||
polyPatchNames,
|
||||
false, // warnNotFound
|
||||
true // useGroups
|
||||
).sortedToc()
|
||||
);
|
||||
|
||||
label nFaceLabels = 0;
|
||||
|
||||
@ -6,7 +6,7 @@
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2011-2016 OpenFOAM Foundation
|
||||
Copyright (C) 2016-2023 OpenCFD Ltd.
|
||||
Copyright (C) 2016-2022 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM.
|
||||
@ -131,56 +131,40 @@ void modifyOrAddFace
|
||||
|
||||
|
||||
template<class Type>
|
||||
PtrList<GeometricField<Type, fvPatchField, volMesh>> subsetVolFields
|
||||
void subsetVolFields
|
||||
(
|
||||
const fvMeshSubset& subsetter,
|
||||
const IOobjectList& objects,
|
||||
const label patchi,
|
||||
const Type& exposedValue
|
||||
const Type& exposedValue,
|
||||
PtrList<GeometricField<Type, fvPatchField, volMesh>>& subFields
|
||||
)
|
||||
{
|
||||
typedef GeometricField<Type, fvPatchField, volMesh> GeoField;
|
||||
|
||||
const fvMesh& baseMesh = subsetter.baseMesh();
|
||||
|
||||
const UPtrList<const IOobject> fieldObjects
|
||||
(
|
||||
objects.csorted<GeoField>()
|
||||
);
|
||||
|
||||
PtrList<GeoField> subFields(fieldObjects.size());
|
||||
|
||||
label nFields = 0;
|
||||
for (const IOobject& io : fieldObjects)
|
||||
|
||||
for (const word& fieldName : objects.sortedNames<GeoField>())
|
||||
{
|
||||
const IOobject* ioptr = objects.findObject(fieldName);
|
||||
|
||||
if (!nFields)
|
||||
{
|
||||
Info<< "Subsetting " << GeoField::typeName << " (";
|
||||
Info<< "Subsetting " << GeoField::typeName << nl;
|
||||
}
|
||||
else
|
||||
{
|
||||
Info<< ' ';
|
||||
}
|
||||
Info<< " " << io.name() << endl;
|
||||
Info<< " " << fieldName << endl;
|
||||
|
||||
// Read unregistered
|
||||
IOobject rio(io, IOobjectOption::NO_REGISTER);
|
||||
GeoField origField(rio, baseMesh);
|
||||
GeoField origField(*ioptr, baseMesh);
|
||||
|
||||
subFields.set(nFields, subsetter.interpolate(origField));
|
||||
auto& subField = subFields[nFields];
|
||||
++nFields;
|
||||
|
||||
|
||||
// Subsetting adds 'subset' prefix. Rename field to be like original.
|
||||
subField.rename(io.name());
|
||||
subField.writeOpt(IOobjectOption::AUTO_WRITE);
|
||||
|
||||
|
||||
// Explicitly set exposed faces (in patchi) to exposedValue.
|
||||
if (patchi >= 0)
|
||||
{
|
||||
fvPatchField<Type>& fld = subField.boundaryFieldRef()[patchi];
|
||||
fvPatchField<Type>& fld =
|
||||
subFields[nFields].boundaryFieldRef()[patchi];
|
||||
|
||||
const label newStart = fld.patch().patch().start();
|
||||
const label oldPatchi = subsetter.patchMap()[patchi];
|
||||
@ -211,68 +195,48 @@ PtrList<GeometricField<Type, fvPatchField, volMesh>> subsetVolFields
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
++nFields;
|
||||
}
|
||||
}
|
||||
|
||||
if (nFields)
|
||||
{
|
||||
Info<< ')' << nl;
|
||||
}
|
||||
|
||||
return subFields;
|
||||
}
|
||||
|
||||
|
||||
template<class Type>
|
||||
PtrList<GeometricField<Type, fvsPatchField, surfaceMesh>> subsetSurfaceFields
|
||||
void subsetSurfaceFields
|
||||
(
|
||||
const fvMeshSubset& subsetter,
|
||||
const IOobjectList& objects,
|
||||
const label patchi,
|
||||
const Type& exposedValue
|
||||
const Type& exposedValue,
|
||||
PtrList<GeometricField<Type, fvsPatchField, surfaceMesh>>& subFields
|
||||
)
|
||||
{
|
||||
typedef GeometricField<Type, fvsPatchField, surfaceMesh> GeoField;
|
||||
|
||||
const fvMesh& baseMesh = subsetter.baseMesh();
|
||||
|
||||
const UPtrList<const IOobject> fieldObjects
|
||||
(
|
||||
objects.csorted<GeoField>()
|
||||
);
|
||||
|
||||
PtrList<GeoField> subFields(fieldObjects.size());
|
||||
|
||||
label nFields = 0;
|
||||
for (const IOobject& io : fieldObjects)
|
||||
|
||||
for (const word& fieldName : objects.sortedNames<GeoField>())
|
||||
{
|
||||
const IOobject* ioptr = objects.findObject(fieldName);
|
||||
|
||||
if (!nFields)
|
||||
{
|
||||
Info<< "Subsetting " << GeoField::typeName << " (";
|
||||
Info<< "Subsetting " << GeoField::typeName << nl;
|
||||
}
|
||||
else
|
||||
{
|
||||
Info<< ' ';
|
||||
}
|
||||
Info<< io.name();
|
||||
Info<< " " << fieldName << endl;
|
||||
|
||||
// Read unregistered
|
||||
IOobject rio(io, IOobjectOption::NO_REGISTER);
|
||||
GeoField origField(rio, baseMesh);
|
||||
GeoField origField(*ioptr, baseMesh);
|
||||
|
||||
subFields.set(nFields, subsetter.interpolate(origField));
|
||||
auto& subField = subFields[nFields];
|
||||
++nFields;
|
||||
|
||||
// Subsetting adds 'subset' prefix. Rename field to be like original.
|
||||
subField.rename(io.name());
|
||||
subField.writeOpt(IOobjectOption::AUTO_WRITE);
|
||||
|
||||
|
||||
// Explicitly set exposed faces (in patchi) to exposedValue.
|
||||
if (patchi >= 0)
|
||||
{
|
||||
fvsPatchField<Type>& fld = subField.boundaryFieldRef()[patchi];
|
||||
fvsPatchField<Type>& fld =
|
||||
subFields[nFields].boundaryFieldRef()[patchi];
|
||||
|
||||
const label newStart = fld.patch().patch().start();
|
||||
const label oldPatchi = subsetter.patchMap()[patchi];
|
||||
@ -304,14 +268,9 @@ PtrList<GeometricField<Type, fvsPatchField, surfaceMesh>> subsetSurfaceFields
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (nFields)
|
||||
{
|
||||
Info<< ')' << nl;
|
||||
++nFields;
|
||||
}
|
||||
|
||||
return subFields;
|
||||
}
|
||||
|
||||
|
||||
@ -325,9 +284,16 @@ void initCreatedPatches
|
||||
const typename GeoField::value_type initValue
|
||||
)
|
||||
{
|
||||
for (const GeoField& field : mesh.objectRegistry::csorted<GeoField>())
|
||||
HashTable<const GeoField*> fields
|
||||
(
|
||||
mesh.objectRegistry::lookupClass<GeoField>()
|
||||
);
|
||||
|
||||
forAllIters(fields, fieldIter)
|
||||
{
|
||||
auto& fieldBf = const_cast<GeoField&>(field).boundaryFieldRef();
|
||||
GeoField& field = const_cast<GeoField&>(*fieldIter());
|
||||
|
||||
auto& fieldBf = field.boundaryFieldRef();
|
||||
|
||||
forAll(fieldBf, patchi)
|
||||
{
|
||||
@ -360,36 +326,43 @@ void subsetTopoSets
|
||||
PtrList<TopoSet> sets;
|
||||
ReadFields<TopoSet>(objects, sets);
|
||||
|
||||
subSets.resize_null(sets.size());
|
||||
|
||||
subSets.resize(sets.size());
|
||||
forAll(sets, seti)
|
||||
{
|
||||
const TopoSet& set = sets[seti];
|
||||
TopoSet& set = sets[seti];
|
||||
|
||||
Info<< "Subsetting " << set.type() << ' ' << set.name() << endl;
|
||||
|
||||
labelHashSet subset(2*min(set.size(), map.size()));
|
||||
Info<< "Subsetting " << set.type() << " " << set.name() << endl;
|
||||
|
||||
// Map the data
|
||||
forAll(map, i)
|
||||
bitSet isSet(set.maxSize(mesh));
|
||||
for (const label id : set)
|
||||
{
|
||||
if (set.contains(map[i]))
|
||||
isSet.set(id);
|
||||
}
|
||||
|
||||
label nSet = 0;
|
||||
for (const label id : map)
|
||||
{
|
||||
if (isSet.test(id))
|
||||
{
|
||||
subset.insert(i);
|
||||
++nSet;
|
||||
}
|
||||
}
|
||||
|
||||
subSets.set
|
||||
(
|
||||
seti,
|
||||
new TopoSet
|
||||
(
|
||||
subMesh,
|
||||
set.name(),
|
||||
std::move(subset),
|
||||
IOobjectOption::AUTO_WRITE
|
||||
)
|
||||
new TopoSet(subMesh, set.name(), nSet, IOobject::AUTO_WRITE)
|
||||
);
|
||||
TopoSet& subSet = subSets[seti];
|
||||
|
||||
forAll(map, i)
|
||||
{
|
||||
if (isSet.test(map[i]))
|
||||
{
|
||||
subSet.insert(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -640,7 +613,6 @@ label findPatch(const polyBoundaryMesh& patches, const word& patchName)
|
||||
}
|
||||
|
||||
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
@ -872,117 +844,138 @@ int main(int argc, char *argv[])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Read vol fields and subset.
|
||||
PtrList<volScalarField> scalarFlds
|
||||
|
||||
wordList scalarNames(objects.sortedNames<volScalarField>());
|
||||
PtrList<volScalarField> scalarFlds(scalarNames.size());
|
||||
subsetVolFields
|
||||
(
|
||||
subsetVolFields<scalar>
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
scalar(Zero)
|
||||
)
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
scalar(Zero),
|
||||
scalarFlds
|
||||
);
|
||||
|
||||
PtrList<volVectorField> vectorFlds
|
||||
wordList vectorNames(objects.sortedNames<volVectorField>());
|
||||
PtrList<volVectorField> vectorFlds(vectorNames.size());
|
||||
subsetVolFields
|
||||
(
|
||||
subsetVolFields<vector>
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
vector(Zero)
|
||||
)
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
vector(Zero),
|
||||
vectorFlds
|
||||
);
|
||||
|
||||
wordList sphTensorNames
|
||||
(
|
||||
objects.sortedNames<volSphericalTensorField>()
|
||||
);
|
||||
PtrList<volSphericalTensorField> sphTensorFlds
|
||||
(
|
||||
subsetVolFields<sphericalTensor>
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
sphericalTensor(Zero)
|
||||
)
|
||||
sphTensorNames.size()
|
||||
);
|
||||
subsetVolFields
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
sphericalTensor(Zero),
|
||||
sphTensorFlds
|
||||
);
|
||||
|
||||
PtrList<volSymmTensorField> symmTensorFlds
|
||||
wordList symmTensorNames(objects.sortedNames<volSymmTensorField>());
|
||||
PtrList<volSymmTensorField> symmTensorFlds(symmTensorNames.size());
|
||||
subsetVolFields
|
||||
(
|
||||
subsetVolFields<symmTensor>
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
symmTensor(Zero)
|
||||
)
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
symmTensor(Zero),
|
||||
symmTensorFlds
|
||||
);
|
||||
|
||||
PtrList<volTensorField> tensorFlds
|
||||
wordList tensorNames(objects.sortedNames<volTensorField>());
|
||||
PtrList<volTensorField> tensorFlds(tensorNames.size());
|
||||
subsetVolFields
|
||||
(
|
||||
subsetVolFields<tensor>
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
tensor(Zero)
|
||||
)
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
tensor(Zero),
|
||||
tensorFlds
|
||||
);
|
||||
|
||||
// Read surface fields and subset.
|
||||
PtrList<surfaceScalarField> surfScalarFlds
|
||||
|
||||
wordList surfScalarNames(objects.sortedNames<surfaceScalarField>());
|
||||
PtrList<surfaceScalarField> surfScalarFlds(surfScalarNames.size());
|
||||
subsetSurfaceFields
|
||||
(
|
||||
subsetSurfaceFields<scalar>
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
scalar(Zero)
|
||||
)
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
scalar(Zero),
|
||||
surfScalarFlds
|
||||
);
|
||||
|
||||
PtrList<surfaceVectorField> surfVectorFlds
|
||||
wordList surfVectorNames(objects.sortedNames<surfaceVectorField>());
|
||||
PtrList<surfaceVectorField> surfVectorFlds(surfVectorNames.size());
|
||||
subsetSurfaceFields
|
||||
(
|
||||
subsetSurfaceFields<vector>
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
vector(Zero)
|
||||
)
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
vector(Zero),
|
||||
surfVectorFlds
|
||||
);
|
||||
|
||||
wordList surfSphTensorNames
|
||||
(
|
||||
objects.sortedNames<surfaceSphericalTensorField>()
|
||||
);
|
||||
PtrList<surfaceSphericalTensorField> surfSphericalTensorFlds
|
||||
(
|
||||
subsetSurfaceFields<sphericalTensor>
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
sphericalTensor(Zero)
|
||||
)
|
||||
surfSphTensorNames.size()
|
||||
);
|
||||
subsetSurfaceFields
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
sphericalTensor(Zero),
|
||||
surfSphericalTensorFlds
|
||||
);
|
||||
|
||||
wordList surfSymmTensorNames
|
||||
(
|
||||
objects.sortedNames<surfaceSymmTensorField>()
|
||||
);
|
||||
|
||||
PtrList<surfaceSymmTensorField> surfSymmTensorFlds
|
||||
(
|
||||
subsetSurfaceFields<symmTensor>
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
symmTensor(Zero)
|
||||
)
|
||||
surfSymmTensorNames.size()
|
||||
);
|
||||
|
||||
PtrList<surfaceTensorField> surfTensorFlds
|
||||
subsetSurfaceFields
|
||||
(
|
||||
subsetSurfaceFields<tensor>
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
tensor(Zero)
|
||||
)
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
symmTensor(Zero),
|
||||
surfSymmTensorFlds
|
||||
);
|
||||
|
||||
wordList surfTensorNames(objects.sortedNames<surfaceTensorField>());
|
||||
PtrList<surfaceTensorField> surfTensorFlds(surfTensorNames.size());
|
||||
subsetSurfaceFields
|
||||
(
|
||||
subsetter,
|
||||
objects,
|
||||
defaultPatchi,
|
||||
tensor(Zero),
|
||||
surfTensorFlds
|
||||
);
|
||||
|
||||
|
||||
@ -1024,8 +1017,62 @@ int main(int argc, char *argv[])
|
||||
++runTime;
|
||||
}
|
||||
|
||||
Info<< "Writing mesh without blockedCells to time "
|
||||
<< runTime.value() << endl;
|
||||
Info<< "Writing mesh without blockedCells to time " << runTime.value()
|
||||
<< endl;
|
||||
|
||||
// Subsetting adds 'subset' prefix. Rename field to be like original.
|
||||
forAll(scalarFlds, i)
|
||||
{
|
||||
scalarFlds[i].rename(scalarNames[i]);
|
||||
scalarFlds[i].writeOpt(IOobject::AUTO_WRITE);
|
||||
}
|
||||
forAll(vectorFlds, i)
|
||||
{
|
||||
vectorFlds[i].rename(vectorNames[i]);
|
||||
vectorFlds[i].writeOpt(IOobject::AUTO_WRITE);
|
||||
}
|
||||
forAll(sphTensorFlds, i)
|
||||
{
|
||||
sphTensorFlds[i].rename(sphTensorNames[i]);
|
||||
sphTensorFlds[i].writeOpt(IOobject::AUTO_WRITE);
|
||||
}
|
||||
forAll(symmTensorFlds, i)
|
||||
{
|
||||
symmTensorFlds[i].rename(symmTensorNames[i]);
|
||||
symmTensorFlds[i].writeOpt(IOobject::AUTO_WRITE);
|
||||
}
|
||||
forAll(tensorFlds, i)
|
||||
{
|
||||
tensorFlds[i].rename(tensorNames[i]);
|
||||
tensorFlds[i].writeOpt(IOobject::AUTO_WRITE);
|
||||
}
|
||||
|
||||
// Surface ones.
|
||||
forAll(surfScalarFlds, i)
|
||||
{
|
||||
surfScalarFlds[i].rename(surfScalarNames[i]);
|
||||
surfScalarFlds[i].writeOpt(IOobject::AUTO_WRITE);
|
||||
}
|
||||
forAll(surfVectorFlds, i)
|
||||
{
|
||||
surfVectorFlds[i].rename(surfVectorNames[i]);
|
||||
surfVectorFlds[i].writeOpt(IOobject::AUTO_WRITE);
|
||||
}
|
||||
forAll(surfSphericalTensorFlds, i)
|
||||
{
|
||||
surfSphericalTensorFlds[i].rename(surfSphTensorNames[i]);
|
||||
surfSphericalTensorFlds[i].writeOpt(IOobject::AUTO_WRITE);
|
||||
}
|
||||
forAll(surfSymmTensorFlds, i)
|
||||
{
|
||||
surfSymmTensorFlds[i].rename(surfSymmTensorNames[i]);
|
||||
surfSymmTensorFlds[i].writeOpt(IOobject::AUTO_WRITE);
|
||||
}
|
||||
forAll(surfTensorNames, i)
|
||||
{
|
||||
surfTensorFlds[i].rename(surfTensorNames[i]);
|
||||
surfTensorFlds[i].writeOpt(IOobject::AUTO_WRITE);
|
||||
}
|
||||
|
||||
subsetter.subMesh().write();
|
||||
|
||||
|
||||
@ -6,7 +6,7 @@
|
||||
\\/ M anipulation |
|
||||
-------------------------------------------------------------------------------
|
||||
Copyright (C) 2011-2016 OpenFOAM Foundation
|
||||
Copyright (C) 2019-2023 OpenCFD Ltd.
|
||||
Copyright (C) 2019 OpenCFD Ltd.
|
||||
-------------------------------------------------------------------------------
|
||||
License
|
||||
This file is part of OpenFOAM.
|
||||
@ -47,7 +47,7 @@ Description
|
||||
#include "Time.H"
|
||||
#include "polyMesh.H"
|
||||
#include "cellSet.H"
|
||||
#include "SortList.H"
|
||||
#include "SortableList.H"
|
||||
#include "labelIOList.H"
|
||||
#include "fvMesh.H"
|
||||
#include "volFields.H"
|
||||
@ -128,54 +128,71 @@ int main(int argc, char *argv[])
|
||||
|
||||
const scalarField& vols = mesh.cellVolumes();
|
||||
|
||||
SortList<scalar> sortedVols(vols);
|
||||
SortableList<scalar> sortedVols(vols);
|
||||
|
||||
// All cell labels, sorted per bin.
|
||||
DynamicList<DynamicList<label>> bins;
|
||||
|
||||
// Lower/upper limits
|
||||
DynamicList<scalarMinMax> limits;
|
||||
DynamicList<scalar> lowerLimits;
|
||||
DynamicList<scalar> upperLimits;
|
||||
|
||||
// Create bin0. Have upperlimit as factor times lowerlimit.
|
||||
bins.emplace_back();
|
||||
limits.emplace_back(sortedVols[0], 1.1*sortedVols[0]);
|
||||
bins.append(DynamicList<label>());
|
||||
lowerLimits.append(sortedVols[0]);
|
||||
upperLimits.append(1.1 * lowerLimits.last());
|
||||
|
||||
forAll(sortedVols, i)
|
||||
{
|
||||
if (sortedVols[i] > limits.back().max())
|
||||
if (sortedVols[i] > upperLimits.last())
|
||||
{
|
||||
// New value outside of current bin
|
||||
Info<< "Collected " << bins.back() << " elements in bin "
|
||||
<< limits.back().min() << " .. "
|
||||
<< limits.back().max() << endl;
|
||||
|
||||
// Shrink old bin.
|
||||
DynamicList<label>& bin = bins.last();
|
||||
|
||||
bin.shrink();
|
||||
|
||||
Info<< "Collected " << bin.size() << " elements in bin "
|
||||
<< lowerLimits.last() << " .. "
|
||||
<< upperLimits.last() << endl;
|
||||
|
||||
// Create new bin.
|
||||
bins.emplace_back();
|
||||
limits.emplace_back(sortedVols[i], 1.1 * sortedVols[i]);
|
||||
bins.append(DynamicList<label>());
|
||||
lowerLimits.append(sortedVols[i]);
|
||||
upperLimits.append(1.1 * lowerLimits.last());
|
||||
|
||||
Info<< "Creating new bin "
|
||||
<< limits.back().min() << " .. "
|
||||
<< limits.back().max() << endl;
|
||||
Info<< "Creating new bin " << lowerLimits.last()
|
||||
<< " .. " << upperLimits.last()
|
||||
<< endl;
|
||||
}
|
||||
|
||||
// Add to current bin.
|
||||
bins.back().push_back(sortedVols.indices()[i]);
|
||||
// Append to current bin.
|
||||
DynamicList<label>& bin = bins.last();
|
||||
|
||||
bin.append(sortedVols.indices()[i]);
|
||||
}
|
||||
Info<< endl;
|
||||
|
||||
bins.last().shrink();
|
||||
bins.shrink();
|
||||
lowerLimits.shrink();
|
||||
upperLimits.shrink();
|
||||
|
||||
|
||||
//
|
||||
// Write to cellSets.
|
||||
//
|
||||
|
||||
Info<< "Volume bins:" << nl;
|
||||
forAll(bins, bini)
|
||||
forAll(bins, binI)
|
||||
{
|
||||
const auto& bin = bins[bini];
|
||||
const DynamicList<label>& bin = bins[binI];
|
||||
|
||||
cellSet cells(mesh, "vol" + Foam::name(bini), bin.size());
|
||||
cellSet cells(mesh, "vol" + name(binI), bin.size());
|
||||
cells.insert(bin);
|
||||
|
||||
Info<< " " << limits[bini].min() << " .. " << limits[bini].max()
|
||||
Info<< " " << lowerLimits[binI] << " .. " << upperLimits[binI]
|
||||
<< " : writing " << bin.size() << " cells to cellSet "
|
||||
<< cells.name() << endl;
|
||||
|
||||
@ -277,13 +294,13 @@ int main(int argc, char *argv[])
|
||||
);
|
||||
|
||||
// Set cell values
|
||||
forAll(bins, bini)
|
||||
forAll(bins, binI)
|
||||
{
|
||||
const auto& bin = bins[bini];
|
||||
const DynamicList<label>& bin = bins[binI];
|
||||
|
||||
forAll(bin, i)
|
||||
{
|
||||
refLevel[bin[i]] = bins.size() - bini - 1;
|
||||
refLevel[bin[i]] = bins.size() - binI - 1;
|
||||
postRefLevel[bin[i]] = refLevel[bin[i]];
|
||||
}
|
||||
}
|
||||
|
||||
@ -143,10 +143,10 @@ void Foam::fileFormats::ensightMeshReader::setHandedness
|
||||
// if (((x ^ y) & z) < 0)
|
||||
// {
|
||||
// // Flipped hex
|
||||
// std::swap(verts[0], verts[4]);
|
||||
// std::swap(verts[1], verts[5]);
|
||||
// std::swap(verts[2], verts[6]);
|
||||
// std::swap(verts[3], verts[7]);
|
||||
// Swap(verts[0], verts[4]);
|
||||
// Swap(verts[1], verts[5]);
|
||||
// Swap(verts[2], verts[6]);
|
||||
// Swap(verts[3], verts[7]);
|
||||
// }
|
||||
// }
|
||||
|
||||
@ -155,27 +155,27 @@ void Foam::fileFormats::ensightMeshReader::setHandedness
|
||||
if (verts.size() == 8)
|
||||
{
|
||||
// Flipped hex
|
||||
std::swap(verts[0], verts[4]);
|
||||
std::swap(verts[1], verts[5]);
|
||||
std::swap(verts[2], verts[6]);
|
||||
std::swap(verts[3], verts[7]);
|
||||
Swap(verts[0], verts[4]);
|
||||
Swap(verts[1], verts[5]);
|
||||
Swap(verts[2], verts[6]);
|
||||
Swap(verts[3], verts[7]);
|
||||
}
|
||||
else if (verts.size() == 4)
|
||||
{
|
||||
// Flipped tet. Change orientation of base
|
||||
std::swap(verts[0], verts[1]);
|
||||
Swap(verts[0], verts[1]);
|
||||
}
|
||||
else if (verts.size() == 5)
|
||||
{
|
||||
// Flipped pyr. Change orientation of base
|
||||
std::swap(verts[1], verts[3]);
|
||||
Swap(verts[1], verts[3]);
|
||||
}
|
||||
else if (verts.size() == 6)
|
||||
{
|
||||
// Flipped prism.
|
||||
std::swap(verts[0], verts[3]);
|
||||
std::swap(verts[1], verts[4]);
|
||||
std::swap(verts[2], verts[5]);
|
||||
Swap(verts[0], verts[3]);
|
||||
Swap(verts[1], verts[4]);
|
||||
Swap(verts[2], verts[5]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -221,7 +221,7 @@ int main(int argc, char *argv[])
|
||||
|
||||
blockMesh blocks(meshDict, regionName, strategy, args.verbose());
|
||||
|
||||
if (!blocks.good())
|
||||
if (!blocks.valid())
|
||||
{
|
||||
// Could/should be Fatal?
|
||||
|
||||
|
||||
@ -54,44 +54,56 @@ Description
|
||||
);
|
||||
|
||||
// An empty zone for cut points
|
||||
pzs.emplace_back
|
||||
pzs.append
|
||||
(
|
||||
mergeName + "CutPointZone",
|
||||
pzs.size(), // index
|
||||
pzs
|
||||
new pointZone
|
||||
(
|
||||
mergeName + "CutPointZone",
|
||||
pzs.size(),
|
||||
pzs
|
||||
)
|
||||
);
|
||||
cleanupPointZones.insert(pzs.back().name());
|
||||
cleanupPointZones.insert(pzs.last().name());
|
||||
|
||||
// Coupling side 0 (master)
|
||||
fzs.emplace_back
|
||||
fzs.append
|
||||
(
|
||||
mergeName + "Side0Zone",
|
||||
identity(patch0.range()),
|
||||
false, // none are flipped
|
||||
fzs.size(), // index
|
||||
fzs
|
||||
new faceZone
|
||||
(
|
||||
mergeName + "Side0Zone",
|
||||
identity(patch0.range()),
|
||||
false, // none are flipped
|
||||
fzs.size(),
|
||||
fzs
|
||||
)
|
||||
);
|
||||
cleanupFaceZones.insert(fzs.back().name());
|
||||
cleanupFaceZones.insert(fzs.last().name());
|
||||
|
||||
// Coupling side 1 (slave)
|
||||
fzs.emplace_back
|
||||
fzs.append
|
||||
(
|
||||
mergeName + "Side1Zone",
|
||||
identity(patch1.range()),
|
||||
false, // none are flipped
|
||||
fzs.size(), // index
|
||||
fzs
|
||||
new faceZone
|
||||
(
|
||||
mergeName + "Side1Zone",
|
||||
identity(patch1.range()),
|
||||
false, // none are flipped
|
||||
fzs.size(),
|
||||
fzs
|
||||
)
|
||||
);
|
||||
cleanupFaceZones.insert(fzs.back().name());
|
||||
cleanupFaceZones.insert(fzs.last().name());
|
||||
|
||||
// An empty zone for cut faces
|
||||
fzs.emplace_back
|
||||
fzs.append
|
||||
(
|
||||
mergeName + "CutFaceZone",
|
||||
fzs.size(), // index
|
||||
fzs
|
||||
new faceZone
|
||||
(
|
||||
mergeName + "CutFaceZone",
|
||||
fzs.size(),
|
||||
fzs
|
||||
)
|
||||
);
|
||||
cleanupFaceZones.insert(fzs.back().name());
|
||||
cleanupFaceZones.insert(fzs.last().name());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -979,7 +979,7 @@ int main(int argc, char *argv[])
|
||||
// Change the front and back patch types as required
|
||||
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
word frontBackType;
|
||||
word frontBackType(word::null);
|
||||
|
||||
if (isType<extrudeModels::wedge>(model()))
|
||||
{
|
||||
|
||||
@ -947,7 +947,7 @@ void addZoneSidePatches
|
||||
|
||||
forAll(zoneSidePatch, zoneI)
|
||||
{
|
||||
if (!oneDPolyPatchType.empty())
|
||||
if (oneDPolyPatchType != word::null)
|
||||
{
|
||||
// Reuse single empty patch.
|
||||
word patchName;
|
||||
|
||||
@ -47,7 +47,7 @@ void Foam::DelaunayMesh<Triangulation>::timeCheck
|
||||
<< time().elapsedCpuTime() << " s, "
|
||||
<< "delta " << time().cpuTimeIncrement()<< " s";
|
||||
|
||||
if (!description.empty())
|
||||
if (description != word::null)
|
||||
{
|
||||
Info<< ", " << description << " ";
|
||||
}
|
||||
|
||||
@ -33,7 +33,7 @@ template<class KeyType, class DataType>
|
||||
Foam::PrintTable<KeyType, DataType>::PrintTable()
|
||||
:
|
||||
table_(),
|
||||
title_()
|
||||
title_(string::null)
|
||||
{}
|
||||
|
||||
|
||||
|
||||
@ -468,8 +468,8 @@ inline Foam::List<Foam::label> Foam::conformalVoronoiMesh::processorsAttached
|
||||
|
||||
forAll(c1Procs, aPI)
|
||||
{
|
||||
procsAttached.push_uniq(c1Procs[aPI]);
|
||||
procsAttached.push_uniq(c2Procs[aPI]);
|
||||
procsAttached.appendUniq(c1Procs[aPI]);
|
||||
procsAttached.appendUniq(c2Procs[aPI]);
|
||||
}
|
||||
|
||||
return List<label>(procsAttached);
|
||||
|
||||
@ -65,7 +65,7 @@ void Foam::conformalVoronoiMesh::timeCheck
|
||||
<< runTime.elapsedCpuTime() << " s, "
|
||||
<< "delta " << runTime.cpuTimeIncrement()<< " s";
|
||||
|
||||
if (!description.empty())
|
||||
if (description != word::null)
|
||||
{
|
||||
Info<< ", " << description << " ";
|
||||
}
|
||||
@ -78,7 +78,7 @@ void Foam::conformalVoronoiMesh::timeCheck
|
||||
|
||||
memInfo m;
|
||||
|
||||
if (m.good())
|
||||
if (m.valid())
|
||||
{
|
||||
PrintTable<word, label> memoryTable
|
||||
(
|
||||
|
||||
@ -46,8 +46,8 @@ void Foam::shortEdgeFilter2D::assignBoundaryPointRegions
|
||||
const edge& e = iter.key();
|
||||
const label regi = iter.val();
|
||||
|
||||
boundaryPointRegions[e.first()].push_uniq(regi);
|
||||
boundaryPointRegions[e.second()].push_uniq(regi);
|
||||
boundaryPointRegions[e.start()].appendUniq(regi);
|
||||
boundaryPointRegions[e.end()].appendUniq(regi);
|
||||
}
|
||||
}
|
||||
|
||||
@ -66,7 +66,7 @@ void Foam::shortEdgeFilter2D::updateEdgeRegionMap
|
||||
const edgeList& edges = surfMesh.edges();
|
||||
const labelList& meshPoints = surfMesh.meshPoints();
|
||||
|
||||
patchSizes.resize_nocopy(patchNames_.size());
|
||||
patchSizes.setSize(patchNames_.size(), 0);
|
||||
patchSizes = 0;
|
||||
|
||||
forAll(edges, edgeI)
|
||||
@ -78,13 +78,15 @@ void Foam::shortEdgeFilter2D::updateEdgeRegionMap
|
||||
|
||||
const edge& e = edges[edgeI];
|
||||
|
||||
const label startI = meshPoints[e[0]];
|
||||
const label endI = meshPoints[e[1]];
|
||||
|
||||
label region = -1;
|
||||
|
||||
const DynamicList<label>& startPtRegions =
|
||||
boundaryPtRegions[surfPtToBoundaryPt[meshPoints[e.first()]]];
|
||||
|
||||
const DynamicList<label>& endPtRegions =
|
||||
boundaryPtRegions[surfPtToBoundaryPt[meshPoints[e.second()]]];
|
||||
const DynamicList<label> startPtRegions =
|
||||
boundaryPtRegions[surfPtToBoundaryPt[startI]];
|
||||
const DynamicList<label> endPtRegions =
|
||||
boundaryPtRegions[surfPtToBoundaryPt[endI]];
|
||||
|
||||
if (startPtRegions.size() > 1 && endPtRegions.size() > 1)
|
||||
{
|
||||
|
||||
@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2016 OpenFOAM Foundation
Copyright (C) 2015-2023 OpenCFD Ltd.
Copyright (C) 2015-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -52,7 +52,6 @@ Description
#include "snapParameters.H"
#include "layerParameters.H"
#include "vtkCoordSetWriter.H"
#include "vtkSurfaceWriter.H"
#include "faceSet.H"
#include "motionSmoother.H"
#include "polyTopoChange.H"
@ -860,36 +859,19 @@ int main(int argc, char *argv[])
// Writer for writing lines
autoPtr<coordSetWriter> setFormatter;
{
const word writerType
const word setFormat
(
meshDict.getOrDefault<word>
(
"setFormat",
coordSetWriters::vtkWriter::typeName // Default: "vtk"
coordSetWriters::vtkWriter::typeName // Default: "vtk"
)
);

setFormatter = coordSetWriter::New
(
writerType,
meshDict.subOrEmptyDict("formatOptions").optionalSubDict(writerType)
);
}
// Writer for writing surfaces
refPtr<surfaceWriter> surfFormatter;
{
const word type
(
meshDict.getOrDefault<word>
(
"surfaceFormat",
surfaceWriters::vtkWriter::typeName // Default: "vtk"
)
);
surfFormatter = surfaceWriter::New
(
type,
meshDict.subOrEmptyDict("formatOptions").optionalSubDict(type)
setFormat,
meshDict.subOrEmptyDict("formatOptions").optionalSubDict(setFormat)
);
}

@ -1792,7 +1774,6 @@ int main(int argc, char *argv[])
globalToMasterPatch,
globalToSlavePatch,
setFormatter(),
surfFormatter,
dryRun
);

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2015-2017 OpenFOAM Foundation
Copyright (C) 2015-2023 OpenCFD Ltd.
Copyright (C) 2015-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -233,15 +233,58 @@ void Foam::mergeAndWrite
const fileName& outputDir
)
{
writer.open
(
setPatch.localPoints(),
setPatch.localFaces(),
(outputDir / name)
);
if (Pstream::parRun())
{
labelList pointToGlobal;
labelList uniqueMeshPointLabels;
autoPtr<globalIndex> globalPoints;
autoPtr<globalIndex> globalFaces;
faceList mergedFaces;
pointField mergedPoints;
Foam::PatchTools::gatherAndMerge
(
mesh,
setPatch.localFaces(),
setPatch.meshPoints(),
setPatch.meshPointMap(),

writer.write();
writer.clear();
pointToGlobal,
uniqueMeshPointLabels,
globalPoints,
globalFaces,

mergedFaces,
mergedPoints
);

// Write
if (Pstream::master())
{
writer.open
(
mergedPoints,
mergedFaces,
(outputDir / name),
false // serial - already merged
);

writer.write();
writer.clear();
}
}
else
{
writer.open
(
setPatch.localPoints(),
setPatch.localFaces(),
(outputDir / name),
false // serial - already merged
);

writer.write();
writer.clear();
}
}

@ -87,7 +87,11 @@ label addPatch
)
);
auto& pp = *ppPtr;
pp.addGroup(groupName);

if (!groupName.empty())
{
pp.inGroups().appendUniq(groupName);
}

// Add patch, create calculated everywhere

@ -148,15 +148,21 @@ void matchPatchFaces
// Mesh 0
//~~~~~~~

auto& intMesh0 = interfaceMesh0.emplace_back(nSourcei, -1);
interfaceMesh0.append(labelList());
auto& intMesh0 = interfaceMesh0.last();
intMesh0.setSize(nSourcei, -1);
intMesh0[sourcei] = meshi;

interfaceSource0.push_back(sourcei);
interfaceSource0.append(sourcei);

auto& intPatch0 = interfacePatch0.emplace_back(nSourcei, -1);
interfacePatch0.append(labelList());
auto& intPatch0 = interfacePatch0.last();
intPatch0.setSize(nSourcei, -1);
intPatch0[sourcei] = ppi.index();

auto& intNames0 = interfaceNames0.emplace_back(nSourcei);
interfaceNames0.append(wordList());
auto& intNames0 = interfaceNames0.last();
intNames0.setSize(nSourcei);
intNames0[sourcei] =
patchName(entryName, meshes[meshi], meshes[meshj]);

@ -164,23 +170,33 @@ void matchPatchFaces
// Mesh 1
//~~~~~~~

auto& intMesh1 = interfaceMesh1.emplace_back(nSourcej, -1);
interfaceMesh1.append(labelList());
auto& intMesh1 = interfaceMesh1.last();
intMesh1.setSize(nSourcej, -1);
intMesh1[sourcej] = meshj;

interfaceSource1.push_back(sourcej);
interfaceSource1.append(sourcej);

auto& intPatch1 = interfacePatch1.emplace_back(nSourcej, -1);
interfacePatch1.append(labelList());
auto& intPatch1 = interfacePatch1.last();
intPatch1.setSize(nSourcej, -1);
intPatch1[sourcej] = ppj.index();

auto& intNames1 = interfaceNames1.emplace_back(nSourcej);
interfaceNames1.append(wordList());
auto& intNames1 = interfaceNames1.last();
intNames1.setSize(nSourcej);
intNames1[sourcej] =
patchName(entryName, meshes[meshj], meshes[meshi]);

auto& intFaces0 = interfaceFaces0.emplace_back(nSourcei);
interfaceFaces0.append(List<DynamicList<label>>());
auto& intFaces0 = interfaceFaces0.last();
intFaces0.setSize(nSourcei);
DynamicList<label>& faces0 = intFaces0[sourcei];
faces0.setCapacity(ppi.size());

auto& intFaces1 = interfaceFaces1.emplace_back(nSourcej);
interfaceFaces1.append(List<DynamicList<label>>());
auto& intFaces1 = interfaceFaces1.last();
intFaces1.setSize(nSourcej);
DynamicList<label>& faces1 = intFaces1[sourcej];
faces1.setCapacity(ppj.size());

@ -233,7 +249,7 @@ void matchPatchFaces
{
if (weights[facei] > 0.5 || sourceMask[facei] > SMALL)
{
faces0.push_back(ppi.start()+facei);
faces0.append(ppi.start()+facei);
}
}
}
@ -243,7 +259,7 @@ void matchPatchFaces
{
if (weights[facei] > 0.5 || targetMask[facei] > SMALL)
{
faces1.push_back(ppj.start()+facei);
faces1.append(ppj.start()+facei);
}
}
}
@ -809,7 +825,7 @@ int main(int argc, char *argv[])

if (pDict.get<word>("constructFrom") == "autoPatch")
{
interRegionSources[meshi].push_back(sourcei);
interRegionSources[meshi].append(sourcei);
}
}
}

@ -357,7 +357,7 @@ int main(int argc, char *argv[])
dict.subDict("merge").get<wordRes>("patches")
).sortedToc();

Info<< "Merge baffles on " << mergePatchIDs.size()
Info<< "Detecting baffles on " << mergePatchIDs.size()
<< " patches with "
<< returnReduce(patchSize(mesh, mergePatchIDs), sumOp<label>())
<< " faces" << endl;
@ -369,7 +369,7 @@ int main(int argc, char *argv[])
dict.subDict("split").get<wordRes>("patches")
).sortedToc();

Info<< "Split baffles on " << splitPatchIDs.size()
Info<< "Detecting baffles on " << splitPatchIDs.size()
<< " patches with "
<< returnReduce(patchSize(mesh, splitPatchIDs), sumOp<label>())
<< " faces" << endl;

@ -518,7 +518,7 @@ void Foam::meshDualiser::createFacesAroundEdge

if (startDual != -1)
{
verts.push_uniq(startDual);
verts.appendUniq(startDual);
}
}
break;

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2016 OpenFOAM Foundation
Copyright (C) 2019-2023 OpenCFD Ltd.
Copyright (C) 2019-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -55,9 +55,12 @@ void ReadAndRotateFields
const dimensionedTensor& rotT
)
{
for (const IOobject& io : objects.csorted<GeoField>())
// Objects of field type
IOobjectList fields(objects.lookupClass<GeoField>());

forAllConstIters(fields, fieldIter)
{
GeoField fld(io, mesh);
GeoField fld(*fieldIter(), mesh);
Info<< " Rotating " << fld.name() << endl;
transform(fld, rotT, fld);
fld.write();

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2018 OpenFOAM Foundation
Copyright (C) 2017-2023 OpenCFD Ltd.
Copyright (C) 2017-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -158,7 +158,7 @@ void printSets(Ostream& os, const IOobjectList& objects)
{
label n = 0;

for (const IOobject& io : objects.csorted<SetType>())
for (const IOobject& io : objects.sorted<SetType>())
{
SetType set(io);
if (!n++) os << SetType::typeName << "s:" << nl;

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2017 OpenFOAM Foundation
Copyright (C) 2021-2023 OpenCFD Ltd.
Copyright (C) 2021-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -105,7 +105,7 @@ int main(int argc, char *argv[])
<< endl;

for (const IOobject& io : objects.csorted<pointSet>())
for (const IOobject& io : objects.sorted<pointSet>())
{
// Not in memory. Load it.
pointSet set(io);
@ -137,7 +137,7 @@ int main(int argc, char *argv[])

wordHashSet slaveCellSets;

for (const IOobject& io : objects.csorted<faceSet>())
for (const IOobject& io : objects.sorted<faceSet>())
{
// Not in memory. Load it.
faceSet set(io);
@ -259,7 +259,7 @@ int main(int argc, char *argv[])

for (const IOobject& io : objects.csorted<cellSet>())
for (const IOobject& io : objects.sorted<cellSet>())
{
if (!slaveCellSets.found(io.name()))
{

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2017 OpenFOAM Foundation
Copyright (C) 2015-2023 OpenCFD Ltd.
Copyright (C) 2015-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -164,8 +164,14 @@ void subsetVolFields
{
const labelList patchMap(identity(mesh.boundaryMesh().size()));

for (const GeoField& fld : mesh.objectRegistry::csorted<GeoField>())
HashTable<const GeoField*> fields
(
mesh.objectRegistry::lookupClass<GeoField>()
);
forAllConstIters(fields, iter)
{
const GeoField& fld = *iter.val();

Info<< "Mapping field " << fld.name() << endl;

tmp<GeoField> tSubFld
@ -186,7 +192,8 @@ void subsetVolFields
{
if (addedPatches.found(patchi))
{
tSubFld.ref().boundaryFieldRef()[patchi] == Zero;
tSubFld.ref().boundaryFieldRef()[patchi] ==
typename GeoField::value_type(Zero);
}
}

@ -211,8 +218,14 @@ void subsetSurfaceFields
{
const labelList patchMap(identity(mesh.boundaryMesh().size()));

for (const GeoField& fld : mesh.objectRegistry::csorted<GeoField>())
HashTable<const GeoField*> fields
(
mesh.objectRegistry::lookupClass<GeoField>()
);
forAllConstIters(fields, iter)
{
const GeoField& fld = *iter.val();

Info<< "Mapping field " << fld.name() << endl;

tmp<GeoField> tSubFld
@ -233,7 +246,8 @@ void subsetSurfaceFields
{
if (addedPatches.found(patchi))
{
tSubFld.ref().boundaryFieldRef()[patchi] == Zero;
tSubFld.ref().boundaryFieldRef()[patchi] ==
typename GeoField::value_type(Zero);
}
}

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2016 OpenFOAM Foundation
Copyright (C) 2016-2023 OpenCFD Ltd.
Copyright (C) 2016-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -70,7 +70,7 @@ label getExposedPatchId(const polyMesh& mesh, const word& patchName)

Info<< "Adding exposed internal faces to "
<< (patchId == -1 ? "new" : "existing")
<< " patch: " << patchName << nl << endl;
<< " patch \"" << patchName << "\"" << nl << endl;

return patchId;
}
@ -149,127 +149,178 @@ labelList nearestPatch(const polyMesh& mesh, const labelList& patchIDs)

//
// Subset DimensionedField/GeometricField
// Subset field-type, availability information cached
// in the availableFields hashtable.
//
template<class FieldType>
PtrList<FieldType> subsetFields
template<class Type, template<class> class PatchField, class GeoMesh>
void subsetFields
(
const fvMeshSubset& subsetter,
const IOobjectList& objects
HashTable<wordHashSet>& availableFields,
PtrList<GeometricField<Type, PatchField, GeoMesh>>& subFields
)
{
typedef GeometricField<Type, PatchField, GeoMesh> FieldType;
const word fieldType = FieldType::typeName;

const wordList fieldNames = availableFields(fieldType).sortedToc();
subFields.setSize(fieldNames.size());

const fvMesh& baseMesh = subsetter.baseMesh();

const UPtrList<const IOobject> fieldObjects
(
objects.csorted<FieldType>()
);

PtrList<FieldType> subFields(fieldObjects.size());

label nFields = 0;
for (const IOobject& io : fieldObjects)
for (const word& fieldName : fieldNames)
{
if (!nFields)
{
Info<< "Subsetting " << FieldType::typeName << " (";
Info<< "Subsetting " << fieldType << " (";
}
else
{
Info<< ' ';
}
Info<< io.name();
Info<< fieldName;

FieldType fld
(
IOobject
(
io.name(),
fieldName,
baseMesh.time().timeName(),
baseMesh,
IOobjectOption::MUST_READ,
IOobjectOption::NO_WRITE,
IOobjectOption::NO_REGISTER
IOobject::MUST_READ,
IOobject::NO_WRITE
),
baseMesh
);

subFields.set(nFields, subsetter.interpolate(fld));
auto& subField = subFields[nFields];
++nFields;

// Subsetting adds 'subset' prefix - rename to match original.
subField.rename(io.name());
subFields[nFields].rename(fieldName);

++nFields;
}

if (nFields)
{
Info<< ')' << nl;
}

return subFields;
}

// Subset point fields
template<class FieldType>
PtrList<FieldType> subsetFields
template<class Type>
void subsetPointFields
(
const fvMeshSubset& subsetter,
const IOobjectList& objects,
const pointMesh& pMesh
const pointMesh& pMesh,
HashTable<wordHashSet>& availableFields,
PtrList<GeometricField<Type, pointPatchField, pointMesh>>& subFields
)
{
typedef GeometricField<Type, pointPatchField, pointMesh> FieldType;
const word fieldType = FieldType::typeName;

const wordList fieldNames = availableFields(fieldType).sortedToc();
subFields.setSize(fieldNames.size());

const fvMesh& baseMesh = subsetter.baseMesh();

const UPtrList<const IOobject> fieldObjects
(
objects.csorted<FieldType>()
);

PtrList<FieldType> subFields(fieldObjects.size());

label nFields = 0;
for (const IOobject& io : fieldObjects)
for (const word& fieldName : fieldNames)
{
if (!nFields)
{
Info<< "Subsetting " << FieldType::typeName << " (";
Info<< "Subsetting " << fieldType << " (";
}
else
{
Info<< ' ';
}
Info<< io.name();
Info<< fieldName;

FieldType fld
(
IOobject
(
io.name(),
fieldName,
baseMesh.time().timeName(),
baseMesh,
IOobjectOption::MUST_READ,
IOobjectOption::NO_WRITE,
IOobjectOption::NO_REGISTER
IOobject::MUST_READ,
IOobject::NO_WRITE
),
pMesh
);

subFields.set(nFields, subsetter.interpolate(fld));
auto& subField = subFields[nFields];
++nFields;

// Subsetting adds 'subset' prefix - rename to match original.
subField.rename(io.name());
subFields[nFields].rename(fieldName);

++nFields;
}

if (nFields)
{
Info<< ')' << nl;
}
}

return subFields;

template<class Type>
void subsetDimensionedFields
(
const fvMeshSubset& subsetter,
HashTable<wordHashSet>& availableFields,
PtrList<DimensionedField<Type, volMesh>>& subFields
)
{
typedef DimensionedField<Type, volMesh> FieldType;
const word fieldType = FieldType::typeName;

const wordList fieldNames = availableFields(fieldType).sortedToc();
subFields.setSize(fieldNames.size());

const fvMesh& baseMesh = subsetter.baseMesh();

label nFields = 0;
for (const word& fieldName : fieldNames)
{
if (!nFields)
{
Info<< "Subsetting " << fieldType << " (";
}
else
{
Info<< ' ';
}
Info<< fieldName;

FieldType fld
(
IOobject
(
fieldName,
baseMesh.time().timeName(),
baseMesh,
IOobject::MUST_READ,
IOobject::NO_WRITE
),
baseMesh
);

subFields.set(nFields, subsetter.interpolate(fld));

// Subsetting adds 'subset' prefix - rename to match original.
subFields[nFields].rename(fieldName);

++nFields;
}

if (nFields)
{
Info<< ')' << nl;
}
}

@ -287,8 +338,7 @@ void subsetTopoSets
PtrList<TopoSet> sets;
ReadFields<TopoSet>(objects, sets);

subSets.resize_null(sets.size());

subSets.setSize(sets.size());
forAll(sets, seti)
{
const TopoSet& set = sets[seti];
@ -297,7 +347,6 @@ void subsetTopoSets

labelHashSet subset(2*min(set.size(), map.size()));

// Map the data
forAll(map, i)
{
if (set.found(map[i]))
@ -314,14 +363,13 @@ void subsetTopoSets
subMesh,
set.name(),
std::move(subset),
IOobjectOption::AUTO_WRITE
IOobject::AUTO_WRITE
)
);
}
}

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

int main(int argc, char *argv[])
{
@ -357,12 +405,6 @@ int main(int argc, char *argv[])
"Add exposed internal faces to closest of specified patches"
" instead of \"oldInternalFaces\""
);
argList::addOption
(
"exclude-patches",
"wordRes",
"Exclude single or multiple patches from the -patches selection"
);
argList::addBoolOption
(
"zone",
@ -406,59 +448,36 @@ int main(int argc, char *argv[])
// Default exposed patch id
labelList exposedPatchIDs(one{}, -1);

wordRes includePatches, excludePatches;

if (!args.readListIfPresent<wordRe>("patches", includePatches))
if (args.found("patches"))
{
if (args.found("patch"))
const wordRes patchNames(args.getList<wordRe>("patches"));

if (patchNames.size() == 1 && patchNames.first().isLiteral())
{
includePatches.resize(1);
includePatches.front() = args.get<word>("patch");
exposedPatchIDs.first() =
getExposedPatchId(mesh, patchNames.first());
}
}
args.readListIfPresent<wordRe>("exclude-patches", excludePatches);

if (includePatches.size() == 1 && includePatches.front().isLiteral())
{
// Select a single patch - no exclude possible
exposedPatchIDs.front() =
getExposedPatchId(mesh, includePatches.front());
}
else if (!includePatches.empty())
{
// Patches selected (sorted order)
exposedPatchIDs =
mesh.boundaryMesh().indices(includePatches, excludePatches);

// Only retain initial, non-processor patches
const label nNonProcessor
(
mesh.boundaryMesh().nNonProcessor()
);

forAll(exposedPatchIDs, i)
else
{
if (exposedPatchIDs[i] > nNonProcessor)
exposedPatchIDs =
mesh.boundaryMesh().patchSet(patchNames).sortedToc();

Info<< "Adding exposed internal faces to nearest of patches "
<< flatOutput(patchNames) << nl << endl;

if (exposedPatchIDs.empty())
{
exposedPatchIDs.resize(i);
break;
FatalErrorInFunction
<< nl << "No patches matched. Patches: "
<< mesh.boundaryMesh().names() << nl
<< exit(FatalError);
}
}

const wordList allPatchNames(mesh.boundaryMesh().names());

Info<< "Adding exposed internal faces to nearest of patches:" << nl
<< " include: " << flatOutput(includePatches) << nl
<< " exclude: " << flatOutput(excludePatches) << nl
<< nl;

if (exposedPatchIDs.empty())
{
FatalErrorInFunction
<< nl << "No patches matched. Patches: "
<< flatOutput(allPatchNames) << nl
<< exit(FatalError);
}
}
else if (args.found("patch"))
{
exposedPatchIDs.first() =
getExposedPatchId(mesh, args.get<word>("patch"));
}
else
{
@ -494,7 +513,7 @@ int main(int argc, char *argv[])

Info<< "Using cellSet " << selectionName << nl << endl;

cellSetPtr.emplace(mesh, selectionName);
cellSetPtr = autoPtr<cellSet>::New(mesh, selectionName);
}

@ -512,7 +531,7 @@ int main(int argc, char *argv[])
if (exposedPatchIDs.size() == 1)
{
// Single patch for exposed faces (syncPar)
subsetter.reset(selectedCells, exposedPatchIDs.front(), true);
subsetter.reset(selectedCells, exposedPatchIDs.first(), true);
}
else
{
@ -533,51 +552,54 @@ int main(int argc, char *argv[])
);
}

FixedList<label, 2> cellCount;
cellCount[0] = subsetter.subMesh().nCells();
cellCount[1] = mesh.nCells();
reduce(cellCount, sumOp<label>());

Info<< "Subset " << cellCount[0] << " of " << cellCount[1]
Info<< "Subset "
<< returnReduce(subsetter.subMesh().nCells(), sumOp<label>())
<< " of "
<< returnReduce(mesh.nCells(), sumOp<label>())
<< " cells" << nl << nl;
}

IOobjectList objects(mesh, runTime.timeName());

// Read fields and subset
#undef createSubsetFields
#define createSubsetFields(FieldType, Variable) \
PtrList<FieldType> Variable \
( \
subsetFields<FieldType>(subsetter, objects) \
);
HashTable<wordHashSet> availableFields = objects.classes();

// Read vol fields and subset
// ~~~~~~~~~~~~~~~~~~~~~~~~~~
createSubsetFields(volScalarField, vScalarFlds);
createSubsetFields(volVectorField, vVectorFlds);
createSubsetFields(volSphericalTensorField, vSphTensorFlds);
createSubsetFields(volSymmTensorField, vSymmTensorFlds);
createSubsetFields(volTensorField, vTensorFlds);

PtrList<volScalarField> vScalarFlds;
subsetFields(subsetter, availableFields, vScalarFlds);

PtrList<volVectorField> vVectorFlds;
subsetFields(subsetter, availableFields, vVectorFlds);

PtrList<volSphericalTensorField> vSphTensorFlds;
subsetFields(subsetter, availableFields, vSphTensorFlds);

PtrList<volSymmTensorField> vSymmTensorFlds;
subsetFields(subsetter, availableFields, vSymmTensorFlds);

PtrList<volTensorField> vTensorFlds;
subsetFields(subsetter, availableFields, vTensorFlds);

// Read surface fields and subset
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
createSubsetFields(surfaceScalarField, sScalarFlds);
createSubsetFields(surfaceVectorField, sVectorFlds);
createSubsetFields(surfaceSphericalTensorField, sSphTensorFlds);
createSubsetFields(surfaceSymmTensorField, sSymmTensorFlds);
createSubsetFields(surfaceTensorField, sTensorFlds);

// Read dimensioned fields and subset
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
createSubsetFields(volScalarField::Internal, dScalarFlds);
createSubsetFields(volVectorField::Internal, dVectorFlds);
createSubsetFields(volSphericalTensorField::Internal, dSphTensorFlds);
createSubsetFields(volSymmTensorField::Internal, dSymmTensorFlds);
createSubsetFields(volTensorField::Internal, dTensorFlds);
PtrList<surfaceScalarField> sScalarFlds;
subsetFields(subsetter, availableFields, sScalarFlds);

PtrList<surfaceVectorField> sVectorFlds;
subsetFields(subsetter, availableFields, sVectorFlds);

PtrList<surfaceSphericalTensorField> sSphTensorFlds;
subsetFields(subsetter, availableFields, sSphTensorFlds);

PtrList<surfaceSymmTensorField> sSymmTensorFlds;
subsetFields(subsetter, availableFields, sSymmTensorFlds);

PtrList<surfaceTensorField> sTensorFlds;
subsetFields(subsetter, availableFields, sTensorFlds);

// Read point fields and subset
@ -585,20 +607,39 @@ int main(int argc, char *argv[])

const pointMesh& pMesh = pointMesh::New(mesh);

#undef createSubsetFields
#define createSubsetFields(FieldType, Variable) \
PtrList<FieldType> Variable \
( \
subsetFields<FieldType>(subsetter, objects, pMesh) \
);
PtrList<pointScalarField> pScalarFlds;
subsetPointFields(subsetter, pMesh, availableFields, pScalarFlds);

createSubsetFields(pointScalarField, pScalarFlds);
createSubsetFields(pointVectorField, pVectorFlds);
createSubsetFields(pointSphericalTensorField, pSphTensorFlds);
createSubsetFields(pointSymmTensorField, pSymmTensorFlds);
createSubsetFields(pointTensorField, pTensorFlds);
PtrList<pointVectorField> pVectorFlds;
subsetPointFields(subsetter, pMesh, availableFields, pVectorFlds);

#undef createSubsetFields
PtrList<pointSphericalTensorField> pSphTensorFlds;
subsetPointFields(subsetter, pMesh, availableFields, pSphTensorFlds);

PtrList<pointSymmTensorField> pSymmTensorFlds;
subsetPointFields(subsetter, pMesh, availableFields, pSymmTensorFlds);

PtrList<pointTensorField> pTensorFlds;
subsetPointFields(subsetter, pMesh, availableFields, pTensorFlds);

// Read dimensioned fields and subset
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

PtrList<volScalarField::Internal> dScalarFlds;
subsetDimensionedFields(subsetter, availableFields, dScalarFlds);

PtrList<volVectorField::Internal> dVectorFlds;
subsetDimensionedFields(subsetter, availableFields, dVectorFlds);

PtrList<volSphericalTensorField::Internal> dSphTensorFlds;
subsetDimensionedFields(subsetter, availableFields, dSphTensorFlds);

PtrList<volSymmTensorField::Internal> dSymmTensorFlds;
subsetDimensionedFields(subsetter, availableFields, dSymmTensorFlds);

PtrList<volTensorField::Internal> dTensorFlds;
subsetDimensionedFields(subsetter, availableFields, dTensorFlds);

// Read topoSets and subset
@ -678,13 +719,6 @@ int main(int argc, char *argv[])
for (const auto& fld : sSymmTensorFlds) { fld.write(); }
for (const auto& fld : sTensorFlds) { fld.write(); }

// Dimensioned fields
for (const auto& fld : dScalarFlds) { fld.write(); }
for (const auto& fld : dVectorFlds) { fld.write(); }
for (const auto& fld : dSphTensorFlds) { fld.write(); }
for (const auto& fld : dSymmTensorFlds) { fld.write(); }
for (const auto& fld : dTensorFlds) { fld.write(); }

// Point fields
for (const auto& fld : pScalarFlds) { fld.write(); }
for (const auto& fld : pVectorFlds) { fld.write(); }
@ -692,6 +726,13 @@ int main(int argc, char *argv[])
for (const auto& fld : pSymmTensorFlds) { fld.write(); }
for (const auto& fld : pTensorFlds) { fld.write(); }

// Dimensioned fields
for (const auto& fld : dScalarFlds) { fld.write(); }
for (const auto& fld : dVectorFlds) { fld.write(); }
for (const auto& fld : dSphTensorFlds) { fld.write(); }
for (const auto& fld : dSymmTensorFlds) { fld.write(); }
for (const auto& fld : dTensorFlds) { fld.write(); }

Info<< "\nEnd\n" << endl;

return 0;

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2016-2017 OpenFOAM Foundation
Copyright (C) 2017-2023 OpenCFD Ltd.
Copyright (C) 2017-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -78,23 +78,23 @@ Usage

- Change solver:
\verbatim
foamDictionary system/fvSolution -entry solvers/p/solver -set PCG
foamDictionary system/fvSolution -entry solvers.p.solver -set PCG
\endverbatim

- Print bc type:
\verbatim
foamDictionary 0/U -entry boundaryField/movingWall/type
foamDictionary 0/U -entry boundaryField.movingWall.type
\endverbatim

- Change bc parameter:
\verbatim
foamDictionary 0/U -entry boundaryField/movingWall/value \
foamDictionary 0/U -entry boundaryField.movingWall.value \
-set "uniform (2 0 0)"
\endverbatim

- Change whole bc type:
\verbatim
foamDictionary 0/U -entry boundaryField/movingWall \
foamDictionary 0/U -entry boundaryField.movingWall \
-set "{type uniformFixedValue; uniformValue (2 0 0);}"
\endverbatim

@ -113,7 +113,7 @@ Usage
- Change patch type:
\verbatim
foamDictionary constant/polyMesh/boundary \
-entry entry0/fixedWalls/type -set patch
-entry entry0.fixedWalls.type -set patch
\endverbatim
This uses special parsing of Lists which stores these in the
dictionary with keyword 'entryDDD' where DDD is the position
@ -140,31 +140,21 @@ using namespace Foam;

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

//- Convert very old ':' scope syntax to less old '.' scope syntax,
//- Convert older ':' scope syntax to newer '.' scope syntax,
// but leave anything with '/' delimiters untouched
bool upgradeScope(word& entryName)
{
if (entryName.contains(':') && !entryName.contains('/'))
if (!entryName.contains('/') && entryName.contains(':'))
{
InfoErr
<< "Warning: upgrading very old ':' scope syntax: \""
<< entryName << '"' << endl;
const wordList names(fileName(entryName).components(':'));

// Make copy - cannot use stringOps::split
const wordList cmpts(fileName(entryName).components(':'));
entryName.resize(0);

entryName.clear();

bool addSep = false;

for (const word& cmpt : cmpts)
for (const word& name : names)
{
if (addSep) entryName += '.';
if (!cmpt.empty())
{
addSep = true;
entryName += cmpt;
}
if (entryName.size()) entryName.append(".");

entryName.append(name);
}

return true;
@ -186,12 +176,12 @@ public:
dictAndKeyword(const word& scopedName)
{
auto i = scopedName.rfind('/');
if (i == std::string::npos)
if (i == string::npos)
{
i = scopedName.rfind('.');
}

if (i != std::string::npos)
if (i != string::npos)
{
dict_ = scopedName.substr(0, i);
key_ = scopedName.substr(i+1);
@ -202,9 +192,15 @@ public:
}
}

const word& dict() const noexcept { return dict_; }
inline const word& dict() const
{
return dict_;
}

const word& key() const noexcept { return key_; }
inline const word& key() const
{
return key_;
}
};

@ -351,7 +347,10 @@ int main(int argc, char *argv[])
if (disableEntries)
{
// Report on stderr (once) to avoid polluting the output
InfoErr<< "Not expanding variables or dictionary directives" << endl;
if (Pstream::master())
{
Serr<< "Not expanding variables or dictionary directives" << endl;
}
entry::disableFunctionEntries = true;
}

@ -360,7 +359,11 @@ int main(int argc, char *argv[])
const unsigned prec = args.getOrDefault<unsigned>("precision", 0u);
if (prec)
{
// InfoErr<< "Output write precision set to " << prec << endl;
// if (Pstream::master())
// {
// Serr<< "Output write precision set to " << prec << endl;
// }

IOstream::defaultPrecision(prec);
Sout.precision(prec);
}
@ -476,12 +479,12 @@ int main(int argc, char *argv[])
}
changed = true;

// Print the changed entry
const auto finder = dict.csearchScoped(scopedName, keyType::REGEX);

// Print the changed entry to stderr
if (finder.good())
{
InfoErr<< finder.ref();
Info<< finder.ref();
}
}
else if (args.found("remove"))
@ -536,7 +539,6 @@ int main(int argc, char *argv[])
}
else if (args.found("keywords"))
{
// Report keywords to stdout
for (const entry& e : finder.dict())
{
Info<< e.keyword() << endl;
@ -544,36 +546,32 @@ int main(int argc, char *argv[])
}
else if (args.found("value"))
{
// Report value to stdout
if (finder.isDict())
{
Info<< finder.dict();
}
else if (finder.ref().isStream())
{
bool addSep = false;

const tokenList& tokens = finder.ref().stream();

for (const token& tok : tokens)
forAll(tokens, i)
{
if (addSep) Info<< token::SPACE;
addSep = true;
Info<< tok;
Info<< tokens[i];
if (i < tokens.size() - 1)
{
Info<< token::SPACE;
}
}
Info<< endl;
}
}
else
{
// Report entry to stdout
Info<< finder.ref();
}
}
}
else if (args.found("keywords"))
{
// Report keywords to stdout
for (const entry& e : dict)
{
Info<< e.keyword() << endl;
@ -581,13 +579,11 @@ int main(int argc, char *argv[])
}
else if (optDiff)
{
// Report difference to stdout
removeDict(dict, diffDict);
dict.write(Info, false);
}
else
{
// Report dictionary to stdout
dict.write(Info, false);
}

@ -92,8 +92,8 @@ void Foam::helpTypes::helpBoundary::execute
{
setEnv("FOAM_ABORT", "", true);

word condition;
word fieldName;
word condition(word::null);
word fieldName(word::null);

if (args.readIfPresent("browse", condition))
{

@ -72,7 +72,7 @@ void Foam::helpTypes::helpFunctionObject::execute
const fvMesh& mesh
)
{
word function;
word function(word::null);

if (args.readIfPresent("browse", function))
{

@ -80,7 +80,7 @@ void Foam::helpTypes::helpSolver::execute
const fvMesh& mesh
)
{
word solver;
word solver(word::null);

if (args.readIfPresent("browse", solver))
{

@ -6,7 +6,6 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2016 OpenFOAM Foundation
Copyright (C) 2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -90,41 +89,50 @@ int main(int argc, char *argv[])
<< endl;
}

const IOobjectList objects(mesh, runTime.timeName());

Info<< "Reading fields:" << endl;

// Read fields
#undef createFields
#define createFields(FieldType, Variable) \
PtrList<FieldType> Variable \
( \
readFields<FieldType>(objects, mesh) \
const wordList objNames
(
IOobjectList(mesh, runTime.timeName()).sortedNames()
);

createFields(volScalarField, vsf);
createFields(volVectorField, vvf);
createFields(volSphericalTensorField, vsptf);
createFields(volSymmTensorField, vsytf);
createFields(volTensorField, vtf);
PtrList<volScalarField> vsf(objNames.size());
PtrList<volVectorField> vvf(objNames.size());
PtrList<volSphericalTensorField> vsptf(objNames.size());
PtrList<volSymmTensorField> vsytf(objNames.size());
PtrList<volTensorField> vtf(objNames.size());

// Point fields
const pointMesh& pMesh = pointMesh::New(mesh);
PtrList<pointScalarField> psf(objNames.size());
PtrList<pointVectorField> pvf(objNames.size());
PtrList<pointSphericalTensorField> psptf(objNames.size());
PtrList<pointSymmTensorField> psytf(objNames.size());
PtrList<pointTensorField> ptf(objNames.size());

#undef createFields
#define createFields(FieldType, Variable) \
PtrList<FieldType> Variable \
( \
readFields<FieldType>(objects, pMesh) \
);
Info<< "Valid fields:" << endl;

createFields(pointScalarField, psf);
createFields(pointVectorField, pvf);
createFields(pointSphericalTensorField, psptf);
createFields(pointSymmTensorField, psytf);
createFields(pointTensorField, ptf);
forAll(objNames, objI)
{
IOobject obj
(
objNames[objI],
runTime.timeName(),
mesh,
IOobject::MUST_READ
);

#undef createFields
if (obj.typeHeaderOk<volScalarField>(false))
{
addToFieldList(vsf, obj, objI, mesh);
addToFieldList(vvf, obj, objI, mesh);
addToFieldList(vsptf, obj, objI, mesh);
addToFieldList(vsytf, obj, objI, mesh);
addToFieldList(vtf, obj, objI, mesh);

addToFieldList(psf, obj, objI, pointMesh::New(mesh));
addToFieldList(pvf, obj, objI, pointMesh::New(mesh));
addToFieldList(psptf, obj, objI, pointMesh::New(mesh));
addToFieldList(psytf, obj, objI, pointMesh::New(mesh));
addToFieldList(ptf, obj, objI, pointMesh::New(mesh));
}
}

Info<< endl;

@ -136,7 +144,6 @@ int main(int argc, char *argv[])
forAll(bm, patchi)
{
Info<< bm[patchi].type() << "\t: " << bm[patchi].name() << nl;

outputFieldList(vsf, patchi);
outputFieldList(vvf, patchi);
outputFieldList(vsptf, patchi);
@ -160,7 +167,6 @@ int main(int argc, char *argv[])
DynamicList<HashTable<word>> fieldToTypes(bm.size());
// Per 'group' the patches
DynamicList<DynamicList<label>> groupToPatches(bm.size());

forAll(bm, patchi)
{
HashTable<word> fieldToType;
@ -202,39 +208,40 @@ int main(int argc, char *argv[])
labelHashSet nonGroupPatches;
bm.matchGroups(patchIDs, groups, nonGroupPatches);

for (const label patchi : nonGroupPatches.sortedToc())
const labelList sortedPatches(nonGroupPatches.sortedToc());
forAll(sortedPatches, i)
{
Info<< bm[patchi].type()
<< "\t: " << bm[patchi].name() << nl;
Info<< bm[sortedPatches[i]].type()
<< "\t: " << bm[sortedPatches[i]].name() << nl;
}
for (const word& groupName : groups)
if (groups.size())
{
Info<< "group\t: " << groupName << nl;
forAll(groups, i)
{
Info<< "group\t: " << groups[i] << nl;
}
}
outputFieldList(vsf, patchIDs[0]);
outputFieldList(vvf, patchIDs[0]);
outputFieldList(vsptf, patchIDs[0]);
outputFieldList(vsytf, patchIDs[0]);
outputFieldList(vtf, patchIDs[0]);

const label patchi = patchIDs[0];

outputFieldList(vsf, patchi);
outputFieldList(vvf, patchi);
outputFieldList(vsptf, patchi);
outputFieldList(vsytf, patchi);
outputFieldList(vtf, patchi);

outputFieldList(psf, patchi);
outputFieldList(pvf, patchi);
outputFieldList(psptf, patchi);
outputFieldList(psytf, patchi);
outputFieldList(ptf, patchi);
outputFieldList(psf, patchIDs[0]);
outputFieldList(pvf, patchIDs[0]);
outputFieldList(psptf, patchIDs[0]);
outputFieldList(psytf, patchIDs[0]);
outputFieldList(ptf, patchIDs[0]);
Info<< endl;
}
else
{
// No group.
for (const label patchi : patchIDs)
forAll(patchIDs, i)
{
label patchi = patchIDs[i];
Info<< bm[patchi].type()
<< "\t: " << bm[patchi].name() << nl;

outputFieldList(vsf, patchi);
outputFieldList(vvf, patchi);
outputFieldList(vsptf, patchi);

@ -6,7 +6,6 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2016 OpenFOAM Foundation
Copyright (C) 2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -32,49 +31,30 @@ License
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

template<class GeoField>
Foam::PtrList<GeoField> Foam::readFields
void Foam::addToFieldList
(
const IOobjectList& objects,
PtrList<GeoField>& fieldList,
const IOobject& obj,
const label fieldi,
const typename GeoField::Mesh& mesh
)
{
const UPtrList<const IOobject> fieldObjects
(
objects.csorted<GeoField>()
);

PtrList<GeoField> fields(fieldObjects.size());

label nFields = 0;
for (const IOobject& io : fieldObjects)
if (obj.isHeaderClass<GeoField>())
{
if (!nFields)
{
Info<< " " << GeoField::typeName << " (";
}
else
{
Info<< ' ';
}
Info<< io.name();

fields.emplace_set(nFields, io, mesh);
++nFields;
fieldList.set
(
fieldi,
new GeoField(obj, mesh)
);
Info<< " " << GeoField::typeName << tab << obj.name() << endl;
}

if (nFields)
{
Info<< ')' << nl;
}

return fields;
}

template<class GeoField>
void Foam::outputFieldList
(
const UPtrList<GeoField>& fieldList,
const PtrList<GeoField>& fieldList,
const label patchi
)
{
@ -94,7 +74,7 @@ void Foam::outputFieldList
template<class GeoField>
void Foam::collectFieldList
(
const UPtrList<GeoField>& fieldList,
const PtrList<GeoField>& fieldList,
const label patchi,
HashTable<word>& fieldToType
)

@ -6,7 +6,6 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2016 OpenFOAM Foundation
Copyright (C) 2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -26,39 +25,39 @@ License

\*---------------------------------------------------------------------------*/

#ifndef Foam_patchSummaryTemplates_H
#define Foam_patchSummaryTemplates_H
#ifndef patchSummaryTemplates_H
#define patchSummaryTemplates_H

#include "fvCFD.H"
#include "volFields.H"
#include "IOobjectList.H"

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

namespace Foam
{
template<class GeoField>
void addToFieldList
(
PtrList<GeoField>& fieldList,
const IOobject& obj,
const label fieldi,
const typename GeoField::Mesh& mesh
);

template<class GeoField>
PtrList<GeoField> readFields
(
const IOobjectList& objects,
const typename GeoField::Mesh& mesh
);

template<class GeoField>
void outputFieldList
(
const UPtrList<GeoField>& fieldList,
const label patchi
);

template<class GeoField>
void collectFieldList
(
const UPtrList<GeoField>& fieldList,
const label patchi,
HashTable<word>& fieldToType
);
template<class GeoField>
void outputFieldList
(
const PtrList<GeoField>& fieldList,
const label patchi
);

template<class GeoField>
void collectFieldList
(
const PtrList<GeoField>& fieldList,
const label patchi,
HashTable<word>& fieldToType
);
} // End namespace Foam

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2016 OpenFOAM Foundation
Copyright (C) 2019-2023 OpenCFD Ltd.
Copyright (C) 2019-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -186,17 +186,26 @@ bool Foam::domainDecomposition::writeDecomposition(const bool decomposeSets)
{
// Read sets
IOobjectList objects(*this, facesInstance(), "polyMesh/sets");
for (const IOobject& io : objects.csorted<cellSet>())
{
cellSets.emplace_back(io);
IOobjectList sets(objects.lookupClass<cellSet>());
forAllConstIters(sets, iter)
{
cellSets.append(new cellSet(*(iter.val())));
}
}
for (const IOobject& io : objects.csorted<faceSet>())
{
faceSets.emplace_back(io);
IOobjectList sets(objects.lookupClass<faceSet>());
forAllConstIters(sets, iter)
{
faceSets.append(new faceSet(*(iter.val())));
}
}
for (const IOobject& io : objects.csorted<pointSet>())
{
pointSets.emplace_back(io);
IOobjectList sets(objects.lookupClass<pointSet>());
forAllConstIters(sets, iter)
{
pointSets.append(new pointSet(*(iter.val())));
}
}
}

@ -210,9 +219,9 @@ bool Foam::domainDecomposition::writeDecomposition(const bool decomposeSets)
facesInstance(),
polyMesh::meshSubDir,
*this,
IOobjectOption::READ_IF_PRESENT,
IOobjectOption::NO_WRITE,
IOobjectOption::NO_REGISTER
IOobject::READ_IF_PRESENT,
IOobject::NO_WRITE,
IOobject::NO_REGISTER
)
);

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2017 OpenFOAM Foundation
Copyright (C) 2015-2023 OpenCFD Ltd.
Copyright (C) 2015-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -597,19 +597,19 @@ int main(int argc, char *argv[])
polyMesh::meshSubDir/"sets"
);

for (const IOobject& io : objects.csorted<cellSet>())
for (const word& setName : objects.sortedNames<cellSet>())
{
cSetNames.insert(io.name(), cSetNames.size());
cSetNames.insert(setName, cSetNames.size());
}

for (const IOobject& io : objects.csorted<faceSet>())
for (const word& setName : objects.sortedNames<faceSet>())
{
fSetNames.insert(io.name(), fSetNames.size());
fSetNames.insert(setName, fSetNames.size());
}

for (const IOobject& io : objects.csorted<pointSet>())
for (const word& setName : objects.sortedNames<pointSet>())
{
pSetNames.insert(io.name(), pSetNames.size());
pSetNames.insert(setName, pSetNames.size());
}
}

@ -653,7 +653,7 @@ int main(int argc, char *argv[])
const labelList& cellMap =
procMeshes.cellProcAddressing()[proci];

for (const IOobject& io : objects.csorted<cellSet>())
for (const IOobject& io : objects.sorted<cellSet>())
{
// Load cellSet
const cellSet procSet(io);
@ -684,7 +684,7 @@ int main(int argc, char *argv[])
const labelList& faceMap =
procMeshes.faceProcAddressing()[proci];

for (const IOobject& io : objects.csorted<faceSet>())
for (const IOobject& io : objects.sorted<faceSet>())
{
// Load faceSet
const faceSet procSet(io);
@ -714,7 +714,7 @@ int main(int argc, char *argv[])
const labelList& pointMap =
procMeshes.pointProcAddressing()[proci];

for (const IOobject& io : objects.csorted<pointSet>())
for (const IOobject& io : objects.sorted<pointSet>())
{
// Load pointSet
const pointSet procSet(io);

@ -405,20 +405,20 @@ Foam::label Foam::parFvFieldDistributor::distributeInternalFields
{
typedef DimensionedField<Type, volMesh> fieldType;

label nFields = 0;
for
// Available fields, sorted order
const wordList fieldNames =
(
const IOobject& io :
(
selectedFields.empty()
? objects.csorted<fieldType>()
: objects.csorted<fieldType>(selectedFields)
)
)
selectedFields.empty()
? objects.sortedNames<fieldType>()
: objects.sortedNames<fieldType>(selectedFields)
);

label nFields = 0;
for (const word& fieldName : fieldNames)
{
if ("cellDist" == io.name())
if ("cellDist" == fieldName)
{
// Ignore cellDist (internal or volume) field
// There is an odd chance this is an internal field
continue;
}
if (verbose_)
@ -428,13 +428,13 @@ Foam::label Foam::parFvFieldDistributor::distributeInternalFields
Info<< " Reconstructing "
<< fieldType::typeName << "s\n" << nl;
}
Info<< " " << io.name() << nl;
Info<< " " << fieldName << nl;
}
++nFields;

tmp<fieldType> tfld
(
distributeInternalField<Type>(io)
distributeInternalField<Type>(*(objects[fieldName]))
);

if (isWriteProc_.good())
@ -470,20 +470,19 @@ Foam::label Foam::parFvFieldDistributor::distributeVolumeFields
{
typedef GeometricField<Type, fvPatchField, volMesh> fieldType;

label nFields = 0;
for
// Available fields, sorted order
const wordList fieldNames =
(
const IOobject& io :
(
selectedFields.empty()
? objects.csorted<fieldType>()
: objects.csorted<fieldType>(selectedFields)
)
)
selectedFields.empty()
? objects.sortedNames<fieldType>()
: objects.sortedNames<fieldType>(selectedFields)
);

label nFields = 0;
for (const word& fieldName : fieldNames)
{
if ("cellDist" == io.name())
if ("cellDist" == fieldName)
{
// Ignore cellDist (internal or volume) field
continue;
}
if (verbose_)
@ -493,13 +492,13 @@ Foam::label Foam::parFvFieldDistributor::distributeVolumeFields
Info<< " Reconstructing "
<< fieldType::typeName << "s\n" << nl;
}
Info<< " " << io.name() << nl;
Info<< " " << fieldName << nl;
}
++nFields;

tmp<fieldType> tfld
(
distributeVolumeField<Type>(io)
distributeVolumeField<Type>(*(objects[fieldName]))
);

if (isWriteProc_.good())
@ -535,16 +534,16 @@ Foam::label Foam::parFvFieldDistributor::distributeSurfaceFields
{
typedef GeometricField<Type, fvsPatchField, surfaceMesh> fieldType;

label nFields = 0;
for
// Available fields, sorted order
const wordList fieldNames =
(
const IOobject& io :
(
selectedFields.empty()
? objects.csorted<fieldType>()
: objects.csorted<fieldType>(selectedFields)
)
)
selectedFields.empty()
? objects.sortedNames<fieldType>()
: objects.sortedNames<fieldType>(selectedFields)
);

label nFields = 0;
for (const word& fieldName : fieldNames)
{
if (verbose_)
{
@ -553,13 +552,13 @@ Foam::label Foam::parFvFieldDistributor::distributeSurfaceFields
Info<< " Reconstructing "
<< fieldType::typeName << "s\n" << nl;
}
Info<< " " << io.name() << nl;
Info<< " " << fieldName << nl;
}
++nFields;

tmp<fieldType> tfld
(
distributeSurfaceField<Type>(io)
distributeSurfaceField<Type>(*(objects[fieldName]))
);

if (isWriteProc_.good())

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2015 OpenFOAM Foundation
Copyright (C) 2015-2023 OpenCFD Ltd.
Copyright (C) 2015-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -82,7 +82,7 @@ void Foam::parLagrangianDistributor::findClouds
)
);

cloudNames.resize_nocopy(localCloudDirs.size());
cloudNames.setSize(localCloudDirs.size());
forAll(localCloudDirs, i)
{
cloudNames[i] = localCloudDirs[i];
@ -144,10 +144,10 @@ Foam::parLagrangianDistributor::distributeLagrangianPositions

const label oldLpi = lpi.size();

labelListList sendMap;
labelListList subMap;

// Transfer buffers
PstreamBuffers pBufs(UPstream::commsTypes::nonBlocking);
// Allocate transfer buffers
PstreamBuffers pBufs(Pstream::commsTypes::nonBlocking);

{
// List of lists of particles to be transferred for all of the
@ -173,7 +173,7 @@ Foam::parLagrangianDistributor::distributeLagrangianPositions

// Per processor the indices of the particles to send
sendMap = invertOneToMany(UPstream::nProcs(), destProc);
subMap = invertOneToMany(Pstream::nProcs(), destProc);

// Stream into send buffers
@ -282,12 +282,29 @@ Foam::parLagrangianDistributor::distributeLagrangianPositions
lpi.rename(cloudName);
}

// Until now (FEB-2023) we have always used processor ordering for the
// construct map (whereas mapDistribute has local transfers first),
// so we'll stick with that for now, but can likely just use the subMap
// directly with mapDistribute and have it determine the constructMap.

labelList recvSizes;
Pstream::exchangeSizes(subMap, recvSizes);

label constructSize = 0;
labelListList constructMap(Pstream::nProcs());

forAll(constructMap, proci)
{
const label len = recvSizes[proci];
constructMap[proci] = identity(len, constructSize);
constructSize += len;
}

// The constructMap is in linear (processor) order
return autoPtr<mapDistributeBase>::New
(
mapDistributeBase::layoutTypes::linear,
std::move(sendMap)
constructSize,
std::move(subMap),
std::move(constructMap)
);
}

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2015 OpenFOAM Foundation
Copyright (C) 2018-2023 OpenCFD Ltd.
Copyright (C) 2018-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -324,11 +324,18 @@ Foam::label Foam::parLagrangianDistributor::distributeStoredFields
passivePositionParticleCloud& cloud
) const
{
bool reconstruct = false;
label nFields = 0;
HashTable<Container*> fields
(
cloud.lookupClass<Container>()
);

for (Container& field : cloud.sorted<Container>())
bool reconstruct = false;

label nFields = 0;
forAllIters(fields, iter)
{
Container& field = *(iter.val());

if (!nFields)
{
// Performing an all-to-one (reconstruct)?

Some files were not shown because too many files have changed in this diff.