Compare commits
23 Commits
multiNodeD...maintenanc
| Author | SHA1 | Date |
|---|---|---|
|  | 5aadc3a03d |  |
|  | 956fb4ca3a |  |
|  | 905d63357c |  |
|  | 0177b762c0 |  |
|  | 308615e63a |  |
|  | 2999c15f77 |  |
|  | 3d8a1c7f63 |  |
|  | bf14272826 |  |
|  | 095c9bc45b |  |
|  | 20c7f0970d |  |
|  | fd1661ae15 |  |
|  | 70874860b9 |  |
|  | 113fe48d0e |  |
|  | d94744e9f7 |  |
|  | dcf005508b |  |
|  | 868d6dd778 |  |
|  | f8e05934f1 |  |
|  | ed89d97627 |  |
|  | 5614a571f2 |  |
|  | 4136b686ba |  |
|  | c9081d5daf |  |
|  | a597c044c7 |  |
|  | d8c6b6b811 |  |
@@ -1,2 +1,2 @@
 api=2212
-patch=230110
+patch=240625
@@ -18,6 +18,8 @@ Description
     type            scalarTransport;
     libs            ("libsolverFunctionObjects.so");

     writeControl    writeTime;

     field           s;
+    schemesField    s;
+    D               1e-09;
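For orientation, a minimal sketch of how the entries from the hunk above typically sit in a complete function object dictionary (the `functions` wrapper in system/controlDict and the name `sTransport` are illustrative, not part of this change):

```
functions
{
    sTransport
    {
        type            scalarTransport;
        libs            ("libsolverFunctionObjects.so");

        writeControl    writeTime;

        field           s;          // transported scalar field
        schemesField    s;          // reuse the discretisation schemes of 's'
        D               1e-09;      // constant diffusivity [m^2/s]
    }
}
```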
@@ -174,7 +174,7 @@ Foam::tokenList Foam::functionEntries::evalEntry::evaluate
         result.writeField(toks);
     }

-    return std::move(toks);
+    return tokenList(std::move(toks.tokens()));
 }
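A side note on the return change above: when a local object already has the function's return type, `return std::move(x)` defeats copy elision (the "pessimizing move" compilers warn about), and the new form suggests `toks` is no longer a plain `tokenList`, so the result is moved out of a member via an explicit conversion. A small generic sketch of that pattern (standalone illustration, not the OpenFOAM types):

```cpp
#include <utility>
#include <vector>

struct Stream
{
    std::vector<int> data;
    std::vector<int>& tokens() { return data; }
};

// Moving a member out through an explicit conversion:
// the returned vector pilfers Stream's buffer instead of copying it.
std::vector<int> evaluate()
{
    Stream toks;
    toks.data = {1, 2, 3};
    return std::vector<int>(std::move(toks.tokens()));
}
```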
@@ -5,7 +5,7 @@
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
-    Copyright (C) 2019-2021 OpenCFD Ltd.
+    Copyright (C) 2019-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -41,9 +41,11 @@ void Foam::expressions::exprDriver::fill_random
 {
     if (seed <= 0)
     {
-        if (timeStatePtr_)
+        const TimeState* ts = this->timeState();
+
+        if (ts)
         {
-            seed = (timeStatePtr_->timeIndex() - seed);
+            seed = (ts->timeIndex() - seed);
         }
         else
         {
@@ -152,10 +152,11 @@ slicedBoundaryField
            new SlicedPatchField<Type>
            (
                mesh.boundary()[patchi],
-                DimensionedField<Type, GeoMesh>::null(),
-                bField[patchi]
+                DimensionedField<Type, GeoMesh>::null()
            )
        );
+
+        bf[patchi].UList<Type>::shallowCopy(bField[patchi]);
    }
}
@@ -201,7 +201,8 @@ inline Foam::Matrix<Form, Type>::Matrix
 )
 :
     mRows_(Mb.m()),
-    nCols_(Mb.n())
+    nCols_(Mb.n()),
+    v_(nullptr)
 {
     doAlloc();
@@ -223,7 +224,8 @@ inline Foam::Matrix<Form, Type>::Matrix
 )
 :
     mRows_(Mb.m()),
-    nCols_(Mb.n())
+    nCols_(Mb.n()),
+    v_(nullptr)
 {
     doAlloc();
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2011-2017 OpenFOAM Foundation
-    Copyright (C) 2018-2022 OpenCFD Ltd.
+    Copyright (C) 2018-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -489,12 +489,12 @@ public:

        //- Collect indirect data in processor order on master
        //  Handles contiguous/non-contiguous data, skips empty fields.
-        template<class Type, class Addr>
+        template<class ProcIDsContainer, class Type, class Addr>
        static void gather
        (
            const labelUList& offsets,  //!< offsets (master only)
            const label comm,           //!< communicator
-            const UList<int>& procIDs,
+            const ProcIDsContainer& procIDs,
            const IndirectListBase<Type, Addr>& fld,
            List<Type>& allFld,         //! output field (master only)
            const int tag = UPstream::msgType(),
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2013-2017 OpenFOAM Foundation
-    Copyright (C) 2019-2022 OpenCFD Ltd.
+    Copyright (C) 2019-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -248,12 +248,12 @@ void Foam::globalIndex::gather
}


-template<class Type, class Addr>
+template<class ProcIDsContainer, class Type, class Addr>
void Foam::globalIndex::gather
(
    const labelUList& off,  // needed on master only
    const label comm,
-    const UList<int>& procIDs,
+    const ProcIDsContainer& procIDs,
    const IndirectListBase<Type, Addr>& fld,
    List<Type>& allFld,
    const int tag,
@@ -368,7 +368,7 @@ void Foam::globalIndex::gather
    (
        offsets_,   // needed on master only
        comm,
-        UPstream::procID(comm),
+        UPstream::allProcs(comm),   // All communicator ranks
        sendData,
        allData,
        tag,

@@ -404,7 +404,7 @@ void Foam::globalIndex::gather
    (
        offsets_,   // needed on master only
        comm,
-        UPstream::procID(comm),
+        UPstream::allProcs(comm),   // All communicator ranks
        sendData,
        allData,
        tag,

@@ -622,7 +622,7 @@ void Foam::globalIndex::mpiGather
    (
        offsets_,   // needed on master only
        comm,
-        UPstream::procID(comm),
+        UPstream::allProcs(comm),   // All communicator ranks
        sendData,
        allData,
        tag,

@@ -967,7 +967,7 @@ void Foam::globalIndex::scatter
    (
        offsets_,   // needed on master only
        comm,
-        UPstream::procID(comm),
+        UPstream::allProcs(comm),   // All communicator ranks
        allData,
        localData,
        tag,
@@ -409,6 +409,10 @@ inline Foam::Tensor<Cmpt> Foam::Tensor<Cmpt>::T() const


template<class Cmpt>
+#if defined(__GNUC__) && !defined(__clang__)
+// Workaround for gcc (11+) that fails to handle tensor dot vector
+__attribute__((optimize("no-tree-vectorize")))
+#endif
inline Foam::Tensor<Cmpt>
Foam::Tensor<Cmpt>::inner(const Tensor<Cmpt>& t2) const
{

@@ -432,6 +436,10 @@ Foam::Tensor<Cmpt>::inner(const Tensor<Cmpt>& t2) const


template<class Cmpt>
+#if defined(__GNUC__) && !defined(__clang__)
+// Workaround for gcc (11+) that fails to handle tensor dot vector
+__attribute__((optimize("no-tree-vectorize")))
+#endif
inline Foam::Tensor<Cmpt>
Foam::Tensor<Cmpt>::schur(const Tensor<Cmpt>& t2) const
{

@@ -867,6 +875,10 @@ operator&(const Tensor<Cmpt>& t1, const Tensor<Cmpt>& t2)

//- Inner-product of a SphericalTensor and a Tensor
template<class Cmpt>
+#if defined(__GNUC__) && !defined(__clang__)
+// Workaround for gcc (11+) that fails to handle tensor dot vector
+__attribute__((optimize("no-tree-vectorize")))
+#endif
inline Tensor<Cmpt>
operator&(const SphericalTensor<Cmpt>& st1, const Tensor<Cmpt>& t2)
{

@@ -881,6 +893,10 @@ operator&(const SphericalTensor<Cmpt>& st1, const Tensor<Cmpt>& t2)

//- Inner-product of a Tensor and a SphericalTensor
template<class Cmpt>
+#if defined(__GNUC__) && !defined(__clang__)
+// Workaround for gcc (11+) that fails to handle tensor dot vector
+__attribute__((optimize("no-tree-vectorize")))
+#endif
inline Tensor<Cmpt>
operator&(const Tensor<Cmpt>& t1, const SphericalTensor<Cmpt>& st2)
{

@@ -895,6 +911,10 @@ operator&(const Tensor<Cmpt>& t1, const SphericalTensor<Cmpt>& st2)

//- Inner-product of a SymmTensor and a Tensor
template<class Cmpt>
+#if defined(__GNUC__) && !defined(__clang__)
+// Workaround for gcc (11+) that fails to handle tensor dot vector
+__attribute__((optimize("no-tree-vectorize")))
+#endif
inline Tensor<Cmpt>
operator&(const SymmTensor<Cmpt>& st1, const Tensor<Cmpt>& t2)
{

@@ -917,6 +937,10 @@ operator&(const SymmTensor<Cmpt>& st1, const Tensor<Cmpt>& t2)

//- Inner-product of a Tensor and a SymmTensor
template<class Cmpt>
+#if defined(__GNUC__) && !defined(__clang__)
+// Workaround for gcc (11+) that fails to handle tensor dot vector
+__attribute__((optimize("no-tree-vectorize")))
+#endif
inline Tensor<Cmpt>
operator&(const Tensor<Cmpt>& t1, const SymmTensor<Cmpt>& st2)
{

@@ -957,6 +981,10 @@ operator&(const Tensor<Cmpt>& t, const Vector<Cmpt>& v)

//- Inner-product of a Vector and a Tensor
template<class Cmpt>
+#if defined(__GNUC__) && !defined(__clang__)
+// Workaround for gcc (11+) that fails to handle tensor dot vector
+__attribute__((optimize("no-tree-vectorize")))
+#endif
inline Vector<Cmpt>
operator&(const Vector<Cmpt>& v, const Tensor<Cmpt>& t)
{
@@ -29,6 +29,7 @@ License
#include "word.H"
#include "debug.H"
#include <cctype>
+#include <cstdint>
#include <sstream>

// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
@@ -641,7 +641,16 @@ void Foam::UPstream::freePstreamCommunicator(const label communicator)
    }

    // Not touching the first two communicators (SELF, WORLD)
-    if (communicator > 1)
+    // or anything out-of bounds.
+    //
+    // No UPstream communicator indices when MPI is initialized outside
+    // of OpenFOAM - thus needs a bounds check too!
+
+    if
+    (
+        communicator > 1
+     && (communicator < PstreamGlobals::MPICommunicators_.size())
+    )
    {
        if (MPI_COMM_NULL != PstreamGlobals::MPICommunicators_[communicator])
        {
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2013-2016 OpenFOAM Foundation
-    Copyright (C) 2022 OpenCFD Ltd.
+    Copyright (C) 2022-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -131,6 +131,9 @@ makeLESModel(dynamicKEqn);
#include "dynamicLagrangian.H"
makeLESModel(dynamicLagrangian);

+#include "sigma.H"
+makeLESModel(sigma);
+
#include "SpalartAllmarasDES.H"
makeLESModel(SpalartAllmarasDES);
@@ -5,7 +5,7 @@
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
-    Copyright (C) 2017-2022 OpenCFD Ltd.
+    Copyright (C) 2017-2024 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -122,7 +122,8 @@ std::string Foam::fileFormats::NASCore::nextNasField
(
    const std::string& str,
    std::string::size_type& pos,
-    std::string::size_type len
+    const std::string::size_type width,
+    const bool free_format
)
{
    const auto beg = pos;
@@ -130,15 +131,23 @@ std::string Foam::fileFormats::NASCore::nextNasField

    if (end == std::string::npos)
    {
-        pos = beg + len;    // Continue after field width
+        if (free_format)
+        {
+            // Nothing left
+            pos = str.size();
+            return str.substr(beg);
+        }
+
+        // Fixed format - continue after field width
+        pos = beg + width;
+        return str.substr(beg, width);
    }
    else
    {
-        len = (end - beg);  // Efffective width
-        pos = end + 1;      // Continue after comma
+        // Free format - continue after comma
+        pos = end + 1;
+        return str.substr(beg, (end - beg));
    }
-
-    return str.substr(beg, len);
}
@@ -235,8 +244,8 @@ void Foam::fileFormats::NASCore::writeCoord
    // 2 ID   : point ID - requires starting index of 1
    // 3 CP   : coordinate system ID (blank)
    // 4 X1   : point x coordinate
-    // 5 X2   : point x coordinate
-    // 6 X3   : point x coordinate
+    // 5 X2   : point y coordinate
+    // 6 X3   : point z coordinate
    // 7 CD   : coordinate system for displacements (blank)
    // 8 PS   : single point constraints (blank)
    // 9 SEID : super-element ID
@@ -5,7 +5,7 @@
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
-    Copyright (C) 2017-2022 OpenCFD Ltd.
+    Copyright (C) 2017-2024 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -48,7 +48,6 @@ SourceFiles

namespace Foam
{
-
namespace fileFormats
{
@@ -74,18 +73,18 @@ public:
        //- Output load format
        enum loadFormat
        {
-            PLOAD2,
-            PLOAD4
+            PLOAD2,     //!< Face load (eg, pressure)
+            PLOAD4      //!< Vertex load
        };

-        //- Selection names for the NASTRAN file field formats
+        //- Selection names for the NASTRAN load formats
        static const Enum<loadFormat> loadFormatNames;


    // Constructors

        //- Default construct
-        NASCore() = default;
+        NASCore() noexcept = default;


    // Public Static Member Functions
@@ -93,18 +92,20 @@ public:
        //- Extract numbers from things like "-2.358-8" (same as "-2.358e-8")
        static scalar readNasScalar(const std::string& str);

-        //- A string::substr() to handle fixed-format and free-format NASTRAN.
-        //  Returns the substr to the next comma (if found) or the given length
-        //
-        //  \param str The string to extract from
-        //  \param pos On input, the position of the first character of the
-        //      substring. On output, advances to the next position to use.
-        //  \param len The fixed-format length to use if a comma is not found.
+        //- A std::string::substr() variant to handle fixed-format and
+        //- free-format NASTRAN.
+        //  Returns the substr until the next comma (if found)
+        //  or the given fixed width
        static std::string nextNasField
        (
+            //! The string to extract from
            const std::string& str,
+            //! [in,out] The parse position within \p str
            std::string::size_type& pos,
-            std::string::size_type len
+            //! The fixed-format width to use (if comma is not found)
+            const std::string::size_type width,
+            //! The input is known to be free-format
+            const bool free_format = false
        );
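As a reading aid, a small standalone sketch of the two modes the updated `nextNasField` distinguishes (a re-implementation for illustration only, not the OpenFOAM source): free format consumes up to the next comma, fixed format a constant field width.

```cpp
#include <iostream>
#include <string>

// Minimal stand-in for NASCore::nextNasField (illustration only)
std::string nextField
(
    const std::string& str,
    std::string::size_type& pos,
    const std::string::size_type width,
    const bool free_format = false
)
{
    const auto beg = pos;
    const auto end = str.find(',', pos);

    if (end == std::string::npos)
    {
        if (free_format)
        {
            pos = str.size();      // nothing left
            return str.substr(beg);
        }
        pos = beg + width;         // fixed format: advance by field width
        return str.substr(beg, width);
    }

    pos = end + 1;                 // free format: continue after the comma
    return str.substr(beg, (end - beg));
}

int main()
{
    std::string fixed = "GRID        126       0 -5.56E+02";
    std::string::size_type pos = 0;
    std::cout << '[' << nextField(fixed, pos, 8) << "]\n";       // [GRID    ]

    std::string free = "GRID,126,0,-5.56E+02";
    pos = 0;
    std::cout << '[' << nextField(free, pos, 8, true) << "]\n";  // [GRID]
    return 0;
}
```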
@@ -135,8 +135,9 @@ int Foam::fileFormats::STLCore::detectBinaryHeader

        bad =
        (
-            nTris < int(dataFileSize - STLHeaderSize)/50
-         || nTris > int(dataFileSize - STLHeaderSize)/25
+            dataFileSize < STLHeaderSize
+         || nTris < (dataFileSize - STLHeaderSize)/50
+         || nTris > (dataFileSize - STLHeaderSize)/25
        );
    }

@@ -208,8 +209,9 @@ Foam::fileFormats::STLCore::readBinaryHeader

        bad =
        (
-            nTris < int(dataFileSize - STLHeaderSize)/50
-         || nTris > int(dataFileSize - STLHeaderSize)/25
+            dataFileSize < STLHeaderSize
+         || nTris < (dataFileSize - STLHeaderSize)/50
+         || nTris > (dataFileSize - STLHeaderSize)/25
        );
    }
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2016-2017 Wikki Ltd
-    Copyright (C) 2018-2022 OpenCFD Ltd.
+    Copyright (C) 2018-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -180,12 +180,26 @@ Foam::faBoundaryMesh::faBoundaryMesh

void Foam::faBoundaryMesh::calcGeometry()
{
    // processorFaPatch geometry triggers calculation of pointNormals.
+    // processor initGeometry send/recv the following:
+    //   - edgeCentres()     : faMesh::edgeCentres()
+    //   - edgeLengths()     : faMesh::Le()
+    //   - edgeFaceCentres() : faMesh::areaCentres()
+    //
+    // faMesh::Le() has its own point-to-point communication (OK) but
+    // triggers either/or edgeAreaNormals(), pointAreaNormals()
+    // with their own communication that can block.

    // This uses parallel comms and hence will not be trigggered
    // on processors that do not have a processorFaPatch so instead
    // force construction.

    (void)mesh_.edgeAreaNormals();
    (void)mesh_.pointAreaNormals();
+
+    (void)mesh_.areaCentres();
+    (void)mesh_.faceAreaNormals();


    PstreamBuffers pBufs(Pstream::defaultCommsType);

    if
@@ -773,12 +787,15 @@ bool Foam::faBoundaryMesh::checkDefinition(const bool report) const

void Foam::faBoundaryMesh::movePoints(const pointField& p)
{
-    // processorFaPatch geometry triggers calculation of pointNormals.
-    // This uses parallel comms and hence will not be trigggered
-    // on processors that do not have a processorFaPatch so instead
-    // force construction.
+    // See comments in calcGeometry()

    (void)mesh_.edgeAreaNormals();
    (void)mesh_.pointAreaNormals();
+
+    (void)mesh_.areaCentres();
+    (void)mesh_.faceAreaNormals();


    PstreamBuffers pBufs(Pstream::defaultCommsType);

    if
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2016-2017 Wikki Ltd
-    Copyright (C) 2020-2022 OpenCFD Ltd.
+    Copyright (C) 2020-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -277,6 +277,27 @@ void Foam::faMesh::clearOut() const

// * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //

+void Foam::faMesh::syncGeom()
+{
+    if (UPstream::parRun())
+    {
+        // areaCentres()
+        if (faceCentresPtr_)
+        {
+            faceCentresPtr_->boundaryFieldRef()
+                .evaluateCoupled<processorFaPatch>();
+        }
+
+        // faceAreaNormals()
+        if (faceAreaNormalsPtr_)
+        {
+            faceAreaNormalsPtr_->boundaryFieldRef()
+                .evaluateCoupled<processorFaPatch>();
+        }
+    }
+}
+
+
bool Foam::faMesh::init(const bool doInit)
{
    if (doInit)
@@ -296,18 +317,7 @@ bool Foam::faMesh::init(const bool doInit)
    // Calculate the geometry for the patches (transformation tensors etc.)
    boundary_.calcGeometry();

-    // Ensure processor/processor information is properly synchronised
-    if (Pstream::parRun())
-    {
-        const_cast<areaVectorField&>(areaCentres()).boundaryFieldRef()
-            .evaluateCoupled<processorFaPatch>();
-
-        // This roughly corresponds to what OpenFOAM-v2112 (and earlier) had,
-        // but should nominally be unnecessary.
-        //
-        /// const_cast<areaVectorField&>(faceAreaNormals()).boundaryFieldRef()
-        ///     .evaluateCoupled<processorFaPatch>();
-    }
+    syncGeom();

    return false;
}
@@ -989,7 +999,6 @@ bool Foam::faMesh::movePoints()

    clearGeomNotAreas();

-    // To satisfy the motion interface for MeshObject, const cast is needed
    if (patchPtr_)
    {
        patchPtr_->movePoints(newPoints);

@@ -1003,6 +1012,8 @@ bool Foam::faMesh::movePoints()

    // Note: Fluxes were dummy?

+    syncGeom();
+
    return true;
}
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2016-2017 Wikki Ltd
-    Copyright (C) 2021-2022 OpenCFD Ltd.
+    Copyright (C) 2021-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -617,6 +617,10 @@ public:
            //- Initialise non-demand-driven data etc
            bool init(const bool doInit);

+            //- Processor/processor synchronisation for geometry fields.
+            //  Largely internal use only (slightly hacky).
+            void syncGeom();
+

    // Database
@@ -898,6 +898,12 @@ void Foam::faMesh::calcFaceCentres() const
            }
        }
    }
+
+    // Parallel consistency, exchange on processor patches
+    if (UPstream::parRun())
+    {
+        centres.boundaryFieldRef().evaluateCoupled<processorFaPatch>();
+    }
}


@@ -1110,6 +1116,12 @@ void Foam::faMesh::calcFaceAreaNormals() const
                = edgeNormalsBoundary[patchi];
        }
    }
+
+    // Parallel consistency, exchange on processor patches
+    if (UPstream::parRun())
+    {
+        faceNormals.boundaryFieldRef().evaluateCoupled<processorFaPatch>();
+    }
}
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2012-2016 OpenFOAM Foundation
-    Copyright (C) 2015-2022 OpenCFD Ltd.
+    Copyright (C) 2015-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -61,6 +61,8 @@ void Foam::faMeshTools::forceDemandDriven(faMesh& mesh)
    (void)mesh.pointAreaNormals();
    (void)mesh.faceCurvatures();
    (void)mesh.edgeTransformTensors();
+
+    mesh.syncGeom();
}
@@ -179,7 +179,7 @@ void Foam::InjectedParticleDistributionInjection<CloudType>::initialise()
            sumPow3 += pow3(diameters[particlei]);
        }

-        const scalar volume = sumPow3*mathematical::pi/16.0;
+        const scalar volume = sumPow3*mathematical::pi/6.0;
        sumVolume += volume;
        volumeFlowRate_[injectori] = volume/dTime;
@@ -119,7 +119,7 @@ void Foam::InjectedParticleInjection<CloudType>::initialise()
    scalar sumVolume = 0;
    forAll(volume, i)
    {
-        scalar vol = pow3(diameter_[i])*mathematical::pi/16.0;
+        scalar vol = pow3(diameter_[i])*mathematical::pi/6.0;
        volume[i] = vol;
        sumVolume += vol;
    }
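Both hunks fix the same constant: for a spherical particle of diameter d the volume is V = pi*d^3/6, so the previous pi*d^3/16 under-estimated every parcel volume (and hence the computed volume flow rate) by a factor of 6/16 = 0.375.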
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2011-2017 OpenFOAM Foundation
-    Copyright (C) 2019-2022 OpenCFD Ltd.
+    Copyright (C) 2019-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -62,6 +62,53 @@ Foam::ThermoSurfaceFilm<CloudType>::ThermoSurfaceFilm

// * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * //

+template<class CloudType>
+template<class filmType>
+void Foam::ThermoSurfaceFilm<CloudType>::absorbInteraction
+(
+    filmType& film,
+    const parcelType& p,
+    const polyPatch& pp,
+    const label facei,
+    const scalar mass,
+    bool& keepParticle
+)
+{
+    DebugInfo<< "Parcel " << p.origId() << " absorbInteraction" << endl;
+
+    // Patch face normal
+    const vector& nf = pp.faceNormals()[facei];
+
+    // Patch velocity
+    const vector& Up = this->owner().U().boundaryField()[pp.index()][facei];
+
+    // Relative parcel velocity
+    const vector Urel(p.U() - Up);
+
+    // Parcel normal velocity
+    const vector Un(nf*(Urel & nf));
+
+    // Parcel tangential velocity
+    const vector Ut(Urel - Un);
+
+    film.addSources
+    (
+        pp.index(),
+        facei,
+        mass,               // mass
+        mass*Ut,            // tangential momentum
+        mass*mag(Un),       // impingement pressure
+        mass*p.hs()         // energy
+    );
+
+    this->nParcelsTransferred()++;
+
+    this->totalMassTransferred() += mass;
+
+    keepParticle = false;
+}
+
+
template<class CloudType>
bool Foam::ThermoSurfaceFilm<CloudType>::transferParcel
(
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2011-2017 OpenFOAM Foundation
-    Copyright (C) 2021 OpenCFD Ltd.
+    Copyright (C) 2021-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -143,6 +143,21 @@ public:

    // Member Functions

+        // Interaction models
+
+            //- Absorb parcel into film
+            template<class filmType>
+            void absorbInteraction
+            (
+                filmType&,
+                const parcelType& p,
+                const polyPatch& pp,
+                const label facei,
+                const scalar mass,
+                bool& keepParticle
+            );
+
+
        // Evaluation

            //- Transfer parcel from cloud to surface film
@@ -1368,6 +1368,7 @@ Foam::label Foam::snappyRefineDriver::refinementInterfaceRefine
                (
                    face2i != facei
                 && surfaceIndex[face2i] != -1
+                && cutter.faceLevel(face2i) > cLevel
                )
                {
                    // Get outwards pointing normal
@@ -269,7 +269,8 @@ bool Foam::faceAreaWeightAMI::setNextFaces
        return false;
    }

-    const labelList& srcNbrFaces = this->srcPatch().faceFaces()[srcFacei];
+    const auto& srcPatch = this->srcPatch();
+    const labelList& srcNbrFaces = srcPatch.faceFaces()[srcFacei];

    // Initialise tgtFacei
    tgtFacei = -1;

@@ -360,6 +361,7 @@ bool Foam::faceAreaWeightAMI::setNextFaces
        {
            FatalErrorInFunction
                << "Unable to set target face for source face " << srcFacei
+                << " with centre: " << srcPatch.faceCentres()[srcFacei]
                << abort(FatalError);
        }
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2011-2017 OpenFOAM Foundation
-    Copyright (C) 2017 OpenCFD Ltd.
+    Copyright (C) 2017-2024 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -62,13 +62,17 @@ bool Foam::fileFormats::NASedgeFormat::read

    while (is.good())
    {
-        string::size_type linei = 0;  // parsing position within current line
        string line;
        is.getLine(line);

-        if (line.empty() || line[0] == '$')
+        if (line.empty())
        {
-            continue; // Skip empty or comment
+            continue;   // Ignore empty
        }
+        else if (line[0] == '$')
+        {
+            // Ignore comment
+            continue;
+        }

        // Check if character 72 is continuation
@@ -94,38 +98,66 @@ bool Foam::fileFormats::NASedgeFormat::read
        }


+        // Parsing position within current line
+        std::string::size_type linei = 0;
+
+        // Is free format if line contains a comma
+        const bool freeFormat = line.contains(',');
+
        // First word (column 0-8)
        const word cmd(word::validate(nextNasField(line, linei, 8)));

        if (cmd == "CBEAM" || cmd == "CROD")
        {
-            // discard elementId (8-16)
-            (void) nextNasField(line, linei, 8); // 8-16
-            // discard groupId (16-24)
-            (void) nextNasField(line, linei, 8); // 16-24
-
-            label a = readLabel(nextNasField(line, linei, 8)); // 24-32
-            label b = readLabel(nextNasField(line, linei, 8)); // 32-40
+            // Fixed format:
+            //   8-16  : element id
+            //   16-24 : group id
+            //   24-32 : vertex
+            //   32-40 : vertex
+
+            // discard elementId
+            (void) nextNasField(line, linei, 8, freeFormat);
+            // discard groupId
+            (void) nextNasField(line, linei, 8, freeFormat);
+
+            label a = readLabel(nextNasField(line, linei, 8, freeFormat));
+            label b = readLabel(nextNasField(line, linei, 8, freeFormat));

            dynEdges.append(edge(a,b));
        }
        else if (cmd == "PLOTEL")
        {
-            // discard elementId (8-16)
-            (void) nextNasField(line, linei, 8); // 8-16
-
-            label a = readLabel(nextNasField(line, linei, 8)); // 16-24
-            label b = readLabel(nextNasField(line, linei, 8)); // 24-32
+            // Fixed format:
+            //   8-16  : element id
+            //   16-24 : vertex
+            //   24-32 : vertex
+            //   32-40 : vertex
+
+            // discard elementId (8-16)
+            (void) nextNasField(line, linei, 8, freeFormat);
+
+            label a = readLabel(nextNasField(line, linei, 8, freeFormat));
+            label b = readLabel(nextNasField(line, linei, 8, freeFormat));

            dynEdges.append(edge(a,b));
        }
        else if (cmd == "GRID")
        {
-            label index = readLabel(nextNasField(line, linei, 8)); // 8-16
-            (void) nextNasField(line, linei, 8); // 16-24
-            scalar x = readNasScalar(nextNasField(line, linei, 8)); // 24-32
-            scalar y = readNasScalar(nextNasField(line, linei, 8)); // 32-40
-            scalar z = readNasScalar(nextNasField(line, linei, 8)); // 40-48
+            // Fixed (short) format:
+            //   8-16  : point id
+            //   16-24 : coordinate system (unsupported)
+            //   24-32 : point x coordinate
+            //   32-40 : point y coordinate
+            //   40-48 : point z coordinate
+            //   48-56 : displacement coordinate system (optional, unsupported)
+            //   56-64 : single point constraints (optional, unsupported)
+            //   64-70 : super-element id (optional, unsupported)
+
+            label index = readLabel(nextNasField(line, linei, 8, freeFormat));
+            (void) nextNasField(line, linei, 8, freeFormat);
+            scalar x = readNasScalar(nextNasField(line, linei, 8, freeFormat));
+            scalar y = readNasScalar(nextNasField(line, linei, 8, freeFormat));
+            scalar z = readNasScalar(nextNasField(line, linei, 8, freeFormat));

            pointId.append(index);
            dynPoints.append(point(x, y, z));
@@ -138,6 +170,8 @@ bool Foam::fileFormats::NASedgeFormat::read
            // GRID*      126             0 -5.55999875E+02 -5.68730474E+02
            // *     2.14897901E+02

+            // Cannot be long format and free format at the same time!
+
            label index = readLabel(nextNasField(line, linei, 16)); // 8-24
            (void) nextNasField(line, linei, 16); // 24-40
            scalar x = readNasScalar(nextNasField(line, linei, 16)); // 40-56
@@ -155,7 +155,7 @@ bool Foam::vtk::writePointSet

    if (parallel)
    {
-        vtk::writeListParallel(format(), mesh.points(), pointLabels);
+        vtk::writeListParallel(format.ref(), mesh.points(), pointLabels);
    }
    else
    {
@@ -4,7 +4,6 @@ simpleGeomDecomp/simpleGeomDecomp.C
 hierarchGeomDecomp/hierarchGeomDecomp.C
 manualDecomp/manualDecomp.C
 multiLevelDecomp/multiLevelDecomp.C
-multiNodeDecomp/multiNodeDecomp.C
 metisLikeDecomp/metisLikeDecomp.C
 structuredDecomp/structuredDecomp.C
 randomDecomp/randomDecomp.C
@@ -1,58 +0,0 @@
# New Multi-Level Decomposition

The multi-node decomposition is an extension of the existing multi-level decomposition. It supports the syntax of the current multi-level decomposition, but allows you to change the decomposition tree as you wish. For example, you may split into unbalanced nodes, set the weights of some nodes to be bigger than others, or use a different decomposition method for some nodes.

You may set up the decomposition in two ways:

1. Using a domains list and a default method:

```
numberOfSubdomains 8;
multiNodeCoeffs {
    domains (2 4);
    method metis;
}
```

2. Using a dictionary for each level:

```
numberOfSubdomains 8;
multiLevelCoeffs {
    nodes {
        numberOfSubdomains 2;
        method metis;
    }
    cores {
        numberOfSubdomains 4;
        method scotch;
    }
}
```

Note that if the total number of subdomains does not match the product of the number of subdomains at each level, but a default method is provided, a new level will be inferred in order to match the total number of subdomains.

This creates a "decomposition tree": the dictionaries above create a tree where the root has two children, and each child has four children (the leaves of the tree). Every leaf in the tree is a subdomain in the final decomposition.

After setting up the decomposition, we may edit specific nodes or ranges of nodes. For example, suppose we want to split into two nodes, the first having four subdomains and the second having eight. We can use the above dictionaries, and then add:

```
domains[1] (8);
```

The square brackets indicate which nodes in the tree to edit: we want the second child of the root (indexing starts from zero). If we wanted to change the first two children of the third child of the root, we would write:

```
domains[2][0-1] (8);
```

Note that the total number of subdomains must match the number of subdomains declared after all modifications. In addition, the decomposition into two nodes will be done as if they were of the same size, hence the first four subdomains will be bigger than the other eight. To fix this, we may:

1. Give the second node twice the weight:

```
weight[1] 2;
```

2. Set the weights initialization to relative: the weights of the children are then first computed from the number of leaves in their subtrees. Note that this updates the initialization of the whole subtree, but the `weight` parameter can still override it per node:

```
weightsInitialization[1] relative;
```

We may also set a special method dictionary that decomposes differently for some nodes:

```
method[2-4] {
    numberOfSubdomains 4;
    method metis;
    coeffs {
        ...
    }
}
```
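To tie the README's pieces together, a sketch of a complete decomposeParDict using this (now removed) syntax, combining an unbalanced split with a per-node weight (entry names as documented above; the method name `multiNode` follows the README's `multiNodeCoeffs` convention and is an assumption, and 12 = 4 + 8 leaves after the override):

```
numberOfSubdomains 12;
method multiNode;

multiNodeCoeffs
{
    domains (2 4);      // 2 nodes x 4 cores = 8 leaves initially
    method scotch;      // default method for every level

    domains[1] (8);     // second node gets 8 subdomains instead of 4
    weight[1] 2;        // ...and twice the weight of the first node
}
```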
@@ -1,788 +0,0 @@
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2011-2017 OpenFOAM Foundation
    Copyright (C) 2017-2021 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

    OpenFOAM is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with OpenFOAM.  If not, see <http://www.gnu.org/licenses/>.

\*---------------------------------------------------------------------------*/

#include "multiNodeDecomp.H"
#include "addToRunTimeSelectionTable.H"
#include "IFstream.H"
#include "globalIndex.H"
#include "mapDistribute.H"
#include "DynamicList.H"


// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //

namespace Foam
{
    defineTypeNameAndDebug(multiNodeDecomp, 0);
    addToRunTimeSelectionTable
    (
        decompositionMethod,
        multiNodeDecomp,
        dictionary
    );
}


// * * * * * * * * * * * * * Private Member Functions  * * * * * * * * * * * //
namespace Foam {

void multiNodeDecomp::initializeMetadata(const dictionary& coeffsDict) {
    word defaultMethod;
    dictionary defaultMethodDict;
    if(coeffsDict.readIfPresent("method", defaultMethod, keyType::LITERAL)) {
        defaultMethodDict.add("method",defaultMethod);
        const dictionary& subMethodCoeffsDict
        (
            findCoeffsDict
            (
                coeffsDict,
                defaultMethod + "Coeffs",
                selectionType::NULL_DICT
            )
        );
        if(subMethodCoeffsDict.size())
            defaultMethodDict.add(subMethodCoeffsDict.dictName(), subMethodCoeffsDict);
    }
    labelList domains;

    label nTotal = 0;
    label nLevels = 0;

    //Check if any meta argument is changed using the new syntax.
    //If they are, we cannot infer an additional level of decomposition,
    //as it may interfere with the indices.
    List<string> domainChanges = metaParser::getEntries(coeffsDict, "domains");
    List<string> methodChanges = metaParser::getEntries(coeffsDict, "method");
    List<string> weightChanges = metaParser::getEntries(coeffsDict, "weight");
    //We can parse weightMode without brackets too
    List<string> weightModeChanges = metaParser::getEntries(coeffsDict, "weightsInitialization", true);

    bool bChangesDomains = !domainChanges.empty();
    bool bChangesArguments = bChangesDomains
        || (!methodChanges.empty())
        || (!weightChanges.empty())
        || (!weightModeChanges.empty());

    bool bMetadataInitialized = false;

    // Found (non-recursive, no patterns) "method" and "domains" ?
    // Allow as quick short-cut entry
    if
    (
        // non-recursive, no patterns
        coeffsDict.readIfPresent("method", defaultMethod, keyType::LITERAL)
        // non-recursive, no patterns
        && coeffsDict.readIfPresent("domains", domains, keyType::LITERAL)
    )
    {
        // Short-cut version specified by method, domains only

        nTotal = (domains.empty() ? 0 : 1);
        for (const label n : domains)
        {
            nTotal *= n;
            ++nLevels;
        }

        //Update domains here
        if(nTotal != 0 && bChangesDomains) {
            rootMetadata_.initialize(
                domains,
                &defaultMethodDict
            );
            bMetadataInitialized = true;
            for(string key : domainChanges)
                rootMetadata_.updateDomains( key,
                    coeffsDict.get<labelList>(key, keyType::LITERAL));

            nTotal = rootMetadata_.getSize();
        }

        if (nTotal == 1)
        {
            // Emit Warning
            nTotal = nDomains();
            nLevels = 1;
            domains.setSize(1);
            domains[0] = nTotal;
        }
        //If bChangesDomains is true, we do not want to add another dimension as this
        //may affect the user's assignments of domains/weights/methods later on.
        else if (nTotal > 0 && nTotal < nDomains() && !(nDomains() % nTotal) && !bChangesArguments)
        {
            // nTotal < nDomains, but with an integral factor,
            // which we insert as level 0
            ++nLevels;

            labelList old(std::move(domains));

            domains.setSize(old.size()+1);

            domains[0] = nDomains() / nTotal;
            forAll(old, i)
            {
                domains[i+1] = old[i];
            }
            nTotal *= domains[0];


            Info<< "    inferred level 0 with " << domains[0]
                << " domains" << nl << nl;
        }

        if (!nLevels || nTotal != nDomains())
        {
            FatalErrorInFunction
                << "Top level decomposition specifies " << nDomains()
                << " domains which is not equal to the product of"
                << " all sub domains " << nTotal
                << exit(FatalError);
        }

        if(!bMetadataInitialized) {
            bMetadataInitialized = true;
            rootMetadata_.initialize(
                domains,
                &defaultMethodDict
            );
        }
    }
    else
    {
        // Specified by full dictionaries

        // Create editable methods dictionaries
        // - Only consider sub-dictionaries with a "numberOfSubdomains" entry
        //   This automatically filters out any coeffs dictionaries

        label nTotal = 1;
        List<const dictionary*> methods;
        for (const entry& dEntry : coeffsDict)
        {
            word methodName;

            if
            (
                dEntry.isDict()
                // non-recursive, no patterns
                && dEntry.dict().found("numberOfSubdomains", keyType::LITERAL)
            )
            {
                domains.append(dEntry.dict().get<label>("numberOfSubdomains"));
                nTotal *= domains.last();
                // No method specified? can use a default method?

                const bool addDefaultMethod
                (
                    !(dEntry.dict().found("method", keyType::LITERAL))
                    && !defaultMethod.empty()
                );
                if(!(dEntry.dict().found("method",keyType::LITERAL)) && defaultMethod.empty()) {
                    FatalErrorInFunction <<
                        dEntry.keyword() <<
                        " dictionary does not contain method, and no default method is specified."
                        << nl << exit(FatalError);
                }
                dictionary* levelDict = new dictionary(dEntry.dict());
                levelDict->remove("numberOfSubdomains");
                if(addDefaultMethod) levelDict->add("method", defaultMethod);
                methods.append(levelDict);
            }
        }
        if(domains.empty())
            nTotal = 0;


        rootMetadata_.initialize(domains, methods[0]);
        bMetadataInitialized = true;
        for(string key : domainChanges)
            rootMetadata_.updateDomains( key,
                coeffsDict.get<labelList>(key, keyType::LITERAL));

        if(nTotal != nDomains()) {
            FatalErrorInFunction
                << "Top level decomposition specifies " << nDomains()
                << " domains which is not equal to the product of"
                << " all sub domains " << nTotal << " manually defined by dictionaries. "
                << exit(FatalError);
        }
        rootMetadata_.setLeveledDictionaries(methods);
        for(const dictionary* method : methods)
            delete method;
    }


    for(string key : methodChanges)
        rootMetadata_.updateMethod(key, coeffsDict.subDict(key, keyType::LITERAL));

    for(string key : weightChanges)
        rootMetadata_.updateWeight(key, coeffsDict.get<label>(key, keyType::LITERAL));

    for(string key : weightModeChanges) {
        word value = coeffsDict.get<word>(key, keyType::LITERAL);
        WeightsInitialization newValue = UNKNOWN;

        if(value=="uniform")
            newValue = UNIFORM;
        else if(value == "relative")
            newValue = RELATIVE;
        else
            FatalErrorInFunction <<
                "unknown weights initialization (" << value << "). Must be one of: relative, uniform."
                << nl << exit(FatalError);

        rootMetadata_.updateWeightsInitialization(key, newValue);
    }

    if(!rootMetadata_.isLeaf())
        rootMetadata_.constructMethods();
}
// Given a subset of cells determine the new global indices. The problem
// is in the cells from neighbouring processors which need to be renumbered.
void multiNodeDecomp::subsetGlobalCellCells
(
    const label nDomains,
    const label domainI,
    const labelList& dist,

    const labelListList& cellCells,
    const labelList& set,
    labelListList& subCellCells,
    labelList& cutConnections
) const
{
    // Determine new index for cells by inverting subset
    labelList oldToNew(invert(cellCells.size(), set));

    globalIndex globalCells(cellCells.size());

    // Subset locally the elements for which I have data
    subCellCells = UIndirectList<labelList>(cellCells, set);

    // Get new indices for neighbouring processors
    List<Map<label>> compactMap;
    mapDistribute map(globalCells, subCellCells, compactMap);
    map.distribute(oldToNew);
    labelList allDist(dist);
    map.distribute(allDist);

    // Now we have:
    // oldToNew : the locally-compact numbering of all our cellCells. -1 if
    //            cellCell is not in set.
    // allDist : destination domain for all our cellCells
    // subCellCells : indexes into oldToNew and allDist

    // Globally compact numbering for cells in set.
    globalIndex globalSubCells(set.size());

    // Now subCellCells contains indices into oldToNew which are the
    // new locations of the neighbouring cells.

    cutConnections.setSize(nDomains);
    cutConnections = 0;

    forAll(subCellCells, subCelli)
    {
        labelList& cCells = subCellCells[subCelli];

        // Keep the connections to valid mapped cells
        label newI = 0;
        forAll(cCells, i)
        {
            // Get locally-compact cell index of neighbouring cell
            const label nbrCelli = oldToNew[cCells[i]];
            if (nbrCelli == -1)
            {
                cutConnections[allDist[cCells[i]]]++;
            }
            else
            {
                // Reconvert local cell index into global one

                // Get original neighbour
                const label celli = set[subCelli];
                const label oldNbrCelli = cellCells[celli][i];
                // Get processor from original neighbour
                const label proci = globalCells.whichProcID(oldNbrCelli);
                // Convert into global compact numbering
                cCells[newI++] = globalSubCells.toGlobal(proci, nbrCelli);
            }
        }
        cCells.setSize(newI);
    }
}
void multiNodeDecomp::decompose
(
    const labelListList& pointPoints,
    const pointField& points,
    const scalarField& pointWeights,
    const labelUList& pointMap,  // map back to original points
    const nodeMetadata& decomposeData,
    const label leafOffset,

    labelList& finalDecomp
) const
{
    labelList dist
    (
        decomposeData.getMethod()->decompose
        (
            pointPoints,
            points,
            pointWeights
        )
    );

    // Number of domains at the current level
    const label nCurrDomains = decomposeData.nDomains();

    // Calculate the domain remapping.
    // The decompose() method delivers a distribution of [0..nDomains-1]
    // which we map to the final location according to the decomposition
    // leaf we are on.

    labelList domainOffsets(nCurrDomains);
    domainOffsets[0] = leafOffset;
    for(label nDomain = 1; nDomain < nCurrDomains; ++nDomain) {
        domainOffsets[nDomain] = domainOffsets[nDomain-1] + decomposeData.getChild(nDomain-1)->getSize();
    }

    // Extract processor+local index from point-point addressing
    forAll(pointMap, i)
    {
        finalDecomp[pointMap[i]] = domainOffsets[dist[i]];
    }

    if (nCurrDomains > 0)
    {
        // Recurse

        // Determine points per domain
        labelListList domainToPoints(invertOneToMany(nCurrDomains, dist));

        for (label domainI = 0; domainI < nCurrDomains; ++domainI)
        {
            if(decomposeData.getChild(domainI)->isLeaf()) continue;
            // Extract elements for current domain
            const labelList domainPoints(findIndices(dist, domainI));

            // Subset point-wise data.
            pointField subPoints(points, domainPoints);
            scalarField subWeights(pointWeights, domainPoints);
            labelList subPointMap(labelUIndList(pointMap, domainPoints));
            // Subset point-point addressing (adapt global numbering)
            labelListList subPointPoints;
            labelList nOutsideConnections;
            subsetGlobalCellCells
            (
                nCurrDomains,
                domainI,
                dist,

                pointPoints,
                domainPoints,

                subPointPoints,
                nOutsideConnections
            );

            decompose
            (
                subPointPoints,
                subPoints,
                subWeights,
                subPointMap,
                *decomposeData.getChild(domainI),
                domainOffsets[domainI],  // The offset for this level and leaf

                finalDecomp
            );
        }
    }
}
// * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //

multiNodeDecomp::multiNodeDecomp
(
    const dictionary& decompDict,
    const word& regionName
)
:
    decompositionMethod(decompDict, regionName),
    rootMetadata_()
{
    const dictionary& coeffsDict(
        findCoeffsDict(
            typeName + "Coeffs",
            (selectionType::EXACT | selectionType::MANDATORY)
        )
    );
    initializeMetadata(coeffsDict);
}

// * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * //


bool multiNodeDecomp::parallelAware() const
{
    return rootMetadata_.parallelAware();
}


labelList multiNodeDecomp::decompose
(
    const polyMesh& mesh,
    const pointField& cc,
    const scalarField& cWeights
) const
{
    CompactListList<label> cellCells;
    calcCellCells(mesh, identity(cc.size()), cc.size(), true, cellCells);

    labelList finalDecomp(cc.size(), Zero);
    labelList cellMap(identity(cc.size()));

    decompose
    (
        cellCells.unpack(),
        cc,
        cWeights,
        cellMap,  // map back to original cells
        rootMetadata_,
        0,

        finalDecomp
    );

    return finalDecomp;
}


labelList multiNodeDecomp::decompose
(
    const labelListList& globalPointPoints,
    const pointField& points,
    const scalarField& pointWeights
) const
{
    labelList finalDecomp(points.size(), Zero);
    labelList pointMap(identity(points.size()));

    decompose
    (
        globalPointPoints,
        points,
        pointWeights,
        pointMap,  // map back to original points
        rootMetadata_,
        0,

        finalDecomp
    );

    return finalDecomp;
}
// * * * * * * * * * * * * Meta Parser Class * * * * * * * * * * * * * //

List<string> multiNodeDecomp::metaParser::getEntries(const dictionary& dict, const string& argument, bool allowWithoutBrackets) {
    string argumentBracket = argument + "[";
    DynamicList<string, 4> Result;
    for(auto& dEntry : dict) {
        if(dEntry.keyword().starts_with(argumentBracket) || (allowWithoutBrackets && dEntry.keyword() == argument))
            Result.push_back(dEntry.keyword());
    }
    return Result;
}

List<Pair<label>> multiNodeDecomp::metaParser::parseRanges(const string& key) {

    // First, discard the argument and process the indices only.
    // The current syntax is argument[...]
    // Assuming that this key was returned in getEntries,
    // if there is no '[', it is OK and we use the
    // empty string (update the root).
    string indices = "";
    if(key.find_first_of('[') != key.npos) {
        // There is a '[' in the string.
        // We can substr from that location.
        label nFirstBracket = key.find('[');
        indices = key.substr(nFirstBracket);
    }

    // All checks print an error message if failed, explaining why.

    DynamicList<Pair<label>, 4> Result;
    label nCurPtr = 0, nIndicesLength = indices.size();
    // As long as there are more ranges to parse.
    while(nCurPtr != nIndicesLength) {
        // First, check if there is an opening bracket.
        if(indices[nCurPtr]!='[')
            FatalError
                << "Error when parsing indices "
                << indices << ": Expected '[', found "
                << indices[nCurPtr] << ". Aborting\n"
                << exit(FatalError);

        // Then, find the matching close bracket.
        label nEndIndex = indices.find(']', nCurPtr);
        if(nEndIndex == nIndicesLength) {
            FatalError
                << "Error when parsing indices "
                << indices << ": Expected ']' after '['. Aborting\n"
                << exit(FatalError);
        }
        // Read inside the brackets, mark the hyphen if it exists, and make sure
        // every character is either a digit or a hyphen.
        // Note that only one hyphen may exist.
        label nHyphenIdx=-1;
        for(label nCurIndex = nCurPtr+1; nCurIndex < nEndIndex; ++nCurIndex) {
            if(!isdigit(indices[nCurIndex])&&indices[nCurIndex]!='-') {
                FatalError
                    << "Error when parsing indices "
                    << indices << ": Expected digit/'-'/']', found "
                    << indices[nCurIndex] << ". Aborting\n"
                    << exit(FatalError);
            }
            if(indices[nCurIndex]=='-') {
                if(nHyphenIdx!=-1)
                    FatalError
                        << "Error when parsing indices "
                        << indices << ": Found two hyphens(-) inside an index. Aborting\n"
                        << exit(FatalError);

                nHyphenIdx = nCurIndex;
            }
        }
        label nLeft,nRight;
        if(nHyphenIdx == -1) {
            // Not a range - just a single index, or empty brackets (indicating to change the whole range).
            if(nCurPtr+1==nEndIndex) nLeft = 0, nRight = -1;
            else {
                string sNum = indices.substr(nCurPtr+1,nEndIndex-nCurPtr-1);
                nLeft = nRight = atoi(sNum.c_str());
            }
        } else {
            // A range of indices.
            // Assert that the hyphen is not right next to the brackets.
            if(nHyphenIdx+1==nEndIndex||nCurPtr+1==nHyphenIdx)
                FatalError
                    << "Error when parsing indices "
                    << indices << ": Expected number, found "
                    << (nCurPtr+1==nHyphenIdx?'-':']')
                    << ". Aborting\n"
                    << exit(FatalError);

            // Parse the numbers
            string sLeftNum = indices.substr(nCurPtr+1,nHyphenIdx-nCurPtr-1);
            string sRightNum = indices.substr(nHyphenIdx+1,nEndIndex-nHyphenIdx-1);
            nLeft = atoi(sLeftNum.c_str());
            nRight = atoi(sRightNum.c_str());
            // Make sure left endpoint is at most the right endpoint
            if(nLeft>nRight)
                FatalError
                    << "Error when parsing indices "
                    << indices << ": right endpoint("<< nRight
                    << ") cannot be smaller than left endpoint("
                    << nLeft << "). Aborting\n"
                    << exit(FatalError);
        }
        // Move the pointer after the closing bracket and append to the result list.
        nCurPtr = nEndIndex + 1;
        Result.push_back({nLeft,nRight});
    }
    return Result;
}
// * * * * * * * * * * * * Node Metadata Class * * * * * * * * * * * * * //
|
||||
|
||||
void multiNodeDecomp::nodeMetadata::setLeveledDictionaries(const List<const dictionary*>& dictionaries) {
|
||||
setLeveledDictionaries(dictionaries, 0);
|
||||
}

bool multiNodeDecomp::nodeMetadata::parallelAware() const {
    // The decomposition tree is parallel aware if and only if all methods used are parallel aware.
    // If this is a leaf, we are OK.
    if(children.empty())
        return true;

    // Otherwise, check if the method used in this node is parallel aware.
    if(!method->parallelAware())
        return false;

    // Check recursively, and if any child is not parallel aware - return false.
    for(auto& child : children)
        if(!child->parallelAware())
            return false;

    return true;
}


void multiNodeDecomp::nodeMetadata::updateProcessorWeights() {
    label nDom = nDomains();
    word methodCoeffsName = coeffsDict->get<word>("method") + "Coeffs";
    // If processorWeights were set by the user, we do not modify them.
    if(
        // Check if the user did not specify processorWeights under the coeffs dictionary or the methodCoeffs dictionary
        !(coeffsDict->subDictOrAdd(methodCoeffsName).found("processorWeights", keyType::LITERAL)
        || coeffsDict->subDictOrAdd("coeffs").found("processorWeights", keyType::LITERAL))) {
        // Then we should compute weights on our own
        Field<float> processorWeights(nDom);
        forAll(children, i) {
            if(children[i]->weight != 1)
                processorWeights[i] = children[i]->weight;
            else switch(weightsInitialization) {
                case RELATIVE:
                    processorWeights[i] = children[i]->size;
                    break;
                case UNIFORM:
                    processorWeights[i] = 1;
                    break;
                default:
                    FatalError
                        << "Weights initialization is not handled in updateProcessorWeights. Aborting\n"
                        << exit(FatalError);
            }
        }

        coeffsDict->subDictOrAdd(methodCoeffsName).add("processorWeights", processorWeights);
    }
}
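
// For example, if the children's subtree sizes are (4 2 2) and the RELATIVE
// initialization is active, updateProcessorWeights() computes
// processorWeights (4 2 2); a child whose weight was set explicitly
// (weight != 1) keeps that explicit weight instead.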

void multiNodeDecomp::nodeMetadata::constructMethods() {
    // Special handling of nDomains = 1, because some decomposition methods crash when decomposing to one domain.
    label nDom = nDomains();
    if(nDom==1) {
        coeffsDict->clear();
        coeffsDict->add("method","none");
    } else
        updateProcessorWeights();
    coeffsDict->add("numberOfSubdomains",nDom);

    // Non-verbose construction of decomposition methods would be nice
    method = decompositionMethod::New(*coeffsDict).release();
    // Cannot release coeffsDict from memory because method holds a reference that must stay alive

    forAll(children, i) {
        if(!children[i]->isLeaf())
            children[i]->constructMethods();
    }
}


// Recursively construct the decomposition tree, given the list of dimensions and a default method.
void multiNodeDecomp::nodeMetadata::constructRecursive(const labelList& dims, const dictionary* defaultMethod) {
    if(!dims.empty()) {
        // The list of dimensions of the children is the current list without the first element.
        labelList newDims(dims.size() - 1);
        forAll(newDims, i)
            newDims[i] = dims[i+1];

        // Construct children recursively:
        // first recurse into the children that are kept,
        // and delete the excess ones.
        forAll(children, i) {
            if(i < dims[0])
                children[i]->constructRecursive(newDims, defaultMethod);
            else
                delete children[i];
        }
        label nOldSize = children.size();
        children.resize(dims[0]);
        // If the new array is bigger we will need to allocate new children.
        for(label i = nOldSize; i < dims[0]; ++i)
            children[i] = new nodeMetadata(newDims, defaultMethod);

        // Compute size (number of leaves in the subtree)
        size = dims[0];
        if(!children.empty())
            size *= children[0]->size;
    }
}
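
// For example, constructRecursive with dims (2 4) builds a root with two
// children, each of which gets four leaf children, so the root's size
// becomes 2*4 = 8 processors.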

void multiNodeDecomp::nodeMetadata::updateNodes(const string& key, const std::function<void(nodeMetadata*)>& update) {
    List<Pair<label>> indicesList = metaParser::parseRanges(key);
    updateNodes(indicesList, update);
}


// Apply the update function to all nodes matching the parsed indices.
// nCurIdx indicates the range we are currently processing (instead of passing sub-lists of indices around).
void multiNodeDecomp::nodeMetadata::updateNodes(const List<Pair<label>>& indices, const std::function<void(nodeMetadata*)>& update, label nCurIdx) {
    if(nCurIdx == label(indices.size())) update(this);
    else {
        // Otherwise, call recursively.
        label nLeft, nRight, nChildren = children.size();
        nLeft = indices[nCurIdx].first();
        nRight = indices[nCurIdx].second();

        // [0,-1] means the entire range.
        if(nLeft==0 && nRight == -1)
            nRight = nChildren - 1;
        // Make sure that the indices do not exceed the number of children.
        if(nRight >= nChildren)
            FatalError
                << "Error when parsing indices: The #" << (nCurIdx+1)
                << " range ["<< nLeft <<"," << nRight<<"]:\n"
                << " Cannot update indices bigger than the number of children("
                << nChildren << "). Aborting\n"
                << exit(FatalError);

        for(label nChildIdx = nLeft; nChildIdx <= nRight; ++nChildIdx)
            children[nChildIdx]->updateNodes(indices,update, nCurIdx+1);
    }
    // Recompute size assuming children are updated.
    if(!children.empty()) {
        size = 0;
        forAll(children, i)
            size += children[i]->size;
    }
}
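
// For example, given the parsed ranges ((0 1) (2 2)), updateNodes applies
// the update function to child 2 of child 0 and to child 2 of child 1,
// and leaves every other node untouched.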

void multiNodeDecomp::nodeMetadata::setLeveledDictionaries(const List<const dictionary*>& dictionaries, label nLevel) {
    // Set the dictionary of this level, and recurse into non-leaf children.
    setDict(*dictionaries[nLevel]);
    forAll(children, i) {
        if(children[i]->nDomains() > 0)
            children[i]->setLeveledDictionaries(dictionaries,nLevel+1);
    }
}

}

// ************************************************************************* //

@ -1,435 +0,0 @@

/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2011-2015 OpenFOAM Foundation
    Copyright (C) 2017-2021 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

    OpenFOAM is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.

Class
    Foam::multiNodeDecomp

Description
    Decomposes the mesh by consecutive application of decomposition methods,
    possibly into uneven pieces.
    Note: If uneven pieces are required, the decomposition method
    used must support the processorWeights argument.

SourceFiles
    multiNodeDecomp.C

\*---------------------------------------------------------------------------*/

#ifndef multiNodeDecomp_H
#define multiNodeDecomp_H

#include "decompositionMethod.H"
// #include "List.H"

namespace Foam
{

/*---------------------------------------------------------------------------*\
                     Class multiNodeDecomp Declaration
\*---------------------------------------------------------------------------*/

class multiNodeDecomp
:
    public decompositionMethod
{
    // Nested class declarations

    /*-----------------------------------------------------------------------*\
                           Class metaParser Declaration
    \*-----------------------------------------------------------------------*/

    // A class responsible for detecting and parsing metadata-related arguments.
    class metaParser {
    public:
        // Detect and return entries related to the given argument.
        // Input:
        //   dict - the coeffs dictionary we are looking inside.
        //   argument - the argument we're looking for.
        //   allowWithoutBrackets - set to true if the argument can be detected without brackets.
        //     For example, domains should not be allowed without brackets, but weightsInitialization can be.
        static List<string> getEntries(const dictionary& dict, const string& argument, bool allowWithoutBrackets = false);
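
        // For example (illustrative), getEntries(dict, "domains") would match
        // keys such as "domains[0]" or "domains[0-1][2]", while
        // getEntries(dict, "weightsInitialization", true) also matches the
        // bare key "weightsInitialization".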

        // Given a key string of an entry returned from getEntries,
        // parse and return all ranges described in the key.
        // Note that it is the user's responsibility to make sure that the right endpoint
        // does not exceed the number of children in each node.
        // The user may also write "[]" to specify all children of the node;
        // in this case, the range returned is [0,-1]. Otherwise, endpoints are always non-negative.
        // Input:
        //   key - a key of an entry we are parsing, perhaps from an entry returned by getEntries.
        // Output:
        //   A list of ranges, where the i-th range corresponds to the i-th level of the decomposition tree,
        //   i.e. if two ranges are returned, we traverse each child of the root in the first range, and recursively
        //   each child in the second range, thus updating nodes at the third level.
        static List<Pair<label>> parseRanges(const string& key);
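
        // For example (illustrative), parseRanges("domains[0-1][3]") returns
        // ((0 1) (3 3)), and parseRanges("method[]") returns ((0 -1)),
        // where [0,-1] stands for "all children" at that level.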

    };

    enum WeightsInitialization {
        RELATIVE,
        UNIFORM,
        UNKNOWN
    };
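
    // RELATIVE initializes each child's processor weight from its subtree
    // size; UNIFORM gives every child the same weight
    // (see updateProcessorWeights()).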

    /*-----------------------------------------------------------------------*\
                          Class nodeMetadata Declaration
    \*-----------------------------------------------------------------------*/

    // A class holding all the information necessary for a multi-node decomposition, without building
    // the decompositionMethod objects.
    // The size indicates the number of processors in this subtree. It is used
    // when computing the offset of the decomposition, and when using the relative weights initialization.
    // The weight is a multiplicative factor applied when decomposing. It is 1 by default and can be set by the user.
    // If the uniform weights initialization is used, all nodes have the same weight. If the relative weights
    // initialization is used, each node's weight is set relative to its size.
    // The weight field can then be used to change the weight of a specific node.
    // Note that if the coeffs dictionary contains a processorWeights field, it will not be overwritten;
    // we then construct a new dictionary with the required numberOfSubdomains and processorWeights.
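    // For example, a tree built from domains (2 4) has size 8 at the root and
    // size 4 at each of its two children; with RELATIVE initialization each
    // child's processor weight then defaults to 4.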

    class nodeMetadata {
    public:

        nodeMetadata() : weight(1), size(1), weightsInitialization(UNIFORM), children(0), coeffsDict(nullptr), method(nullptr) {}

        // Constructs a decomposition data tree with dimensions dims and a default method.
        // Input: A list of domains for each level, and a default method dictionary.
        nodeMetadata(const labelList& dims, const dictionary* defaultMethod) : nodeMetadata() {
            initialize(dims, defaultMethod);
        }

        // Initializes an existing nodeMetadata object.
        void initialize(const labelList& dims, const dictionary* defaultMethod) {
            setDict(*defaultMethod);
            constructRecursive(dims, defaultMethod);
        }

        ~nodeMetadata() {
            // Since this class represents a tree, we need to destruct recursively.
            for(nodeMetadata* child : children)
                delete child;

            // Only delete method and dict if they were assigned.
            if(method != nullptr)
                delete method;
            if(coeffsDict != nullptr)
                delete coeffsDict;
        }


        // Getters

        // Get the weight of this node, with respect to the decomposition done in this node's parent.
        label getWeight() const {
            return weight;
        }

        // Get the coeffs dictionary for the decomposition of this node.
        const dictionary* getDict() const {
            return coeffsDict;
        }

        // Get the modifiable coeffs dictionary for the decomposition of this node.
        dictionary* getMutableDict() {
            return coeffsDict;
        }

        // Get the number of leaves in this subtree, i.e., the number of processors
        // created under this node.
        label getSize() const {
            return size;
        }

        // Get the decomposition method object of this node.
        // Note that constructMethods() must be called first, otherwise
        // a null pointer will be returned.
        const Foam::decompositionMethod* getMethod() const {
            return method;
        }

        // Get the current weights initialization mode.
        WeightsInitialization getWeightsInitialization() const {
            return weightsInitialization;
        }

        // Get a const pointer to a child of this node.
        const nodeMetadata* getChild(label index) const {
            return children[index];
        }

        // Get a non-const pointer to a child of this node.
        nodeMetadata* getMutableChild(label index) {
            return children[index];
        }

        // Returns the number of direct subdomains this node has.
        label nDomains() const {
            return children.size();
        }

        // Returns whether this node represents a leaf (i.e., has no children).
        bool isLeaf() const {
            return children.empty();
        }

        // Setters

        // Set the weight of this node, with respect to the decomposition done in this node's parent.
        void setWeight(label weight) {
            this->weight = weight;
        }

        // Set the coeffs dictionary for the decomposition of this node.
        // This creates a copy of the dictionary (and deletes the previous copy created).
        void setDict(const dictionary& dict) {
            if(coeffsDict != nullptr) {
                delete coeffsDict;
            }
            coeffsDict = new dictionary(dict);
        }

        // Set the decomposition method object of this node.
        void setMethod(Foam::decompositionMethod* method) {
            this->method = method;
        }

        // Sets the weights initialization mode. If setRecursive is true, propagate to the entire subtree (i.e., the node and all of its descendants).
        void setWeightsInitialization(WeightsInitialization newMode, bool setRecursive = true) {
            weightsInitialization = newMode;
            if(setRecursive) {
                for(nodeMetadata* child : children)
                    child->setWeightsInitialization(newMode, true);
            }
        }

        // Updates

        // Update the weights of the nodes at the given indices to the given weight.
        // Input: A string indicating the indices of the nodes to be updated, and
        // the new weight of the nodes.
        void updateWeight(const string& indices, label newWeight) {
            updateNodes(indices, [newWeight](nodeMetadata* node) {
                node->setWeight(newWeight);
            });
        }
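
        // For example (hypothetical key syntax), updateWeight("weight[1][0-2]", 3)
        // sets weight 3 on children 0, 1 and 2 of the root's child 1.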

        // Update the dimensions array of the nodes at the given indices to the given dimensions array.
        // Input: A string indicating the indices of the nodes to be updated, and
        // the new list of dimensions.
        void updateDomains(const string& indices,const labelList& dims) {
            updateNodes(indices, [dims](nodeMetadata* node) {
                // Reconstruct using this node's dict.
                // Note that all domain changes are done first,
                // and only then are dictionaries set,
                // so the descendants' dictionaries are not overwritten.
                node->constructRecursive(dims,node->getDict());
            });
        }


        // Update the method of the nodes at the given indices to the given method dictionary.
        // Input: A string indicating the indices of the nodes to be updated, and
        // the new method dictionary.
        void updateMethod(const string& indices, const dictionary& dict) {
            updateNodes(indices, [dict](nodeMetadata* node) {
                node->setDict(dict);
            });
        }

        // Update the weights initialization mode of the nodes at the given indices and their descendants to the new mode.
        // Input: A string indicating the indices of the nodes to be updated, and
        // the new weights mode.
        void updateWeightsInitialization(const string& indices, WeightsInitialization newMode) {
            updateNodes(indices, [newMode](nodeMetadata* node) {
                node->setWeightsInitialization(newMode);
            });
        }

        // Given a list of dictionaries for each level, set the dictionaries accordingly.
        // Input: A list of dictionaries for each level.
        void setLeveledDictionaries(const List<const dictionary*>& dictionaries);
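
        // For example, given dictionaries (dictA dictB), the root receives
        // dictA and each non-leaf child receives dictB (see the private
        // overload below).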

        // To be used within the decompositionMethod's parallelAware function.
        // Returns whether all decompositions in this subtree are parallel aware
        // (i.e., synchronize domains across proc boundaries).
        bool parallelAware() const;

        // Calculate (and add to the dictionary) the new processor weights if required,
        // using the children's weights and the weights initialization mode.
        void updateProcessorWeights();

        // Construct the decompositionMethod object for this node and all of its descendants.
        void constructMethods();

    private:

        // The weight of this node in the parent's decomposition, relative to the other nodes.
        // Overrides weights set by the weights initialization.
        label weight;

        // The size of the node, i.e. the total number of leaf subdomains (processors) in this subtree.
        label size;

        // An enum describing the weights initialization.
        WeightsInitialization weightsInitialization;

        // The direct descendants.
        List<nodeMetadata*> children;

        // The dictionary used to construct the decomposition method.
        dictionary* coeffsDict;

        // The decomposition method of this node.
        const Foam::decompositionMethod* method;

        // Recursively constructs the subtree rooted at this node.
        // Input: A list of dimensions and the dictionary of the default method.
        void constructRecursive(const labelList& dims, const dictionary* defaultMethod);

        // Update all nodes matching the given indices with the given updating function.
        // Input: A list of ranges for each level, and a function that receives a pointer to a nodeMetadata,
        // updates it accordingly and returns nothing.
        void updateNodes(const string& key, const std::function<void(nodeMetadata*)>& update);

        // Internal implementation of updateNodes.
        // The list of ranges is constructed by passing the key argument to the meta parser.
        // nCurIdx is an internal variable that indicates our location inside the indices array.
        void updateNodes(const List<Pair<label>>& indices, const std::function<void(nodeMetadata*)>& update, label nCurIdx = 0);

        // This function is used inside the public setLeveledDictionaries function.
        void setLeveledDictionaries(const List<const dictionary*>& dictionaries, label nLevel);

        // Parse the range of indices starting at the (string) index nStartIndex.
        // Input: The indices string, and the starting position of the range
        // (i.e., the position of the opening bracket).
        // Returns a pair representing the range on success,
        // or aborts the program with an appropriate error message if parsing fails.
        Pair<label> parseRange(const string& indices, label nStartIndex) const;

    };


    // Private Data

        //- The decomposition metadata.
        nodeMetadata rootMetadata_;


    // Private Member Functions

        //- Read coeffsDict and construct the decomposition metadata.
        void initializeMetadata(const dictionary& coeffsDict);


        //- Given connectivity across processors work out connectivity
        //  for a (consistent) subset
        void subsetGlobalCellCells
        (
            const label nDomains,
            const label domainI,
            const labelList& dist,

            const labelListList& cellCells,
            const labelList& set,
            labelListList& subCellCells,
            labelList& cutConnections
        ) const;

        //- Decompose at 'currLevel' without addressing
        void decompose
        (
            const labelListList& pointPoints,
            const pointField& points,
            const scalarField& pointWeights,
            const labelUList& pointMap,  // map back to original points
            const nodeMetadata& decomposeData,
            const label leafOffset,

            labelList& finalDecomp
        ) const;


        //- No copy construct
        multiNodeDecomp(const multiNodeDecomp&) = delete;

        //- No copy assignment
        void operator=(const multiNodeDecomp&) = delete;

public:

    //- Runtime type information
    TypeName("multiNode");


    // Constructors

        //- Construct given decomposition dictionary and optional region name
        explicit multiNodeDecomp
        (
            const dictionary& decompDict,
            const word& regionName = ""
        );


    //- Destructor
    virtual ~multiNodeDecomp() = default;


    // Member Functions

        //- Is method parallel aware?
        //  i.e. does it synchronize domains across proc boundaries
        virtual bool parallelAware() const;

        //- Inherit decompose from decompositionMethod
        using decompositionMethod::decompose;

        //- Return for every coordinate the wanted processor number.
        //  Use the mesh connectivity (if needed)
        virtual labelList decompose
        (
            const polyMesh& mesh,
            const pointField& points,
            const scalarField& pointWeights
        ) const;

        //- Return for every coordinate the wanted processor number.
        //  Explicitly provided connectivity - does not use mesh_.
        virtual labelList decompose
        (
            const labelListList& globalCellCells,
            const pointField& cc,
            const scalarField& cWeights
        ) const;
};


// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

} // End namespace Foam

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

#endif

// ************************************************************************* //

@ -1949,7 +1949,7 @@ Foam::distributedTriSurfaceMesh::independentlyDistributedBbs
        // //globalBorderTris.gather
        // //(
        // //    UPstream::worldComm,
        // //    UPstream::procID(Pstream::worldComm),
        // //    UPstream::allProcs(UPstream::worldComm),
        // //    globalBorderCentres
        // //);
        // pointField globalBorderCentres(allCentres);
@ -1996,7 +1996,7 @@ Foam::distributedTriSurfaceMesh::independentlyDistributedBbs
        // //globalBorderTris.scatter
        // //(
        // //    UPstream::worldComm,
        // //    UPstream::procID(Pstream::worldComm),
        // //    UPstream::allProcs(UPstream::worldComm),
        // //    isMasterPoint
        // //);
        // //boolList isMasterBorder(s.size(), false);
@ -2094,7 +2094,7 @@ Foam::distributedTriSurfaceMesh::independentlyDistributedBbs
        globalTris().gather
        (
            UPstream::worldComm,
            UPstream::procID(Pstream::worldComm),
            UPstream::allProcs(UPstream::worldComm),
            allCentres
        );
    }
@ -2144,7 +2144,7 @@ Foam::distributedTriSurfaceMesh::independentlyDistributedBbs
        globalTris().scatter
        (
            UPstream::worldComm,
            UPstream::procID(Pstream::worldComm),
            UPstream::allProcs(UPstream::worldComm),
            allDistribution,
            distribution
        );

@ -75,7 +75,7 @@ alphatPhaseChangeWallFunctionFvPatchScalarField

    if (dict.found("mDotL"))
    {
        dmdt_ = scalarField("mDotL", dict, p.size());
        mDotL_ = scalarField("mDotL", dict, p.size());
    }
}


@ -834,15 +834,15 @@ Foam::meshToMesh::mapTgtToSrc
            label srcPatchi = srcPatchID_[i];
            label tgtPatchi = tgtPatchID_[i];

            if (!srcPatchFields.set(tgtPatchi))
            if (!srcPatchFields.set(srcPatchi))
            {
                srcPatchFields.set
                (
                    srcPatchi,
                    fvPatchField<Type>::New
                    (
                        tgtBfld[srcPatchi],
                        srcMesh.boundary()[tgtPatchi],
                        tgtBfld[tgtPatchi],
                        srcMesh.boundary()[srcPatchi],
                        DimensionedField<Type, volMesh>::null(),
                        directFvPatchFieldMapper
                        (

@ -88,7 +88,7 @@ Foam::boundaryDataSurfaceReader::readField
{
    refPtr<Time> timePtr(Time::New(argList::envGlobalPath()));

    return readField<Type>(baseDir, timeDir, fieldName, avg);
    return readField<Type>(*timePtr, baseDir, timeDir, fieldName, avg);
}


@ -82,7 +82,7 @@ Foam::tmp<Foam::Field<Type>> Foam::ensightSurfaceReader::readField
    }

    // Check that data type is as expected
    // (assumes OpenFOAM generated the data set)
    // (assuming OpenFOAM generated the data set)
    string primitiveType;
    is.read(primitiveType);

@ -90,7 +90,8 @@ Foam::tmp<Foam::Field<Type>> Foam::ensightSurfaceReader::readField

    if
    (
        primitiveType != ensightPTraits<Type>::typeName
        debug
     && primitiveType != ensightPTraits<Type>::typeName
     && primitiveType != pTraits<Type>::typeName
    )
    {

@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2011-2015 OpenFOAM Foundation
    Copyright (C) 2017-2022 OpenCFD Ltd.
    Copyright (C) 2017-2024 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

@ -149,7 +149,6 @@ bool Foam::fileFormats::NASsurfaceFormat<Face>::read
    string line;
    while (is.good())
    {
        string::size_type linei = 0;  // Parsing position within current line
        is.getLine(line);

        // ANSA extension
@ -223,16 +222,30 @@ bool Foam::fileFormats::NASsurfaceFormat<Face>::read
            }
        }


        // Parsing position within current line
        std::string::size_type linei = 0;

        // Is free format if line contains a comma
        const bool freeFormat = line.contains(',');
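
        // For example (illustrative), a fixed-format line packs each field
        // into an 8-column slot:
        //     CTRIA3         1       2      10      11      12
        // while the free-format equivalent separates fields with commas:
        //     CTRIA3,1,2,10,11,12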

        // First word (column 0-8)
        const word cmd(word::validate(nextNasField(line, linei, 8)));

        if (cmd == "CTRIA3")
        {
            label elemId = readLabel(nextNasField(line, linei, 8)); // 8-16
            label groupId = readLabel(nextNasField(line, linei, 8)); // 16-24
            const auto a = readLabel(nextNasField(line, linei, 8)); // 24-32
            const auto b = readLabel(nextNasField(line, linei, 8)); // 32-40
            const auto c = readLabel(nextNasField(line, linei, 8)); // 40-48
            // Fixed format:
            //   8-16 : element id
            //  16-24 : group id
            //  24-32 : vertex
            //  32-40 : vertex
            //  40-48 : vertex

            label elemId = readLabel(nextNasField(line, linei, 8, freeFormat));
            label groupId = readLabel(nextNasField(line, linei, 8, freeFormat));
            const auto a = readLabel(nextNasField(line, linei, 8, freeFormat));
            const auto b = readLabel(nextNasField(line, linei, 8, freeFormat));
            const auto c = readLabel(nextNasField(line, linei, 8, freeFormat));

            // Convert groupId into zoneId
            const auto iterZone = zoneLookup.cfind(groupId);
@ -261,12 +274,20 @@ bool Foam::fileFormats::NASsurfaceFormat<Face>::read
        }
        else if (cmd == "CQUAD4")
        {
            label elemId = readLabel(nextNasField(line, linei, 8)); // 8-16
            label groupId = readLabel(nextNasField(line, linei, 8)); // 16-24
            const auto a = readLabel(nextNasField(line, linei, 8)); // 24-32
            const auto b = readLabel(nextNasField(line, linei, 8)); // 32-40
            const auto c = readLabel(nextNasField(line, linei, 8)); // 40-48
            const auto d = readLabel(nextNasField(line, linei, 8)); // 48-56
            // Fixed format:
            //   8-16 : element id
            //  16-24 : group id
            //  24-32 : vertex
            //  32-40 : vertex
            //  40-48 : vertex
            //  48-56 : vertex

            label elemId = readLabel(nextNasField(line, linei, 8, freeFormat));
            label groupId = readLabel(nextNasField(line, linei, 8, freeFormat));
            const auto a = readLabel(nextNasField(line, linei, 8, freeFormat));
            const auto b = readLabel(nextNasField(line, linei, 8, freeFormat));
            const auto c = readLabel(nextNasField(line, linei, 8, freeFormat));
            const auto d = readLabel(nextNasField(line, linei, 8, freeFormat));

            // Convert groupId into zoneId
            const auto iterZone = zoneLookup.cfind(groupId);
@ -310,11 +331,21 @@ bool Foam::fileFormats::NASsurfaceFormat<Face>::read
        }
        else if (cmd == "GRID")
        {
            label index = readLabel(nextNasField(line, linei, 8)); // 8-16
            (void) nextNasField(line, linei, 8); // 16-24
            scalar x = readNasScalar(nextNasField(line, linei, 8)); // 24-32
            scalar y = readNasScalar(nextNasField(line, linei, 8)); // 32-40
            scalar z = readNasScalar(nextNasField(line, linei, 8)); // 40-48
            // Fixed (short) format:
            //   8-16 : point id
            //  16-24 : coordinate system (not supported)
            //  24-32 : point x coordinate
            //  32-40 : point y coordinate
            //  40-48 : point z coordinate
            //  48-56 : displacement coordinate system (optional, unsupported)
            //  56-64 : single point constraints (optional, unsupported)
            //  64-70 : super-element id (optional, unsupported)

            label index = readLabel(nextNasField(line, linei, 8, freeFormat));
            (void) nextNasField(line, linei, 8, freeFormat);
            scalar x = readNasScalar(nextNasField(line, linei, 8, freeFormat));
            scalar y = readNasScalar(nextNasField(line, linei, 8, freeFormat));
            scalar z = readNasScalar(nextNasField(line, linei, 8, freeFormat));

            pointId.append(index);
            dynPoints.append(point(x, y, z));
@ -327,6 +358,8 @@ bool Foam::fileFormats::NASsurfaceFormat<Face>::read
            // GRID* 126 0 -5.55999875E+02 -5.68730474E+02
            // * 2.14897901E+02

            // Cannot be long format and free format at the same time!

            label index = readLabel(nextNasField(line, linei, 16)); // 8-24
            (void) nextNasField(line, linei, 16); // 24-40
            scalar x = readNasScalar(nextNasField(line, linei, 16)); // 40-56
@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2012-2016 OpenFOAM Foundation
    Copyright (C) 2015-2022 OpenCFD Ltd.
    Copyright (C) 2015-2024 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

@ -307,16 +307,10 @@ void Foam::surfaceWriters::nastranWriter::writeGeometry
Foam::surfaceWriters::nastranWriter::nastranWriter()
:
    surfaceWriter(),
    writeFormat_(fieldFormat::SHORT),
    fieldMap_(),
    writeFormat_(fieldFormat::FREE),
    commonGeometry_(false),
    separator_()
{
    // if (writeFormat_ == fieldFormat::FREE)
    // {
    //     separator_ = ",";
    // }
}
    separator_(",") // FREE format
{}


Foam::surfaceWriters::nastranWriter::nastranWriter
@ -331,12 +325,10 @@ Foam::surfaceWriters::nastranWriter::nastranWriter
        (
            "format",
            options,
            fieldFormat::LONG
            fieldFormat::FREE
        )
    ),
    fieldMap_(),
    commonGeometry_(options.getOrDefault("commonGeometry", false)),
    separator_()
    commonGeometry_(options.getOrDefault("commonGeometry", false))
{
    if (writeFormat_ == fieldFormat::FREE)
    {
@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2012-2016 OpenFOAM Foundation
    Copyright (C) 2015-2022 OpenCFD Ltd.
    Copyright (C) 2015-2024 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

@ -33,13 +33,13 @@ Description
    The formatOptions for nastran:
    \table
        Property       | Description                          | Reqd | Default
        fields         | Field pairs for PLOAD2/PLOAD4        | yes  |
        format         | Nastran format (short/long/free)     | no   | long
        format         | Nastran format (short/long/free)     | no   | free
        scale          | Output geometry scaling              | no   | 1
        transform      | Output coordinate transform          | no   |
        fieldLevel     | Subtract field level before scaling  | no   | empty dict
        fieldScale     | Output field scaling                 | no   | empty dict
        commonGeometry | use separate geometry files          | no   | false
        fields         | Field pairs for PLOAD2/PLOAD4        | yes  |
    \endtable

    For example,
@ -48,13 +48,6 @@ Description
    {
        nastran
        {
            // OpenFOAM field name to NASTRAN load types
            fields
            (
                (pMean  PLOAD2)
                (p      PLOAD4)
            );

            format free;  // format type

            scale 1000;  // [m] -> [mm]
@ -62,6 +55,13 @@ Description
            {
                "p.*" 0.01;  // [Pa] -> [mbar]
            }

            // OpenFOAM field name to NASTRAN load types
            fields
            (
                (pMean  PLOAD2)
                (p      PLOAD4)
            );
        }
    }
    \endverbatim
@ -93,7 +93,6 @@ Description

Note
    Output variable scaling does not apply to integer types such as Ids.
    Field pairs default to PLOAD2 for scalars and PLOAD4 for vectors etc.

SourceFiles
    nastranSurfaceWriter.C
@ -221,10 +220,10 @@ public:

    // Constructors

        //- Default construct. Default SHORT format
        //- Default construct. Default FREE format
        nastranWriter();

        //- Construct with some output options. Default LONG format
        //- Construct with some output options. Default FREE format
        explicit nastranWriter(const dictionary& options);

        //- Construct from components
@ -128,7 +128,7 @@ Foam::scalar Foam::ReversibleReaction
    const scalarField& c
) const
{
    return kfwd/max(this->Kc(p, T), 1e-6);
    return kfwd/max(this->Kc(p, T), VSMALL);
}



@ -7,7 +7,7 @@ tracer0
    log             off;

    resetOnStartUp  false;
    // writeControl writeTime;
    writeControl    writeTime;
    // writeInterval 1;
    field           tracer0;
    D               0.001;

@ -61,6 +61,8 @@ functions
        fvOptions
        {
        }

        writeControl    writeTime;
    }

    fileUpdate