Mirror of https://develop.openfoam.com/Development/openfoam.git
ENH: refactor coordSet writers (#2347)
- The very old 'writer' class was fully stateless and always templated on a
  particular output type. It is now replaced with a 'coordSetWriter' that
  follows the same concepts previously introduced for surface writers (#1206).

  - Writers change from being a generic, stateless set of routines to
    properly conforming to the normal notion of a writer.

  - Parallel data handling is done *outside* of the writers, since they are
    used in a wide variety of contexts and the caller is currently still in a
    better position for deciding how to combine parallel data.

ENH: update sampleSets to sample on a per-field basis (#2347)

- Sample/write a field in a single step.

- Support 'sampleOnExecute' to obtain values at execution intervals without
  writing.

- Support 'sets' input as a dictionary entry (as well as a list), similar to
  the changes for sampled surfaces; this permits use of changeDictionary to
  modify the content (see the sketch after these notes).

- Use globalIndex for gathering, which reduces parallel communication and
  code.

- Qualify the sampleSet results (properties) with the name of the set. The
  sample results were previously stored without a qualifier, which meant that
  only the last property value was actually saved (previous ones were
  overwritten). For example:

  ```
  sample1
  {
      scalar
      {
          average(line,T)        349.96521;
          min(line,T)            349.9544281;
          max(line,T)            350;
          average(cells,T)       349.9854619;
          min(cells,T)           349.6589286;
          max(cells,T)           350.4967271;
          average(line,epsilon)  0.04947733869;
          min(line,epsilon)      0.04449639927;
          max(line,epsilon)      0.06452856475;
      }
      label
      {
          size(line,T)        79;
          size(cells,T)       1720;
          size(line,epsilon)  79;
      }
  }
  ```

ENH: update the particleTracks application

- Use globalIndex to manage the original parcel addressing and for gathering.
  Simplify the code by introducing a helper class and by storing intermediate
  fields in hash tables instead of separate lists.

ADDITIONAL NOTES:

- regionSizeDistribution largely retains separate writers, since the utility
  of placing sum/dev/count for all fields into a single file is questionable.

- The streamline writing remains a "soft" upgrade: scalar and vector fields
  are still collected a priori and not on-the-fly. This is due to how the
  streamline infrastructure is currently handled (it should be upgraded in
  the future).
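The dictionary-style 'sets' input and 'sampleOnExecute' described above can be illustrated with a minimal sampleSets entry. This is a sketch only, not taken from the commit: the surrounding keywords (type, libs, writeControl, interpolationScheme, fields and the uniform-set parameters) are ordinary sampleSets usage that this change does not alter.

```
sample1
{
    type                sets;
    libs                (sampling);
    writeControl        writeTime;

    // New: sample at execution intervals without writing
    sampleOnExecute     true;

    interpolationScheme cellPoint;
    setFormat           raw;
    fields              (T epsilon);

    // New: 'sets' may be given as a dictionary (a list still works),
    // which makes the content easy to modify with changeDictionary
    sets
    {
        line
        {
            type    uniform;
            axis    distance;
            start   (0 0 0);
            end     (1 0 0);
            nPoints 80;
        }
    }
}
```

With 'sampleOnExecute' enabled, the qualified results such as average(line,T) become available as function object properties at each execution interval, as in the example above.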
@@ -5,7 +5,7 @@
     \\  /    A nd           | www.openfoam.com
      \\/     M anipulation  |
 -------------------------------------------------------------------------------
-    Copyright (C) 2016-2020 OpenCFD Ltd.
+    Copyright (C) 2016-2022 OpenCFD Ltd.
 -------------------------------------------------------------------------------
 License
     This file is part of OpenFOAM.
@@ -78,8 +78,13 @@ bool Foam::functionObjects::particleDistribution::read(const dictionary& dict)
     dict.readEntry("cloud", cloudName_);
     dict.readIfPresent("tagField", tagFieldName_);
     dict.readEntry("nameVsBinWidth", nameVsBinWidth_);
-    const word format(dict.get<word>("setFormat"));
-    writerPtr_ = writer<scalar>::New(format);
+
+    const word setFormat(dict.get<word>("setFormat"));
+    writerPtr_ = coordSetWriter::New
+    (
+        setFormat,
+        dict.subOrEmptyDict("formatOptions").optionalSubDict(setFormat)
+    );

     Info<< type() << " " << name() << " output:" << nl
         << "    Processing cloud : " << cloudName_ << nl
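For this function object, the lookup above means that 'formatOptions' is optional and, when present, should contain a sub-dictionary named after the chosen setFormat. A hedged sketch of the corresponding input; the 'precision' entry is only an illustrative writer option, not taken from this commit:

```
setFormat       vtk;

formatOptions
{
    vtk
    {
        precision   10;   // illustrative; available options depend on the writer
    }
}
```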
@@ -104,12 +109,10 @@ bool Foam::functionObjects::particleDistribution::write()

     if (!mesh_.foundObject<cloud>(cloudName_))
     {
-        wordList cloudNames(mesh_.names<cloud>());
-
         WarningInFunction
             << "Unable to find cloud " << cloudName_
             << " in the mesh database. Available clouds include:"
-            << cloudNames << endl;
+            << flatOutput(mesh_.sortedNames<cloud>()) << endl;

         return false;
     }
@@ -153,16 +156,17 @@ bool Foam::functionObjects::particleDistribution::write()
     }


-    bool ok = false;
     forAll(nameVsBinWidth_, i)
     {
-        ok = false;
-        ok = ok || processField<scalar>(cloudObr, i, tagAddr);
-        ok = ok || processField<vector>(cloudObr, i, tagAddr);
-        ok = ok || processField<tensor>(cloudObr, i, tagAddr);
-        ok = ok || processField<sphericalTensor>(cloudObr, i, tagAddr);
-        ok = ok || processField<symmTensor>(cloudObr, i, tagAddr);
-        ok = ok || processField<tensor>(cloudObr, i, tagAddr);
+        const bool ok
+        (
+            processField<scalar>(cloudObr, i, tagAddr)
+         || processField<vector>(cloudObr, i, tagAddr)
+         || processField<tensor>(cloudObr, i, tagAddr)
+         || processField<sphericalTensor>(cloudObr, i, tagAddr)
+         || processField<symmTensor>(cloudObr, i, tagAddr)
+         || processField<tensor>(cloudObr, i, tagAddr)
+        );

         if (log && !ok)
         {
@@ -189,10 +193,10 @@ void Foam::functionObjects::particleDistribution::generateDistribution
         return;
     }

-    word fName(fieldName);
+    word fldName(fieldName);
     if (tag != -1)
     {
-        fName = fName + '_' + Foam::name(tag);
+        fldName += '_' + Foam::name(tag);
     }

     distributionModels::general distribution
@@ -202,31 +206,20 @@ void Foam::functionObjects::particleDistribution::generateDistribution
         rndGen_
     );

-    const Field<scalar> distX(distribution.x());
-    const Field<scalar> distY(distribution.y());
+    Field<scalar> distX(distribution.x());
+    Field<scalar> distY(distribution.y());

     pointField xBin(distX.size(), Zero);
-    xBin.replace(0, distX);
-    const coordSet coords
-    (
-        fName,
-        "x",
-        xBin,
-        distX
-    );
+    xBin.replace(vector::X, distX);

-    const wordList fieldNames(1, fName);
+    const coordSet coords(fldName, "x", std::move(xBin), std::move(distX));

-    fileName outputPath(baseTimeDir());
-    mkDir(outputPath);
-    OFstream graphFile(outputPath/writerPtr_->getFileName(coords, fieldNames));
+    writerPtr_->open(coords, baseTimeDir() / fldName);
+    fileName outFile = writerPtr_->write(fldName, distY);
+    writerPtr_->close(true);

-    Log << "    Writing distribution of " << fieldName
-        << " to " << graphFile.name() << endl;
-
-    List<const scalarField*> yPtrs(1);
-    yPtrs[0] = &distY;
-    writerPtr_->write(coords, fieldNames, yPtrs, graphFile);
+    Log << "    Wrote distribution of " << fieldName
+        << " to " << time_.relativePath(outFile) << endl;
 }