mirror of
https://develop.openfoam.com/Development/openfoam.git
synced 2025-11-28 03:28:01 +00:00
ENH: refactor coordSet writers (#2347)
- the very old 'writer' class was fully stateless and always templated
  on a particular output type. This is now replaced with a
  'coordSetWriter' with similar concepts as previously introduced for
  surface writers (#1206).

  - writers change from being a generic state-less set of routines to
    more properly conforming to the normal notion of a writer.

  - Parallel data is done *outside* of the writers, since they are used
    in a wide variety of contexts and the caller is currently still in
    a better position for deciding how to combine parallel data.

ENH: update sampleSets to sample on per-field basis (#2347)

- sample/write a field in a single step.

- support for 'sampleOnExecute' to obtain values at execution
  intervals without writing.

- support 'sets' input as a dictionary entry (as well as a list),
  which is similar to the changes for sampled-surface and permits use
  of changeDictionary to modify content.

- globalIndex for gather to reduce parallel communication, less code.

- qualify the sampleSet results (properties) with the name of the set.
  The sample results were previously without a qualifier, which meant
  that only the last property value was actually saved (previous ones
  were overwritten). For example,

  ```
  sample1
  {
      scalar
      {
          average(line,T)         349.96521;
          min(line,T)             349.9544281;
          max(line,T)             350;
          average(cells,T)        349.9854619;
          min(cells,T)            349.6589286;
          max(cells,T)            350.4967271;
          average(line,epsilon)   0.04947733869;
          min(line,epsilon)       0.04449639927;
          max(line,epsilon)       0.06452856475;
      }
      label
      {
          size(line,T)            79;
          size(cells,T)           1720;
          size(line,epsilon)      79;
      }
  }
  ```

ENH: update particleTracks application

- use globalIndex to manage original parcel addressing and for
  gathering. Simplify code by introducing a helper class, storing
  intermediate fields in hash tables instead of separate lists.

ADDITIONAL NOTES:

- the regionSizeDistribution largely retains separate writers since
  the utility of placing sum/dev/count for all fields into a single
  file is questionable.

- the streamline writing remains a "soft" upgrade, which means that
  scalar and vector fields are still collected a priori and not
  on-the-fly. This is due to how the streamline infrastructure is
  currently handled (should be upgraded in the future).
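The new writer is stateful, with a declare/open/write/close call sequence that
can be seen in the streamLineBase diff below. As a minimal sketch of that
pattern, assembled from the calls visible in this commit (the variables
`setFormat`, `formatOptions`, `nFields`, `tracks`, `outputPath`, `fieldName`
and `fieldValues` are placeholders for whatever state the caller holds):

```
// Sketch of the stateful coordSetWriter usage, following the calls
// made in the new streamLineBase::writeToFile() below.
autoPtr<coordSetWriter> writerPtr = coordSetWriter::New
(
    setFormat,      // e.g. "vtk"
    formatOptions   // format-specific options dictionary
);

auto& writer = *writerPtr;

// Declare how many write() calls will follow
writer.nFields(nFields);

// Bind the coordinate/track geometry and the output location
writer.open(tracks, outputPath);

// One call per field; returns the name of the file written
fileName outFile = writer.write(fieldName, fieldValues);

writer.close(true);     // flush output and clear the geometry
```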
```
@@ -6,7 +6,7 @@
      \\/     M anipulation  |
 -------------------------------------------------------------------------------
     Copyright (C) 2015 OpenFOAM Foundation
-    Copyright (C) 2015-2021 OpenCFD Ltd.
+    Copyright (C) 2015-2022 OpenCFD Ltd.
 -------------------------------------------------------------------------------
 License
     This file is part of OpenFOAM.
@@ -29,6 +29,7 @@ License
 #include "streamLineBase.H"
 #include "fvMesh.H"
 #include "ReadFields.H"
 #include "OFstream.H"
 #include "sampledSet.H"
+#include "globalIndex.H"
 #include "mapDistribute.H"
```
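The added `globalIndex.H` include supports the gather changes described in the
commit message. For orientation, a minimal sketch of the usual globalIndex
gather pattern; this code is not part of the diff and assumes the common
member overloads `toGlobal()` and `gather(local, all)`:

```
// Sketch only (not from this commit): typical globalIndex usage.
// Offsets are built from each rank's local element count.
const globalIndex gIdx(localValues.size());

// Map a local index to a globally unique index
const label globalId = gIdx.toGlobal(0);

// Gather the local lists onto the master as one flat list
scalarList allValues;
gIdx.gather(localValues, allValues);
```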
```
@@ -626,8 +627,7 @@ bool Foam::functionObjects::streamLineBase::writeToFile()
 
 
     // Note: filenames scattered below since used in global call
-    fileName scalarVtkFile;
-    fileName vectorVtkFile;
+    HashTable<fileName> outputFileNames;
 
     if (Pstream::master())
     {
```
```
@@ -706,117 +706,112 @@ bool Foam::functionObjects::streamLineBase::writeToFile()
             }
         }
 
-        // Convert scalar values
-
-        if (!allScalars_.empty() && !tracks.empty())
-        {
-            List<List<scalarField>> scalarValues(allScalars_.size());
-
-            forAll(allScalars_, scalari)
-            {
-                DynamicList<scalarList>& allTrackVals = allScalars_[scalari];
-                scalarValues[scalari].setSize(nTracks);
-
-                forAll(allTrackVals, tracki)
-                {
-                    scalarList& vals = allTrackVals[tracki];
-                    if (vals.size())
-                    {
-                        const label newTracki = oldToNewTrack[tracki];
-                        scalarValues[scalari][newTracki].transfer(vals);
-                    }
-                }
-            }
-
-            scalarVtkFile = fileName
-            (
-                vtkPath
-              / scalarFormatterPtr_().getFileName
-                (
-                    tracks[0],
-                    scalarNames_
-                )
-            );
-
-            Log << "    Writing data to " << scalarVtkFile.path() << endl;
-
-            scalarFormatterPtr_().write
-            (
-                true,                   // writeTracks
-                List<scalarField>(),    // times
-                tracks,
-                scalarNames_,
-                scalarValues,
-                OFstream(scalarVtkFile)()
-            );
-        }
-
-        // Convert vector values
-
-        if (!allVectors_.empty() && !tracks.empty())
-        {
-            List<List<vectorField>> vectorValues(allVectors_.size());
-
-            forAll(allVectors_, vectori)
-            {
-                DynamicList<vectorList>& allTrackVals = allVectors_[vectori];
-                vectorValues[vectori].setSize(nTracks);
-
-                forAll(allTrackVals, tracki)
-                {
-                    vectorList& vals = allTrackVals[tracki];
-                    if (vals.size())
-                    {
-                        const label newTracki = oldToNewTrack[tracki];
-                        vectorValues[vectori][newTracki].transfer(vals);
-                    }
-                }
-            }
-
-            vectorVtkFile = fileName
-            (
-                vtkPath
-              / vectorFormatterPtr_().getFileName(tracks[0], vectorNames_)
-            );
-
-            //Info<< "    Writing vector data to " << vectorVtkFile << endl;
-
-            vectorFormatterPtr_().write
-            (
-                true,                   // writeTracks
-                List<scalarField>(),    // times
-                tracks,
-                vectorNames_,
-                vectorValues,
-                OFstream(vectorVtkFile)()
-            );
-        }
-
-        // Log << "    Writing data to " << scalarVtkFile.path() << endl;
-    }
-
-    // File names are generated on the master but setProperty needs to
-    // be across all procs
-    Pstream::scatter(scalarVtkFile);
-    for (const word& fieldName : scalarNames_)
-    {
-        dictionary propsDict;
-        propsDict.add
-        (
-            "file",
-            time_.relativePath(scalarVtkFile, true)
-        );
-        setProperty(fieldName, propsDict);
-    }
-
-    Pstream::scatter(vectorVtkFile);
-    for (const word& fieldName : vectorNames_)
-    {
-        dictionary propsDict;
-        propsDict.add
-        (
-            "file",
-            time_.relativePath(vectorVtkFile, true)
-        );
-        setProperty(fieldName, propsDict);
-    }
+        const bool canWrite =
+        (
+            !tracks.empty()
+         && trackWriterPtr_
+         && trackWriterPtr_->enabled()
+         && (!allScalars_.empty() || !allVectors_.empty())
+        );
+
+        if (canWrite)
+        {
+            auto& writer = trackWriterPtr_();
+
+            writer.nFields(allScalars_.size() + allVectors_.size());
+
+            writer.open
+            (
+                tracks,
+                (vtkPath / tracks[0].name())
+            );
+
+            // Temporary measure
+            if (!allScalars_.empty())
+            {
+                List<List<scalarField>> scalarValues(allScalars_.size());
+
+                forAll(allScalars_, scalari)
+                {
+                    DynamicList<scalarList>& allTrackVals = allScalars_[scalari];
+                    scalarValues[scalari].resize(nTracks);
+
+                    forAll(allTrackVals, tracki)
+                    {
+                        scalarList& vals = allTrackVals[tracki];
+                        if (vals.size())
+                        {
+                            const label newTracki = oldToNewTrack[tracki];
+                            scalarValues[scalari][newTracki].transfer(vals);
+                        }
+                    }
+                }
+
+                forAll(scalarNames_, i)
+                {
+                    fileName outFile =
+                        writer.write(scalarNames_[i], scalarValues[i]);
+
+                    outputFileNames.insert
+                    (
+                        scalarNames_[i],
+                        time_.relativePath(outFile, true)
+                    );
+                }
+            }
+
+            if (!allVectors_.empty())
+            {
+                List<List<vectorField>> vectorValues(allVectors_.size());
+
+                forAll(allVectors_, vectori)
+                {
+                    DynamicList<vectorList>& allTrackVals = allVectors_[vectori];
+                    vectorValues[vectori].setSize(nTracks);
+
+                    forAll(allTrackVals, tracki)
+                    {
+                        vectorList& vals = allTrackVals[tracki];
+                        if (vals.size())
+                        {
+                            const label newTracki = oldToNewTrack[tracki];
+                            vectorValues[vectori][newTracki].transfer(vals);
+                        }
+                    }
+                }
+
+                forAll(vectorNames_, i)
+                {
+                    fileName outFile =
+                        writer.write(vectorNames_[i], vectorValues[i]);
+
+                    outputFileNames.insert
+                    (
+                        vectorNames_[i],
+                        time_.relativePath(outFile, true)
+                    );
+                }
+            }
+
+            writer.close(true);
+        }
+    }
+
+    // File names generated on the master but setProperty needed everywhere
+    Pstream::scatter(outputFileNames);
+
+    forAllConstIters(outputFileNames, iter)
+    {
+        const word& fieldName = iter.key();
+        const fileName& outputName = iter.val();
+
+        dictionary propsDict;
+        propsDict.add("file", outputName);
+        setProperty(fieldName, propsDict);
+    }
```
```
@@ -844,7 +839,7 @@ Foam::functionObjects::streamLineBase::streamLineBase
     const dictionary& dict
 )
 :
-    fvMeshFunctionObject(name, runTime, dict),
+    functionObjects::fvMeshFunctionObject(name, runTime, dict),
     dict_(dict),
     fields_()
 {}
@@ -858,7 +853,7 @@ Foam::functionObjects::streamLineBase::streamLineBase
     const wordList& fieldNames
 )
 :
-    fvMeshFunctionObject(name, runTime, dict),
+    functionObjects::fvMeshFunctionObject(name, runTime, dict),
     dict_(dict),
     fields_(fieldNames)
 {}
@@ -959,8 +954,13 @@ bool Foam::functionObjects::streamLineBase::read(const dictionary& dict)
     sampledSetPtr_.clear();
     sampledSetAxis_.clear();
 
-    scalarFormatterPtr_ = writer<scalar>::New(dict.get<word>("setFormat"));
-    vectorFormatterPtr_ = writer<vector>::New(dict.get<word>("setFormat"));
+    const word setFormat(dict.get<word>("setFormat"));
+
+    trackWriterPtr_ = coordSetWriter::New
+    (
+        setFormat,
+        dict.subOrEmptyDict("formatOptions").optionalSubDict(setFormat)
+    );
 
     return true;
 }
```
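With the new `coordSetWriter::New(setFormat, ...)` construction in `read()`,
per-format writer options are looked up under a `formatOptions` sub-dictionary,
in line with the surface writers. A hypothetical case-dictionary fragment
(the option names under `vtk` are illustrative only, not taken from this
commit):

```
setFormat       vtk;

formatOptions
{
    vtk
    {
        // Illustrative writer options
        format      binary;
        precision   10;
    }
}
```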