Mirror of https://develop.openfoam.com/Development/openfoam.git
Synced 2025-11-28 03:28:01 +00:00
ENH: code improvements for vtkCloud function object (issue #926)
- use parallel list writing, beginDataArray methods.

- use static_assert to restrict conversion of non-label integral types

- cache .vtp.series information by fileName instead of by cloud name.
  This avoids issues if the output directory changes, and simplifies code.

ENH: emit TimeValue in files generated by vtkCloud

- additional information for passing to ParaView

ENH: vtkCloud output to postProcessing/ (issue #866)

- better alignment with other function objects, no collision with
  foamToVTK output.
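The static_assert item above is easiest to see in isolation. Below is a small standalone sketch of the same pattern (plain C++, not OpenFOAM code: "label" is a local stand-in typedef and writeComponent() a hypothetical helper), restricting a template to label or floating-point component types so that other integral types fail at compile time instead of being converted silently:

// Standalone sketch of the static_assert restriction (assumed names).
#include <cstdint>
#include <iostream>
#include <type_traits>

using label = int32_t;  // assumption: stand-in for the OpenFOAM label type

template<class CmptType>
void writeComponent(CmptType val)
{
    static_assert
    (
        std::is_same<CmptType, label>::value
     || std::is_floating_point<CmptType>::value,
        "Label and Floating-point vector space only"
    );

    std::cout << val << '\n';  // stand-in for the real VTK formatter call
}

int main()
{
    writeComponent<label>(42);      // OK: label component
    writeComponent<double>(3.14);   // OK: floating-point component
    // writeComponent<bool>(true);  // compile error, as intended
    return 0;
}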
@@ -44,36 +44,36 @@ namespace functionObjects
 }
 }


 // * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //

 void Foam::functionObjects::vtkCloud::writeVerts
 (
     autoPtr<vtk::formatter>& format,
-    const label nParcels
+    const label nTotParcels
 ) const
 {
-    if (Pstream::master())
-    {
-        format().tag(vtk::fileTag::VERTS);
+    // No collectives - can skip on slave processors
+    if (!format) return;

-        // Same payload throughout
-        const uint64_t payLoad = (nParcels * sizeof(label));
+    // Same payload for connectivity and offsets
+    const uint64_t payLoad = vtk::sizeofData<label>(nTotParcels);
+
+    format().tag(vtk::fileTag::VERTS);

     //
     // 'connectivity'
     // = linear mapping onto points
     //
     {
-        format().openDataArray<label>(vtk::dataArrayAttr::CONNECTIVITY)
-            .closeTag();
+        format().beginDataArray<label>(vtk::dataArrayAttr::CONNECTIVITY);

         format().writeSize(payLoad);
-        for (label i=0; i < nParcels; ++i)
+        for (label i=0; i < nTotParcels; ++i)
         {
             format().write(i);
         }
         format().flush();

         format().endDataArray();
     }

@@ -82,27 +82,24 @@ void Foam::functionObjects::vtkCloud::writeVerts
     // = linear mapping onto points (with 1 offset)
     //
     {
-        format().openDataArray<label>(vtk::dataArrayAttr::OFFSETS)
-            .closeTag();
+        format().beginDataArray<label>(vtk::dataArrayAttr::OFFSETS);

         format().writeSize(payLoad);
-        for (label i=0; i < nParcels; ++i)
+        for (label i=0; i < nTotParcels; ++i)
         {
             format().write(i+1);
         }
         format().flush();

         format().endDataArray();
     }

     format().endTag(vtk::fileTag::VERTS);
-    }
 }


 bool Foam::functionObjects::vtkCloud::writeCloud
 (
-    const fileName& outputName,
+    const fileName& file,
     const word& cloudName
 )
 {
@@ -136,8 +133,8 @@ bool Foam::functionObjects::vtkCloud::writeCloud
     }

     // Total number of parcels on all processes
-    label nTotParcels = pointsPtr->size();
-    reduce(nTotParcels, sumOp<label>());
+    const label nTotParcels =
+        returnReduce(pointsPtr->size(), sumOp<label>());

     if (pruneEmpty_ && !nTotParcels)
     {
@@ -147,89 +144,92 @@ bool Foam::functionObjects::vtkCloud::writeCloud
     std::ofstream os;
     autoPtr<vtk::formatter> format;

-    // Header
+    if (!file.hasExt("vtp"))
+    {
+        FatalErrorInFunction
+            << type() << " File missing .vtp extension!" << nl << endl
+            << exit(FatalError);
+    }
+
     if (Pstream::master())
     {
-        os.open(outputName);
+        mkDir(file.path());
+        os.open(file);

         format = writeOpts_.newFormatter(os);

+        // beginFile()
+
         // XML (inline)
         format()
             .xmlHeader()
             .xmlComment
             (
-                "cloud=" + cloudName
-              + " time=" + time_.timeName()
-              + " index=" + Foam::name(time_.timeIndex())
+                "case='" + time_.globalCaseName()
+              + "' cloud='" + cloudName
+              + "' time='" + time_.timeName()
+              + "' index='" + Foam::name(time_.timeIndex())
+              + "'"
             )
-            .beginVTKFile(vtk::fileTag::POLY_DATA, "0.1");
+            .beginVTKFile<vtk::fileTag::POLY_DATA>();

-        // Begin piece
+        // FieldData with TimeValue
+        format()
+            .beginFieldData()
+            .writeTimeValue(time_.value())
+            .endFieldData();
+
+
+        // writeGeometry()
+
+        // beginPiece()
         if (useVerts_)
         {
             format()
-                .openTag(vtk::fileTag::PIECE)
-                .xmlAttr(vtk::fileAttr::NUMBER_OF_POINTS, nTotParcels)
-                .xmlAttr(vtk::fileAttr::NUMBER_OF_VERTS, nTotParcels)
-                .closeTag();
+                .tag
+                (
+                    vtk::fileTag::PIECE,
+                    vtk::fileAttr::NUMBER_OF_POINTS, nTotParcels,
+                    vtk::fileAttr::NUMBER_OF_VERTS, nTotParcels
+                );
         }
         else
         {
             format()
-                .openTag(vtk::fileTag::PIECE)
-                .xmlAttr(vtk::fileAttr::NUMBER_OF_POINTS, nTotParcels)
-                .closeTag();
-        }
+                .tag
+                (
+                    vtk::fileTag::PIECE,
+                    vtk::fileAttr::NUMBER_OF_POINTS, nTotParcels
+                );
+        }
-    }

-    // Points
-    if (Pstream::master())
-    {
-        const uint64_t payLoad = (nTotParcels * 3 * sizeof(float));
-
-        format().tag(vtk::fileTag::POINTS)
-            .openDataArray<float,3>(vtk::dataArrayAttr::POINTS)
-            .closeTag();
-
-        format().writeSize(payLoad);
-
-        // Master
-        vtk::writeList(format(), *pointsPtr);
-
-        // Slaves - recv
-        for (int slave=1; slave<Pstream::nProcs(); ++slave)
-        {
-            IPstream fromSlave(Pstream::commsTypes::scheduled, slave);
-            pointList points(fromSlave);
-
-            vtk::writeList(format(), points);
-        }
-
-        format().flush();
-
-        format()
-            .endDataArray()
-            .endTag(vtk::fileTag::POINTS);
+        // writePoints()
+        {
+            const uint64_t payLoad = vtk::sizeofData<float,3>(nTotParcels);
+
+            format().tag(vtk::fileTag::POINTS)
+                .beginDataArray<float,3>(vtk::dataArrayAttr::POINTS);
+
+            format().writeSize(payLoad);
+        }
+    }
+
+    vtk::writeListParallel(format.ref(), *pointsPtr);
+
+    if (Pstream::master())
+    {
+        format().flush();
+        format().endDataArray();
+        format().endTag(vtk::fileTag::POINTS);

         if (useVerts_)
         {
             writeVerts(format, nTotParcels);
         }
     }
-    else
-    {
-        // Slaves - send
-
-        OPstream toMaster
-        (
-            Pstream::commsTypes::scheduled,
-            Pstream::masterNo()
-        );
-
-        toMaster
-            << *pointsPtr;
-    }


     // Prevent any possible conversion of positions as a field
@@ -237,8 +237,7 @@ bool Foam::functionObjects::vtkCloud::writeCloud
     (
         [](const word& k)
         {
-            return k.startsWith("position")
-                || k.startsWith("coordinate");
+            return k.startsWith("position") || k.startsWith("coordinate");
         },
         true // prune
     );
@@ -251,16 +250,17 @@ bool Foam::functionObjects::vtkCloud::writeCloud


     // Write fields
-    const vtk::fileTag dataType =
-    (
-        useVerts_
-      ? vtk::fileTag::CELL_DATA
-      : vtk::fileTag::POINT_DATA
-    );
-
     if (Pstream::master())
     {
-        format().tag(dataType);
+        if (useVerts_)
+        {
+            format().beginCellData();
+        }
+        else
+        {
+            format().beginPointData();
+        }
     }

     DynamicList<word> written(obrTmp.size());
@@ -271,15 +271,16 @@ bool Foam::functionObjects::vtkCloud::writeCloud

     if (Pstream::master())
     {
-        format().endTag(dataType);
-    }
+        if (useVerts_)
+        {
+            format().endCellData();
+        }
+        else
+        {
+            format().endPointData();
+        }

-    // Footer
-    if (Pstream::master())
-    {
-        // slight cheat. </Piece> too
-        format().endTag(vtk::fileTag::PIECE);
+        format().endPiece();

         format().endTag(vtk::fileTag::POLY_DATA)
             .endVTKFile();
     }
@@ -291,7 +292,7 @@ bool Foam::functionObjects::vtkCloud::writeCloud
    // {
    //     cloudName
    //     {
-   //         file "<case>/VTK/cloud1_000.vtp";
+   //         file "<case>/postProcessing/name/cloud1_0001.vtp";
    //         fields (U T rho);
    //     }
    // }
@@ -299,9 +300,9 @@ bool Foam::functionObjects::vtkCloud::writeCloud
     dictionary propsDict;

     // Use case-local filename and "<case>" shortcut for readable output
-    // and possibly relocation of files
+    // and for possibly relocation of files

-    fileName fName(outputName.relative(stringOps::expand("<case>")));
+    fileName fName(file.relative(time_.globalPath()));
     if (fName.isAbsolute())
     {
         propsDict.add("file", fName);
@@ -334,17 +335,18 @@ Foam::functionObjects::vtkCloud::vtkCloud
     pruneEmpty_(false),
     selectClouds_(),
     selectFields_(),
-    dirName_("VTK"),
+    directory_(),
     series_()
 {
-    if (postProcess)
-    {
-        // Disable for post-process mode.
-        // Emit as FatalError for the try/catch in the caller.
-        FatalError
-            << type() << " disabled in post-process mode"
-            << exit(FatalError);
-    }
+    // May still want this? (OCT-2018)
+    // if (postProcess)
+    // {
+    //     // Disable for post-process mode.
+    //     // Emit as FatalError for the try/catch in the caller.
+    //     FatalError
+    //         << type() << " disabled in post-process mode"
+    //         << exit(FatalError);
+    // }

     read(dict);
 }
@@ -356,8 +358,11 @@ bool Foam::functionObjects::vtkCloud::read(const dictionary& dict)
 {
     fvMeshFunctionObject::read(dict);

+    // We probably cannot trust old information after a reread
+    series_.clear();
+
     //
-    // writer options - default is xml base64. Legacy is not desired.
+    // Default format is xml base64. Legacy is not desired.
     //
     writeOpts_ = vtk::formatType::INLINE_BASE64;

@@ -374,7 +379,7 @@ bool Foam::functionObjects::vtkCloud::read(const dictionary& dict)
     (
         dict.lookupOrDefault
         (
-            "writePrecision",
+            "precision",
             IOstream::defaultPrecision()
         )
     );
@@ -382,7 +387,7 @@ bool Foam::functionObjects::vtkCloud::read(const dictionary& dict)
     // Info<< type() << " " << name() << " output-format: "
     //     << writeOpts_.description() << nl;

-    int padWidth = dict.lookupOrDefault<int>("width", 8);
+    const int padWidth = dict.lookupOrDefault<int>("width", 8);

     // Appropriate printf format - Enforce min/max sanity limits
     if (padWidth < 1 || padWidth > 31)
@@ -400,11 +405,6 @@ bool Foam::functionObjects::vtkCloud::read(const dictionary& dict)
     pruneEmpty_ = dict.lookupOrDefault<bool>("prune", false);


-    //
-    // other options
-    //
-    dict.readIfPresent("directory", dirName_);
-
     selectClouds_.clear();
     dict.readIfPresent("clouds", selectClouds_);

@@ -418,6 +418,28 @@ bool Foam::functionObjects::vtkCloud::read(const dictionary& dict)
     selectFields_.clear();
     dict.readIfPresent("fields", selectFields_);

+
+    // Output directory
+
+    directory_.clear();
+    dict.readIfPresent("directory", directory_);
+
+    if (directory_.size())
+    {
+        // User-defined output directory
+        directory_.expand();
+        if (!directory_.isAbsolute())
+        {
+            directory_ = time_.globalPath()/directory_;
+        }
+    }
+    else
+    {
+        // Standard postProcessing/ naming
+        directory_ = time_.globalPath()/functionObject::outputPrefix/name();
+    }
+    directory_.clean();
+
     return true;
 }

@@ -437,30 +459,15 @@ bool Foam::functionObjects::vtkCloud::write()
         return true; // skip - not available
     }

-    // const word timeDesc =
-    // (
-    //     useTimeName_
-    //   ? time_.timeName()
-    //   : printf_.empty()
-    //   ? Foam::name(time_.timeIndex())
-    //   : word::printf(printf_, time_.timeIndex())
-    // );
+    const scalar timeValue = time_.value();

-    const word timeDesc =
+    const word timeDesc = "_" +
     (
         printf_.empty()
       ? Foam::name(time_.timeIndex())
       : word::printf(printf_, time_.timeIndex())
     );

-    fileName vtkDir(dirName_);
-    vtkDir.expand();
-    if (!vtkDir.isAbsolute())
-    {
-        vtkDir = stringOps::expand("<case>")/vtkDir;
-    }
-    mkDir(vtkDir);
-
     Log << name() << " output Time: " << time_.timeName() << nl;

     // Each cloud separately
@@ -470,26 +477,33 @@ bool Foam::functionObjects::vtkCloud::write()

         const fileName outputName
         (
-            vtkDir/cloudName + "_" + timeDesc + ".vtp"
+            directory_/cloudName + timeDesc + ".vtp"
         );

+        // writeCloud() includes mkDir (on master)
+
         if (writeCloud(outputName, cloudName))
         {
-            Log << " cloud : " << outputName << endl;
+            Log << " cloud : "
+                << outputName.relative(time_.globalPath()) << endl;

             if (Pstream::master())
             {
                 // Add to file-series and emit as JSON
-                // - misbehaves if vtkDir changes during the run,
-                // but that causes other issues too.
+                fileName seriesName(vtk::seriesWriter::base(outputName));

-                series_(cloudName).append({time_.value(), timeDesc});
+                vtk::seriesWriter& series = series_(seriesName);

-                vtk::seriesWrite
-                (
-                    vtkDir/cloudName + ".vtp",
-                    series_[cloudName]
-                );
+                // First time?
+                // Load from file, verify against filesystem,
+                // prune time >= currentTime
+                if (series.empty())
+                {
+                    series.load(seriesName, true, timeValue);
+                }
+
+                series.append(timeValue, outputName);
+                series.write(seriesName);
             }
         }
     }
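With the change above, the master process keeps one vtk::seriesWriter per series file name and rewrites the matching .vtp.series index after each output. For orientation, such a ParaView file-series index is a small JSON document of roughly this shape (file names and time values below are purely illustrative):

{
  "file-series-version" : "1.0",
  "files" : [
    { "name" : "cloud1_00000098.vtp", "time" : 0.098 },
    { "name" : "cloud1_00000099.vtp", "time" : 0.099 }
  ]
}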
@@ -55,12 +55,12 @@ Usage
         cloud     | | no | defaultCloud
         clouds    | wordRe list of clouds | no |
         fields    | wordRe list of fields | no |
+        prune     | Suppress writing of empty clouds | no | false
         cellData  | Emit cellData instead of pointData | no | false
-        directory | The output directory name | no | VTK
+        directory | The output directory name | no | postProcessing/NAME
         width     | Padding width for file name | no | 8
-        format    | ascii or binary format | no | binary
-        prune     | suppress writing of empty clouds | no | false
-        writePrecision | write precision in ascii | no | same as IOstream
+        format    | Format as ascii or binary | no | binary
+        precision | Write precision in ascii | no | same as IOstream
     \endtable

     The output filename and fields are added to the functionObjectProperties
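A minimal functionObjects entry exercising the keywords in the table above might look like the following sketch (entry name, cloud selection and field list are illustrative, not taken from this commit):

vtkCloud1
{
    type            vtkCloud;
    writeControl    writeTime;

    // Optional entries (see table above)
    format          binary;
    precision       10;
    width           8;
    prune           true;
    // directory    "<case>/VTK";   // override the postProcessing/ default

    clouds          (".*");
    fields          (U T rho);
}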
@@ -94,12 +94,10 @@ SourceFiles

 #include "fvMeshFunctionObject.H"
 #include "foamVtkOutputOptions.H"
+#include "foamVtkSeriesWriter.H"
 #include "wordRes.H"
-#include "instant.H"
-#include "DynamicList.H"
 #include "HashTable.H"


 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

 namespace Foam
@@ -135,27 +133,25 @@ class vtkCloud
         //- Subset of cloud fields to process
         wordRes selectFields_;

-        //- Output directory name
-        fileName dirName_;
+        //- Output directory
+        fileName directory_;

         //- Per cloud output for file series
-        HashTable<DynamicList<instant>> series_;
+        HashTable<vtk::seriesWriter, fileName> series_;


     // Private Member Functions

-        //- Write a cloud to disk, and record on the cloud OutputProperties
-        bool writeCloud
-        (
-            const fileName& outputName,
-            const word& cloudName
-        );
+        //- Write a cloud to disk (creates parent directory),
+        //- and record on the cloud OutputProperties.
+        //  \param file is the output file name, with extension.
+        bool writeCloud(const fileName& file, const word& cloudName);

-        //- Write VERTS connectivity
+        //- Write vertex (cells) - callable on master only
         void writeVerts
         (
             autoPtr<vtk::formatter>& format,
-            const label nParcels
+            const label nTotParcels
         ) const;

         //- Write fields of IOField<Type>
@@ -35,10 +35,23 @@ Foam::wordList Foam::functionObjects::vtkCloud::writeFields
     const label nTotParcels
 ) const
 {
-    const int nCmpt(pTraits<Type>::nComponents);
+    const direction nCmpt(pTraits<Type>::nComponents);
+
+    static_assert
+    (
+        (
+            std::is_same<typename pTraits<Type>::cmptType,label>::value
+         || std::is_floating_point<typename pTraits<Type>::cmptType>::value
+        ),
+        "Label and Floating-point vector space only"
+    );
+
+    const bool isLabel =
+        std::is_same<typename pTraits<Type>::cmptType,label>::value;

-    const bool useIntField =
-        std::is_integral<typename pTraits<Type>::cmptType>();
+    // Other integral types (eg, bool etc) would need cast/convert to label.
+    // Similarly for labelVector etc.

     // Fields are not always on all processors (eg, multi-component parcels).
     // Thus need to resolve names between all processors.
@@ -47,104 +60,42 @@ Foam::wordList Foam::functionObjects::vtkCloud::writeFields
     Pstream::combineGather(fieldNames, ListOps::uniqueEqOp<word>());
     Pstream::combineScatter(fieldNames);

-    // Sort to get identical order of fields on all processors
+    // Consistent order on all processors
     Foam::sort(fieldNames);

     for (const word& fieldName : fieldNames)
     {
-        const auto* fldPtr = obrTmp.findObject<IOField<Type>>(fieldName);
+        const List<Type>* fldPtr = obrTmp.findObject<IOField<Type>>(fieldName);
+        const List<Type>& values = (fldPtr ? *fldPtr : List<Type>());

         if (Pstream::master())
         {
-            if (useIntField)
+            if (isLabel)
             {
-                const uint64_t payLoad(nTotParcels * nCmpt * sizeof(label));
-
-                format().openDataArray<label, nCmpt>(fieldName)
-                    .closeTag();
+                const uint64_t payLoad =
+                    vtk::sizeofData<label, nCmpt>(nTotParcels);

+                format().beginDataArray<label, nCmpt>(fieldName);
                 format().writeSize(payLoad);
-
-                if (fldPtr)
-                {
-                    // Data on master
-                    const auto& fld = *fldPtr;
-
-                    // Ensure consistent output width
-                    for (const Type& val : fld)
-                    {
-                        for (int cmpt=0; cmpt < nCmpt; ++cmpt)
-                        {
-                            format().write(label(component(val, cmpt)));
-                        }
-                    }
-                }
-
-                // Slaves - recv
-                for (int slave=1; slave<Pstream::nProcs(); ++slave)
-                {
-                    IPstream fromSlave(Pstream::commsTypes::scheduled, slave);
-                    Field<Type> recv(fromSlave);
-
-                    for (const Type& val : recv)
-                    {
-                        for (int cmpt=0; cmpt < nCmpt; ++cmpt)
-                        {
-                            format().write(label(component(val, cmpt)));
-                        }
-                    }
-                }
             }
             else
             {
-                const uint64_t payLoad(nTotParcels * nCmpt * sizeof(float));
-
-                format().openDataArray<float, nCmpt>(fieldName)
-                    .closeTag();
+                const uint64_t payLoad =
+                    vtk::sizeofData<float, nCmpt>(nTotParcels);

+                format().beginDataArray<float, nCmpt>(fieldName);
                 format().writeSize(payLoad);
-
-                if (fldPtr)
-                {
-                    // Data on master
-                    vtk::writeList(format(), *fldPtr);
-                }
-
-                // Slaves - recv
-                for (int slave=1; slave<Pstream::nProcs(); ++slave)
-                {
-                    IPstream fromSlave(Pstream::commsTypes::scheduled, slave);
-                    Field<Type> recv(fromSlave);
-
-                    vtk::writeList(format(), recv);
-                }
             }
+        }
+
+        vtk::writeListParallel(format.ref(), values);
+
+        if (Pstream::master())
+        {
+            // Non-legacy
             format().flush();
-
-            format()
-                .endDataArray();
-        }
-        else
-        {
-            // Slaves - send
-
-            OPstream toMaster
-            (
-                Pstream::commsTypes::scheduled,
-                Pstream::masterNo()
-            );
-
-            if (fldPtr)
-            {
-                toMaster
-                    << *fldPtr;
-            }
-            else
-            {
-                toMaster
-                    << Field<Type>();
-            }
+            format().endDataArray();
         }
     }

@@ -19,7 +19,7 @@ cloudWrite1
    // format ascii;
    // writePrecision 12;

-   //- Output directory name - Default="VTK"
+   //- Output directory name - Default postProcessing
    // directory "VTK";

    //- Write more frequent than fields