Mirror of https://develop.openfoam.com/Development/openfoam.git
ENH: particleTracks - updated to include field data
The utility now adds field data to all tracks (the previous version only
created the track geometry).
The new 'fields' entry can be used to output specific fields.
Example

    cloud           reactingCloud1;
    sampleFrequency 1;
    maxPositions    1000000;
    fields          (d U);    // includes wildcard support
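For reference, a fuller sketch of the same dictionary (typically constant/particleTracksProperties), showing the optional entries read in the first hunk below; the cloud name and all values are illustrative only:

    cloud           reactingCloud1;   // name of the lagrangian cloud
    sampleFrequency 1;                // sample every Nth parcel
    maxPositions    1000000;          // upper limit on positions per track
    maxTracks       -1;               // optional: <= 0 means no limit on the number of tracks
    setFormat       vtk;              // optional: set writer format (default: vtk)
    fields          ( d "U.*" );      // optional: fields to write, wildcards supported
    U               U;                // optional: velocity field used for track directions
    formatOptions   { }               // optional: options passed to the set writer

All of the new entries are optional; their defaults are visible in the first hunk of the diff.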
STYLE: minor typo fix
Committed by: Mark Olesen
Parent: 98c25d163a
Commit: 0f155daf86
@@ -15,4 +15,12 @@ const label sampleFrequency(propsDict.get<label>("sampleFrequency"));
 
 const label maxPositions(propsDict.get<label>("maxPositions"));
 
+const label maxTracks(propsDict.getOrDefault<label>("maxTracks", -1));
+
 const word setFormat(propsDict.getOrDefault<word>("setFormat", "vtk"));
+
+const wordRes fieldNames(propsDict.getOrDefault<wordRes>("fields", wordRes()));
+
+const word UName(propsDict.getOrDefault<word>("U", "U"));
+
+const dictionary formatOptions = propsDict.subOrEmptyDict("formatOptions");
@@ -6,6 +6,7 @@
      \\/     M anipulation  |
 -------------------------------------------------------------------------------
     Copyright (C) 2011-2016 OpenFOAM Foundation
+    Copyright (C) 2021 OpenCFD Ltd.
 -------------------------------------------------------------------------------
 License
     This file is part of OpenFOAM.
@@ -30,8 +31,8 @@ Group
     grpPostProcessingUtilities
 
 Description
-    Generate a legacy VTK file of particle tracks for cases that were
-    computed using a tracked-parcel-type cloud.
+    Generate particle tracks for cases that were computed using a
+    tracked-parcel-type cloud.
 
 \*---------------------------------------------------------------------------*/
 
@@ -44,16 +45,206 @@ Description
 #include "OFstream.H"
 #include "passiveParticleCloud.H"
 #include "writer.H"
+#include "ListOps.H"
 
+#define createTrack(field, trackValues)                                        \
+    createTrackField                                                           \
+    (                                                                          \
+        field,                                                                 \
+        sampleFrequency,                                                       \
+        maxPositions,                                                          \
+        startIds,                                                              \
+        allOrigProcs,                                                          \
+        allOrigIds,                                                            \
+        trackValues                                                            \
+    );
+
+#define setFields(fields, fieldNames)                                          \
+    setTrackFields                                                             \
+    (                                                                          \
+        obr,                                                                   \
+        fields,                                                                \
+        fieldNames,                                                            \
+        nTracks,                                                               \
+        startIds,                                                              \
+        allOrigProcs,                                                          \
+        allOrigIds,                                                            \
+        maxPositions,                                                          \
+        sampleFrequency                                                        \
+    );
+
+#define writeFields(fields, fieldNames, tracks, times, dirs)                   \
+    writeTrackFields                                                           \
+    (                                                                          \
+        fields,                                                                \
+        fieldNames,                                                            \
+        tracks,                                                                \
+        times,                                                                 \
+        dirs,                                                                  \
+        setFormat,                                                             \
+        formatOptions,                                                         \
+        cloudName                                                              \
+    );
+
 using namespace Foam;
 
+template<class Type>
+void createTrackField
+(
+    const Field<Type>& values,
+    const label sampleFrequency,
+    const label maxPositions,
+    const labelList& startIds,
+    const List<labelField>& allOrigProcs,
+    const List<labelField>& allOrigIds,
+    List<DynamicList<Type>>& trackValues
+)
+{
+    List<Field<Type>> procField(Pstream::nProcs());
+    procField[Pstream::myProcNo()] = values;
+    Pstream::gatherList(procField);
+
+    if (!Pstream::master())
+    {
+        return;
+    }
+
+    const label nTracks = trackValues.size();
+
+    forAll(procField, proci)
+    {
+        forAll(procField[proci], i)
+        {
+            const label globalId =
+                startIds[allOrigProcs[proci][i]] + allOrigIds[proci][i];
+
+            if (globalId % sampleFrequency == 0)
+            {
+                const label trackId = globalId/sampleFrequency;
+
+                if
+                (
+                    trackId < nTracks
+                 && trackValues[trackId].size() < maxPositions
+                )
+                {
+                    trackValues[trackId].append(procField[proci][i]);
+                }
+            }
+        }
+    }
+}
+
+
+template<class Type>
+void writeTrackFields
+(
+    List<List<DynamicList<Type>>>& fieldValues,
+    const wordList& fieldNames,
+    const PtrList<coordSet>& tracks,
+    const List<scalarField>& times,
+    const List<vectorField>& dirs,
+    const word& setFormat,
+    const dictionary& formatOptions,
+    const word& cloudName
+)
+{
+    if (fieldValues.empty())
+    {
+        return;
+    }
+
+    auto writerPtr = writer<Type>::New(setFormat, formatOptions);
+
+    const fileName outFile(writerPtr().getFileName(tracks[0], wordList(0)));
+
+    const fileName outPath
+    (
+        functionObject::outputPrefix/cloud::prefix/cloudName/"particleTracks"
+    );
+    mkDir(outPath);
+
+    OFstream os(outPath/(pTraits<Type>::typeName & "tracks." + outFile.ext()));
+
+    Info<< "Writing " << pTraits<Type>::typeName << " particle tracks in "
+        << setFormat << " format to " << os.name() << endl;
+
+
+    List<List<Field<Type>>> fields(fieldValues.size());
+    forAll(fields, fieldi)
+    {
+        fields[fieldi].setSize(fieldValues[fieldi].size());
+        forAll(fields[fieldi], tracki)
+        {
+            fields[fieldi][tracki].transfer(fieldValues[fieldi][tracki]);
+        }
+    }
+
+    writerPtr().write(true, times, tracks, fieldNames, fields, os);
+}
+
+
+template<class Type>
+Foam::label setTrackFields
+(
+    const objectRegistry& obr,
+    List<List<DynamicList<Type>>>& fields,
+    List<word>& fieldNames,
+    const label nTracks,
+    const labelList& startIds,
+    const List<labelField>& allOrigProcs,
+    const List<labelField>& allOrigIds,
+    const label maxPositions,
+    const label sampleFrequency
+)
+{
+    const auto availableFieldPtrs = obr.lookupClass<IOField<Type>>();
+
+    fieldNames = availableFieldPtrs.toc();
+
+    if (Pstream::parRun())
+    {
+        Pstream::combineGather(fieldNames, ListOps::uniqueEqOp<word>());
+        Pstream::combineScatter(fieldNames);
+        Foam::sort(fieldNames);
+    }
+
+    const label nFields = fieldNames.size();
+
+    if (fields.empty())
+    {
+        fields.setSize(nFields);
+        fieldNames.setSize(nFields);
+        forAll(fields, i)
+        {
+            fields[i].setSize(nTracks);
+        }
+    }
+
+    forAll(fieldNames, fieldi)
+    {
+        const word& fieldName = fieldNames[fieldi];
+
+        const auto* fldPtr = obr.cfindObject<IOField<Type>>(fieldName);
+
+        createTrack
+        (
+            fldPtr ? static_cast<const Field<Type>>(*fldPtr) : Field<Type>(),
+            fields[fieldi]
+        );
+    }
+
+    return nFields;
+}
+
+
 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
 
 int main(int argc, char *argv[])
 {
     argList::addNote
     (
-        "Generate a legacy VTK file of particle tracks for cases that were"
+        "Generate a file of particle tracks for cases that were"
         " computed using a tracked-parcel-type cloud"
     );
 
@@ -76,9 +267,9 @@ int main(int argc, char *argv[])
         << nl << endl;
 
     labelList maxIds(Pstream::nProcs(), -1);
-    forAll(timeDirs, timeI)
+    forAll(timeDirs, timei)
     {
-        runTime.setTime(timeDirs[timeI], timeI);
+        runTime.setTime(timeDirs[timei], timei);
         Info<< "Time = " << runTime.timeName() << endl;
 
         Info<< "    Reading particle positions" << endl;
@@ -92,6 +283,8 @@ int main(int argc, char *argv[])
             const label origId = p.origId();
             const label origProc = p.origProc();
 
+            // Handle case where we are processing particle data generated in
+            // parallel using more cores than when running this application.
             if (origProc >= maxIds.size())
             {
                 maxIds.setSize(origProc+1, -1);
|
|||||||
|
|
||||||
// Calculate starting ids for particles on each processor
|
// Calculate starting ids for particles on each processor
|
||||||
labelList startIds(numIds.size(), Zero);
|
labelList startIds(numIds.size(), Zero);
|
||||||
for (label i = 0; i < numIds.size()-1; i++)
|
for (label i = 0; i < numIds.size()-1; ++i)
|
||||||
{
|
{
|
||||||
startIds[i+1] += startIds[i] + numIds[i];
|
startIds[i+1] += startIds[i] + numIds[i];
|
||||||
}
|
}
|
||||||
@@ -132,130 +325,135 @@ int main(int argc, char *argv[])
 
 
     // Number of tracks to generate
-    label nTracks = nParticle/sampleFrequency;
+    const label nTracks =
+        maxTracks > 0
+      ? min(nParticle/sampleFrequency, maxTracks)
+      : nParticle/sampleFrequency;
 
     // Storage for all particle tracks
     List<DynamicList<vector>> allTracks(nTracks);
+    List<DynamicList<scalar>> allTrackTimes(nTracks);
+
+    // Lists of field, tracki, trackValues
+    //List<List<DynamicList<label>>> labelFields;
+    List<List<DynamicList<scalar>>> scalarFields;
+    List<List<DynamicList<vector>>> vectorFields;
+    List<List<DynamicList<sphericalTensor>>> sphTensorFields;
+    List<List<DynamicList<symmTensor>>> symTensorFields;
+    List<List<DynamicList<tensor>>> tensorFields;
+    //List<word> labelFieldNames;
+    List<word> scalarFieldNames;
+    List<word> vectorFieldNames;
+    List<word> sphTensorFieldNames;
+    List<word> symTensorFieldNames;
+    List<word> tensorFieldNames;
 
     Info<< "\nGenerating " << nTracks << " particle tracks for cloud "
         << cloudName << nl << endl;
 
-    forAll(timeDirs, timeI)
+    forAll(timeDirs, timei)
     {
-        runTime.setTime(timeDirs[timeI], timeI);
+        runTime.setTime(timeDirs[timei], timei);
         Info<< "Time = " << runTime.timeName() << endl;
 
-        List<pointField> allPositions(Pstream::nProcs());
-        List<labelField> allOrigIds(Pstream::nProcs());
-        List<labelField> allOrigProcs(Pstream::nProcs());
-
         // Read particles. Will be size 0 if no particles.
         Info<< "    Reading particle positions" << endl;
         passiveParticleCloud myCloud(mesh, cloudName);
 
+        pointField localPositions(myCloud.size(), Zero);
+        scalarField localTimes(myCloud.size(), Zero);
+
+        List<labelField> allOrigIds(Pstream::nProcs());
+        List<labelField> allOrigProcs(Pstream::nProcs());
+
         // Collect the track data on all processors that have positions
-        allPositions[Pstream::myProcNo()].setSize
-        (
-            myCloud.size(),
-            point::zero
-        );
         allOrigIds[Pstream::myProcNo()].setSize(myCloud.size(), Zero);
         allOrigProcs[Pstream::myProcNo()].setSize(myCloud.size(), Zero);
 
         label i = 0;
         for (const passiveParticle& p : myCloud)
         {
-            allPositions[Pstream::myProcNo()][i] = p.position();
             allOrigIds[Pstream::myProcNo()][i] = p.origId();
             allOrigProcs[Pstream::myProcNo()][i] = p.origProc();
+            localPositions[i] = p.position();
+            localTimes[i] = runTime.value();
             ++i;
         }
 
         // Collect the track data on the master processor
-        Pstream::gatherList(allPositions);
         Pstream::gatherList(allOrigIds);
         Pstream::gatherList(allOrigProcs);
 
-        Info<< "    Constructing tracks" << nl << endl;
-        if (Pstream::master())
-        {
-            forAll(allPositions, proci)
-            {
-                forAll(allPositions[proci], i)
-                {
-                    label globalId =
-                        startIds[allOrigProcs[proci][i]]
-                      + allOrigIds[proci][i];
-
-                    if (globalId % sampleFrequency == 0)
-                    {
-                        label trackId = globalId/sampleFrequency;
-                        if (allTracks[trackId].size() < maxPositions)
-                        {
-                            allTracks[trackId].append
-                            (
-                                allPositions[proci][i]
-                            );
-                        }
-                    }
-                }
-            }
-        }
+        objectRegistry obr
+        (
+            IOobject
+            (
+                "cloudFields",
+                runTime.timeName(),
+                runTime
+            )
+        );
+        myCloud.readFromFiles(obr, fieldNames);
+
+        // Create track positions and track time fields
+        // (not registered as IOFields)
+        // Note: createTrack is a local #define to reduce arg count...
+        createTrack(localPositions, allTracks);
+        createTrack(localTimes, allTrackTimes);
+
+        // Create the track fields
+        // Note: setFields is a local #define to reduce arg count...
+        //setFields(labelFields, labelFieldNames);
+        setFields(scalarFields, scalarFieldNames);
+        setFields(vectorFields, vectorFieldNames);
+        setFields(sphTensorFields, sphTensorFieldNames);
+        setFields(symTensorFields, symTensorFieldNames);
+        setFields(tensorFields, tensorFieldNames);
     }
 
 
     if (Pstream::master())
     {
         PtrList<coordSet> tracks(allTracks.size());
-        forAll(allTracks, trackI)
+        List<scalarField> times(tracks.size());
+        forAll(tracks, tracki)
         {
             tracks.set
             (
-                trackI,
-                new coordSet
-                (
-                    "track" + Foam::name(trackI),
-                    "distance"
-                )
+                tracki,
+                new coordSet("track" + Foam::name(tracki), "distance")
             );
-            tracks[trackI].transfer(allTracks[trackI]);
+            tracks[tracki].transfer(allTracks[tracki]);
+            times[tracki].transfer(allTrackTimes[tracki]);
         }
 
-        autoPtr<writer<scalar>> scalarFormatterPtr = writer<scalar>::New
-        (
-            setFormat
-        );
+        Info<< nl;
 
-        //OFstream vtkTracks(vtkPath/"particleTracks.vtk");
-        fileName vtkFile
-        (
-            scalarFormatterPtr().getFileName
-            (
-                tracks[0],
-                wordList(0)
-            )
-        );
+        const label Uid = vectorFieldNames.find(UName);
+        List<vectorField> dirs(nTracks);
 
-        OFstream vtkTracks
-        (
-            vtkPath/("particleTracks." + vtkFile.ext())
-        );
-        Info<< "\nWriting particle tracks in " << setFormat
-            << " format to " << vtkTracks.name()
-            << nl << endl;
+        if (Uid != -1)
+        {
+            const auto& UTracks = vectorFields[Uid];
+            forAll(UTracks, tracki)
+            {
+                const auto& U = UTracks[tracki];
+                dirs[tracki] = U/(mag(U) + ROOTVSMALL);
+            }
+        }
 
-        scalarFormatterPtr().write
-        (
-            true,
-            tracks,
-            wordList(0),
-            List<List<scalarField>>(0),
-            vtkTracks
-        );
-    }
+        // Write track fields
+        // Note: writeFields is a local #define to reduce arg count...
+        //writeFields(allLabelFields, labelFieldNames, tracks);
+        writeFields(scalarFields, scalarFieldNames, tracks, times, dirs);
+        writeFields(vectorFields, vectorFieldNames, tracks, times, dirs);
+        writeFields(sphTensorFields, sphTensorFieldNames, tracks, times, dirs);
+        writeFields(symTensorFields, symTensorFieldNames, tracks, times, dirs);
+        writeFields(tensorFields, tensorFieldNames, tracks, times, dirs);
+    }
 
-    Info<< "End\n" << endl;
+    Info<< nl << "End\n" << endl;
 
     return 0;
 }
@@ -27,7 +27,7 @@ Application
     steadyParticleTracks
 
 Group
-    grpPostProcessingUtilitie
+    grpPostProcessingUtilities
 
 Description
     Generate a legacy VTK file of particle tracks for cases that were