sampledSets, streamlines: Various improvements

Sampled sets and streamlines now write all their fields to the same
file. This prevents excessive duplication of the geometry and makes
post-processing tasks more convenient.
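
For example, a sets function configured along these lines (a minimal
sketch; the entries shown are illustrative of the usual sampledSets
setup) now produces one file per set containing p, U and T, rather
than one file per field:

    sets
    {
        type                sets;
        libs                ("libsampling.so");
        writeControl        writeTime;
        setFormat           raw;
        interpolationScheme cellPoint;
        fields              (p U T); // <-- all written to one file
        sets
        (
            line
            {
                type    lineUniform;
                start   (0 0 0);
                end     (1 0 0);
                nPoints 100;
            }
        );
    }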

"axis" entries are now optional in sampled sets and streamlines. When
omitted, a default entry will be used, which is chosen appropriately for
the coordinate set and the write format. Some combinations are not
supported. For example, a scalar ("x", "y", "z" or "distance") axis
cannot be used to write in the vtk format, as vtk requires 3D locations
with which to associate data. Similarly, a point ("xyz") axis cannot be
used with the gnuplot format, as gnuplot needs a single scalar to
associate with the x-axis.
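
Schematically (an illustrative fragment), a set written in gnuplot
format might use a scalar axis, while the same set written in vtk
format needs the point axis, or can omit the entry and take the
default:

    lineY
    {
        type    lineUniform;
        start   (0 0 0);
        end     (0 1 0);
        nPoints 50;
        axis    y;   // scalar axis: suits gnuplot, raw and csv
        //axis  xyz; // point axis: required by the vtk format
        // omitting "axis" selects a format-appropriate default
    }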

Streamlines can now write out fields of any type, not just scalars and
vectors, and there is no longer a strict requirement for velocity to be
one of the fields.
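
For instance (an illustrative fragment of a streamlines dictionary), a
symmetric tensor such as R can now be sampled alongside a scalar, with
the tracking velocity taken from the field named by the optional "U"
entry rather than from the sampled list:

    fields      (p R); // any primitive types; velocity not required
    U           U;     // field used for tracking (defaults to "U")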

Streamlines now output to postProcessing/<functionName>/<time>/<file> in
the same way as other functions. The additional "sets" subdirectory has
been removed.

The raw set writer now aligns columns correctly.

The handling of segments in coordSet and sampledSet has been
fixed/completed. Segments mean that a coordinate set can represent a
number of contiguous lines, disconnected points, or some combination
thereof. This works in parallel; segments remain contiguous across
processor boundaries. Set writers now only need one write method, as the
previous "writeTracks" functionality is now handled by streamlines
providing the writer with the appropriate segment structure.
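
Programmatically, the segment structure is a per-point label: runs of
points that share a label form contiguous lines. A sketch, using the
label-list coordSet constructor that appears in this commit (the point
values are illustrative):

    // Two segments; a line of three points and a line of two
    labelList segments(5);
    segments[0] = segments[1] = segments[2] = 0;
    segments[3] = segments[4] = 1;

    pointField positions(5);
    positions[0] = point(0, 0, 0);   // segment 0 ...
    positions[1] = point(0.1, 0, 0);
    positions[2] = point(0.2, 0, 0);
    positions[3] = point(0, 1, 0);   // segment 1 ...
    positions[4] = point(0.1, 1, 0);

    const coordSet set(segments, word::null, positions);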

Coordinate sets and set writers now have a convenient programmatic
interface. To write a graph of A and B against some coordinate X, in
gnuplot format, we can call the following:

    setWriter::New("gnuplot")->write
    (
        directoryName,
        graphName,
        coordSet(true, "X", X), // <-- "true" indicates a contiguous
        "A",                    //     line, "false" would mean
        A,                      //     disconnected points
        "B",
        B
    );

This write function is variadic. It supports any number of name/field
pairs, and the fields can be of any primitive type.
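
So a scalar and a vector field, for example, can be written together in
one call (a hypothetical pressure/velocity graph in csv format, with d
a scalarField of distances):

    setWriter::New("csv")->write
    (
        directoryName,
        graphName,
        coordSet(true, "distance", d),
        "p",
        p, // scalarField
        "U",
        U  // vectorField
    );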

Support for Jplot and Xmgrace formats has been removed. Raw, CSV,
Gnuplot, VTK and Ensight formats are all still available.

The old "graph" functionality has been removed from the code, with the
exception of the randomProcesses library and associated applications
(noise, dnsFoam and boxTurb). The intention is that these should also
eventually be converted to use the setWriters. For now, so that it is
clear that the "graph" functionality is not to be used elsewhere, it has
been moved into a subdirectory of the randomProcesses library.

Author: Will Bainbridge
Date:   2021-11-25 09:42:19 +00:00
Parent: 50fb2477bd
Commit: 25a6d068f0

185 changed files with 4727 additions and 6308 deletions

File: histogram.C

@@ -24,6 +24,7 @@ License
\*---------------------------------------------------------------------------*/
#include "histogram.H"
#include "coordSet.H"
#include "volFields.H"
#include "addToRunTimeSelectionTable.H"
@@ -39,33 +40,6 @@ namespace functionObjects
}
// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //
void Foam::functionObjects::histogram::writeGraph
(
const coordSet& coords,
const word& fieldName,
const scalarField& values
) const
{
const wordList fieldNames(1, fieldName);
fileName outputPath = file_.baseTimeDir();
mkDir(outputPath);
OFstream graphFile
(
outputPath/formatterPtr_().getFileName(coords, fieldNames)
);
Log << " Writing histogram of " << fieldName
<< " to " << graphFile.name() << endl;
List<const scalarField*> yPtrs(1);
yPtrs[0] = &values;
formatterPtr_().write(coords, fieldNames, yPtrs, graphFile);
}
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::functionObjects::histogram::histogram
@@ -97,8 +71,7 @@ bool Foam::functionObjects::histogram::read(const dictionary& dict)
min_ = dict.lookupOrDefault<scalar>("min", 0);
dict.lookup("nBins") >> nBins_;
word format(dict.lookup("setFormat"));
formatterPtr_ = setWriter<scalar>::New(format);
formatterPtr_ = setWriter::New(dict.lookup("setFormat"), dict);
return true;
}
@@ -153,13 +126,12 @@ bool Foam::functionObjects::histogram::write()
);
// Calculate the mid-points of bins for the graph axis
pointField xBin(nBins_);
scalarField xBin(nBins_);
const scalar delta = (max_- min_)/nBins_;
scalar x = min_ + 0.5*delta;
forAll(xBin, i)
{
xBin[i] = point(x, 0, 0);
xBin[i] = x;
x += delta;
}
@@ -185,15 +157,14 @@ bool Foam::functionObjects::histogram::write()
{
volFrac /= sumVol;
const coordSet coords
formatterPtr_().write
(
"Volume_Fraction",
"x",
xBin,
mag(xBin)
file_.baseTimeDir(),
typeName,
coordSet(true, fieldName_, xBin),
field.name(),
volFrac
);
writeGraph(coords, field.name(), volFrac);
}
}

File: histogram.H

@@ -103,17 +103,7 @@ class histogram
label nBins_;
//- Output formatter to write
autoPtr<setWriter<scalar>> formatterPtr_;
// Private Member Functions
void writeGraph
(
const coordSet& coords,
const word& valueName,
const scalarField& values
) const;
autoPtr<setWriter> formatterPtr_;
public:

File: interfaceHeight.C

@@ -77,13 +77,16 @@ void Foam::functionObjects::interfaceHeight::writePositions()
"",
mesh_,
meshSearch(mesh_),
"xyz",
coordSet::axisTypeNames_[coordSet::axisType::XYZ],
locations_[li] + gHat*mesh_.bounds().mag(),
locations_[li] - gHat*mesh_.bounds().mag()
);
// Find the height of the location above the boundary
scalar hLB = set.size() ? - gHat & (locations_[li] - set[0]) : - vGreat;
scalar hLB =
set.size()
? - gHat & (locations_[li] - set.pointCoord(0))
: - vGreat;
reduce(hLB, maxOp<scalar>());
// Calculate the integrals of length and length*alpha along the sampling
@@ -97,7 +100,7 @@ void Foam::functionObjects::interfaceHeight::writePositions()
continue;
}
const vector& p0 = set[si], p1 = set[si+1];
const vector& p0 = set.pointCoord(si), p1 = set.pointCoord(si+1);
const label c0 = set.cells()[si], c1 = set.cells()[si+1];
const label f0 = set.faces()[si], f1 = set.faces()[si+1];
const scalar a0 = interpolator->interpolate(p0, c0, f0);

File: regionSizeDistribution.C

@@ -65,29 +65,6 @@ namespace Foam
// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //
void Foam::functionObjects::regionSizeDistribution::writeGraph
(
const coordSet& coords,
const word& valueName,
const scalarField& values
) const
{
const wordList valNames(1, valueName);
fileName outputPath = file_.baseTimeDir();
mkDir(outputPath);
OFstream str(outputPath/formatterPtr_().getFileName(coords, valNames));
Info<< " Writing distribution of " << valueName << " to " << str.name()
<< endl;
List<const scalarField*> valPtrs(1);
valPtrs[0] = &values;
formatterPtr_().write(coords, valNames, valPtrs, str);
}
void Foam::functionObjects::regionSizeDistribution::writeAlphaFields
(
const regionSplit& regions,
@@ -216,15 +193,16 @@ Foam::functionObjects::regionSizeDistribution::findPatchRegions
}
Foam::tmp<Foam::scalarField>
template<class Type>
Foam::tmp<Foam::Field<Type>>
Foam::functionObjects::regionSizeDistribution::divide
(
const scalarField& num,
const Field<Type>& num,
const scalarField& denom
)
{
tmp<scalarField> tresult(new scalarField(num.size()));
scalarField& result = tresult.ref();
tmp<Field<Type>> tresult(new Field<Type>(num.size()));
Field<Type>& result = tresult.ref();
forAll(denom, i)
{
@@ -234,35 +212,71 @@ Foam::functionObjects::regionSizeDistribution::divide
}
else
{
result[i] = 0.0;
result[i] = Zero;
}
}
return tresult;
}
void Foam::functionObjects::regionSizeDistribution::writeGraphs
template<class Type>
void Foam::functionObjects::regionSizeDistribution::generateFields
(
const word& fieldName, // name of field
const labelList& indices, // index of bin for each region
const scalarField& sortedField, // per region field data
const Field<Type>& sortedField, // per region field data
const scalarField& binCount, // per bin number of regions
const coordSet& coords // graph data for bins
wordList& fieldNames,
PtrList<Field<Type>>& fields
) const
{
if (Pstream::master())
{
// Calculate per-bin average
scalarField binSum(nBins_, 0.0);
// Calculate per-bin sum
Field<Type> binSum(nBins_, Zero);
forAll(sortedField, i)
{
binSum[indices[i]] += sortedField[i];
}
// Calculate per-bin average
Field<Type> binAvg(divide(binSum, binCount));
// Append
fields.setSize(fieldNames.size());
fieldNames.append(fieldName + "_sum");
fields.append(binSum);
fieldNames.append(fieldName + "_avg");
fields.append(binAvg);
}
}
void Foam::functionObjects::regionSizeDistribution::generateFields
(
const word& fieldName, // name of field
const labelList& indices, // index of bin for each region
const scalarField& sortedField, // per region field data
const scalarField& binCount, // per bin number of regions
wordList& fieldNames,
PtrList<scalarField>& fields
) const
{
if (Pstream::master())
{
// Calculate per-bin sum
scalarField binSum(nBins_, Zero);
forAll(sortedField, i)
{
binSum[indices[i]] += sortedField[i];
}
// Calculate per-bin average
scalarField binAvg(divide(binSum, binCount));
// Per bin deviation
scalarField binSqrSum(nBins_, 0.0);
// Calculate per-bin deviation
scalarField binSqrSum(nBins_, Zero);
forAll(sortedField, i)
{
binSqrSum[indices[i]] += Foam::sqr(sortedField[i]);
@@ -272,50 +286,50 @@ void Foam::functionObjects::regionSizeDistribution::writeGraphs
sqrt(divide(binSqrSum, binCount) - Foam::sqr(binAvg))
);
// Write average
writeGraph(coords, fieldName + "_sum", binSum);
// Write average
writeGraph(coords, fieldName + "_avg", binAvg);
// Write deviation
writeGraph(coords, fieldName + "_dev", binDev);
// Append
fields.setSize(fieldNames.size());
fieldNames.append(fieldName + "_sum");
fields.append(binSum);
fieldNames.append(fieldName + "_avg");
fields.append(binAvg);
fieldNames.append(fieldName + "_dev");
fields.append(binDev);
}
}
void Foam::functionObjects::regionSizeDistribution::writeGraphs
template<class Type>
void Foam::functionObjects::regionSizeDistribution::generateFields
(
const word& fieldName, // name of field
const scalarField& cellField, // per cell field data
const Field<Type>& cellField, // per cell field data
const regionSplit& regions, // per cell the region(=droplet)
const labelList& sortedRegions, // valid regions in sorted order
const scalarField& sortedNormalisation,
const labelList& indices, // per region index of bin
const labelList& indices, // index of bin for each region
const scalarField& binCount, // per bin number of regions
const coordSet& coords // graph data for bins
wordList& fieldNames,
PtrList<Field<Type>>& fields
) const
{
// Sum on a per-region basis. Parallel reduced.
Map<scalar> regionField(regionSum(regions, cellField));
Map<Type> regionField(regionSum(regions, cellField));
// Extract in region order
scalarField sortedField
Field<Type> sortedField
(
sortedNormalisation
* extractData
(
sortedRegions,
regionField
)
sortedNormalisation*extractData(sortedRegions, regionField)
);
writeGraphs
// Generate fields
generateFields
(
fieldName, // name of field
indices, // index of bin for each region
sortedField, // per region field data
binCount, // per bin number of regions
coords // graph data for bins
fieldNames,
fields
);
}
@@ -348,7 +362,7 @@ Foam::functionObjects::regionSizeDistribution::~regionSizeDistribution()
bool Foam::functionObjects::regionSizeDistribution::read(const dictionary& dict)
{
dict.lookup("field") >> alphaName_;
dict.lookup("alpha") >> alphaName_;
dict.lookup("patches") >> patchNames_;
dict.lookup("threshold") >> threshold_;
dict.lookup("maxDiameter") >> maxDiam_;
@@ -357,16 +371,7 @@ bool Foam::functionObjects::regionSizeDistribution::read(const dictionary& dict)
dict.lookup("nBins") >> nBins_;
dict.lookup("fields") >> fields_;
word format(dict.lookup("setFormat"));
formatterPtr_ = setWriter<scalar>::New(format);
if (dict.found("coordinateSystem"))
{
coordSysPtr_ = coordinateSystem::New(obr_, dict);
Info<< "Transforming all vectorFields with coordinate system "
<< coordSysPtr_().name() << endl;
}
formatterPtr_ = setWriter::New(dict.lookup("setFormat"), dict);
return true;
}
@@ -664,18 +669,14 @@ bool Foam::functionObjects::regionSizeDistribution::write()
if (allRegionVolume.size())
{
// Construct mids of bins for plotting
pointField xBin(nBins_);
scalarField xBin(nBins_);
scalar x = 0.5*delta;
forAll(xBin, i)
{
xBin[i] = point(x, 0, 0);
xBin[i] = x;
x += delta;
}
const coordSet coords("diameter", "x", xBin, mag(xBin));
// Get in region order the alpha*volume and diameter
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -715,12 +716,6 @@ bool Foam::functionObjects::regionSizeDistribution::write()
binCount[indices[i]] += 1.0;
}
// Write counts
if (Pstream::master())
{
writeGraph(coords, "count", binCount);
}
// Write to log
{
Info<< " Bins:" << endl;
@@ -740,103 +735,80 @@ bool Foam::functionObjects::regionSizeDistribution::write()
Info<< endl;
}
// Declare fields and field names
wordList fieldNames;
#define DeclareTypeFields(Type, nullArg) \
PtrList<Field<Type>> Type##Fields;
FOR_ALL_FIELD_TYPES(DeclareTypeFields);
#undef DeclareTypeFields
// Write average and deviation of droplet volume.
writeGraphs
// Add the bin count
fieldNames.append("binCount");
#define TypeFieldsAppend(Type, nullArg) \
appendFields(binCount, Type##Fields);
#undef TypeFieldsAppend
// Add the volumes
generateFields
(
"volume", // name of field
indices, // per region the bin index
sortedVols, // per region field data
binCount, // per bin number of regions
coords // graph data for bins
"volume",
indices,
sortedVols,
binCount,
fieldNames,
scalarFields
);
// Collect some more field
// Add other sampled fields
forAll(fields_, fieldi)
{
forAll(fields_, i)
{
if (obr_.foundObject<volScalarField>(fields_[i]))
{
Info<< " Scalar field " << fields_[i] << endl;
bool found = false;
const scalarField& fld =
obr_.lookupObject<volScalarField>(fields_[i])
.primitiveField();
writeGraphs
(
fields_[i], // name of field
alphaVol*fld, // per cell field data
regions, // per cell the region(=droplet)
sortedRegions, // valid regions in sorted order
1.0/sortedVols, // per region normalisation
indices, // index of bin for each region
binCount, // per bin number of regions
coords // graph data for bins
);
#define GenerateTypeFields(Type, nullArg) \
\
if (obr_.foundObject<VolField<Type>>(fields_[fieldi])) \
{ \
found = true; \
\
const VolField<Type>& field = \
obr_.lookupObject<VolField<Type>>(fields_[fieldi]); \
\
generateFields \
( \
fields_[fieldi], \
(alphaVol*field)(), \
regions, \
sortedRegions, \
1.0/sortedVols, \
indices, \
binCount, \
fieldNames, \
Type##Fields \
); \
}
}
FOR_ALL_FIELD_TYPES(GenerateTypeFields);
#undef GenerateTypeFields
if (!found) cannotFindObject(fields_[fieldi]);
}
{
forAll(fields_, i)
{
if (obr_.foundObject<volScalarField>(fields_[i]))
{
Info<< " Vector field " << fields_[i] << endl;
vectorField fld =
obr_.lookupObject<volVectorField>(fields_[i])
.primitiveField();
// Expand all field lists
#define TypeFieldsExpand(Type, nullArg) \
Type##Fields.setSize(fieldNames.size());
FOR_ALL_FIELD_TYPES(TypeFieldsExpand)
#undef TypeFieldsAppend
if (coordSysPtr_.valid())
{
Info<< "Transforming vector field " << fields_[i]
<< " with coordinate system "
<< coordSysPtr_().name()
<< endl;
fld = coordSysPtr_().localVector(fld);
}
// Components
for (direction cmp = 0; cmp < vector::nComponents; cmp++)
{
writeGraphs
(
fields_[i] + vector::componentNames[cmp],
alphaVol*fld.component(cmp),// per cell field data
regions, // per cell the region(=droplet)
sortedRegions, // valid regions in sorted order
1.0/sortedVols, // per region normalisation
indices, // index of bin for each region
binCount, // per bin number of regions
coords // graph data for bins
);
}
// Magnitude
writeGraphs
(
fields_[i] + "mag", // name of field
alphaVol*mag(fld), // per cell field data
regions, // per cell the region(=droplet)
sortedRegions, // valid regions in sorted order
1.0/sortedVols, // per region normalisation
indices, // index of bin for each region
binCount, // per bin number of regions
coords // graph data for bins
);
}
}
}
// Write
formatterPtr_().write
(
file_.baseTimeDir(),
typeName,
coordSet(true, "diameter", xBin),
fieldNames
#define TypeFieldsParameter(Type, nullArg) , Type##Fields
FOR_ALL_FIELD_TYPES(TypeFieldsParameter)
#undef TypeFieldsParameter
);
}
return true;

File: regionSizeDistribution.H

@@ -66,13 +66,6 @@ Description
maxDiameter 0.5e-4;
minDiameter 0;
setFormat gnuplot;
coordinateSystem
{
type cartesian;
origin (0 0 0);
e3 (0 1 1);
e1 (1 0 0);
}
}
\endverbatim
@@ -88,7 +81,6 @@ Usage
maxDiameter | maximum region equivalent diameter | yes |
minDiameter | minimum region equivalent diameter | no | 0
setFormat | writing format | yes |
coordinateSystem | transformation for vector fields | no |
\endtable
See also
@@ -157,10 +149,7 @@ class regionSizeDistribution
wordList fields_;
//- Output formatter to write
autoPtr<setWriter<scalar>> formatterPtr_;
//- Optional coordinate system
autoPtr<coordinateSystem> coordSysPtr_;
autoPtr<setWriter> formatterPtr_;
// Private Member Functions
@@ -173,13 +162,6 @@ class regionSizeDistribution
List<Type> extractData(const UList<label>& keys, const Map<Type>&)
const;
void writeGraph
(
const coordSet& coords,
const word& valueName,
const scalarField& values
) const;
//- Write volfields with the parts of alpha which are not
// droplets (liquidCore, backGround)
void writeAlphaFields
@@ -194,31 +176,46 @@
Map<label> findPatchRegions(const regionSplit&) const;
//- Helper: divide if denom != 0
static tmp<scalarField> divide(const scalarField&, const scalarField&);
template<class Type>
static tmp<Field<Type>> divide(const Field<Type>&, const scalarField&);
//- Given per-region data calculate per-bin average/deviation and graph
void writeGraphs
//- Generate fields for writing
template<class Type>
void generateFields
(
const word& fieldName, // name of field
const labelList& indices, // index of bin for each region
const Field<Type>& sortedField, // per region field data
const scalarField& binCount, // per bin number of regions
wordList& fieldNames,
PtrList<Field<Type>>& fields
) const;
//- Generate fields for writing. Scalar overload. It is possible to
// generate more fields for scalar input.
void generateFields
(
const word& fieldName, // name of field
const labelList& indices, // index of bin for each region
const scalarField& sortedField, // per region field data
const scalarField& binCount, // per bin number of regions
const coordSet& coords // graph data for bins
wordList& fieldNames,
PtrList<scalarField>& fields
) const;
//- Given per-cell data calculate per-bin average/deviation and graph
void writeGraphs
//- Generate fields for writing
template<class Type>
void generateFields
(
const word& fieldName, // name of field
const scalarField& cellField, // per cell field data
const Field<Type>& cellField, // per cell field data
const regionSplit& regions, // per cell the region(=droplet)
const labelList& sortedRegions, // valid regions in sorted order
const scalarField& sortedNormalisation,
const labelList& indices, // index of bin for each region
const scalarField& binCount, // per bin number of regions
const coordSet& coords // graph data for bins
wordList& fieldNames,
PtrList<Field<Type>>& fields
) const;

View File

@@ -38,6 +38,29 @@ License
#include "writeFile.H"
#include "addToRunTimeSelectionTable.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
template<class Type>
void gatherAndFlatten(DynamicField<Type>& field)
{
List<List<Type>> gatheredField(Pstream::nProcs());
gatheredField[Pstream::myProcNo()] = field;
Pstream::gatherList(gatheredField);
field =
ListListOps::combine<List<Type>>
(
gatheredField,
accessOp<List<Type>>()
);
}
}
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
namespace Foam
@@ -58,250 +81,6 @@
}
// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //
Foam::autoPtr<Foam::indirectPrimitivePatch>
Foam::functionObjects::streamlines::wallPatch() const
{
const polyBoundaryMesh& patches = mesh_.boundaryMesh();
label nFaces = 0;
forAll(patches, patchi)
{
if (isA<wallPolyPatch>(patches[patchi]))
{
nFaces += patches[patchi].size();
}
}
labelList addressing(nFaces);
nFaces = 0;
forAll(patches, patchi)
{
if (isA<wallPolyPatch>(patches[patchi]))
{
const polyPatch& pp = patches[patchi];
forAll(pp, i)
{
addressing[nFaces++] = pp.start()+i;
}
}
}
return autoPtr<indirectPrimitivePatch>
(
new indirectPrimitivePatch
(
IndirectList<face>
(
mesh_.faces(),
addressing
),
mesh_.points()
)
);
}
void Foam::functionObjects::streamlines::track()
{
IDLList<streamlinesParticle> initialParticles;
streamlinesCloud particles
(
mesh_,
cloudName_,
initialParticles
);
const sampledSet& seedPoints = sampledSetPtr_();
forAll(seedPoints, i)
{
particles.addParticle
(
new streamlinesParticle
(
mesh_,
seedPoints[i],
seedPoints.cells()[i],
lifeTime_
)
);
}
label nSeeds = returnReduce(particles.size(), sumOp<label>());
Info << " seeded " << nSeeds << " particles" << endl;
// Read or lookup fields
PtrList<volScalarField> vsFlds;
PtrList<interpolation<scalar>> vsInterp;
PtrList<volVectorField> vvFlds;
PtrList<interpolation<vector>> vvInterp;
label UIndex = -1;
label nScalar = 0;
label nVector = 0;
forAll(fields_, i)
{
if (mesh_.foundObject<volScalarField>(fields_[i]))
{
nScalar++;
}
else if (mesh_.foundObject<volVectorField>(fields_[i]))
{
nVector++;
}
else
{
FatalErrorInFunction
<< "Cannot find field " << fields_[i] << nl
<< "Valid scalar fields are:"
<< mesh_.names(volScalarField::typeName) << nl
<< "Valid vector fields are:"
<< mesh_.names(volVectorField::typeName)
<< exit(FatalError);
}
}
vsInterp.setSize(nScalar);
nScalar = 0;
vvInterp.setSize(nVector);
nVector = 0;
forAll(fields_, i)
{
if (mesh_.foundObject<volScalarField>(fields_[i]))
{
const volScalarField& f = mesh_.lookupObject<volScalarField>
(
fields_[i]
);
vsInterp.set
(
nScalar++,
interpolation<scalar>::New
(
interpolationScheme_,
f
)
);
}
else if (mesh_.foundObject<volVectorField>(fields_[i]))
{
const volVectorField& f = mesh_.lookupObject<volVectorField>
(
fields_[i]
);
if (f.name() == UName_)
{
UIndex = nVector;
}
vvInterp.set
(
nVector++,
interpolation<vector>::New
(
interpolationScheme_,
f
)
);
}
}
// Store the names
scalarNames_.setSize(vsInterp.size());
forAll(vsInterp, i)
{
scalarNames_[i] = vsInterp[i].psi().name();
}
vectorNames_.setSize(vvInterp.size());
forAll(vvInterp, i)
{
vectorNames_[i] = vvInterp[i].psi().name();
}
// Check that we know the index of U in the interpolators.
if (UIndex == -1)
{
FatalErrorInFunction
<< "Cannot find field to move particles with : " << UName_ << nl
<< "This field has to be present in the sampled fields " << fields_
<< " and in the objectRegistry."
<< exit(FatalError);
}
// Sampled data
// ~~~~~~~~~~~~
// Size to maximum expected sizes.
allTracks_.clear();
allTracks_.setCapacity(nSeeds);
allAges_.clear();
allAges_.setCapacity(nSeeds);
allScalars_.setSize(vsInterp.size());
forAll(allScalars_, i)
{
allScalars_[i].clear();
allScalars_[i].setCapacity(nSeeds);
}
allVectors_.setSize(vvInterp.size());
forAll(allVectors_, i)
{
allVectors_[i].clear();
allVectors_[i].setCapacity(nSeeds);
}
// Additional particle info
streamlinesParticle::trackingData td
(
particles,
vsInterp,
vvInterp,
UIndex, // index of U in vvInterp
trackDirection_ == trackDirection::forward,
trackOutside_,
nSubCycle_, // automatic track control:step through cells in steps?
trackLength_, // fixed track length
allTracks_,
allAges_,
allScalars_,
allVectors_
);
// Set very large dt. Note: cannot use great since 1/great is small
// which is a trigger value for the tracking...
const scalar trackTime = Foam::sqrt(great);
// Track
if (trackDirection_ == trackDirection::both)
{
initialParticles = particles;
}
particles.move(particles, td, trackTime);
if (trackDirection_ == trackDirection::both)
{
particles.IDLList<streamlinesParticle>::operator=(initialParticles);
td.trackForward_ = !td.trackForward_;
particles.move(particles, td, trackTime);
}
}
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::functionObjects::streamlines::streamlines
@@ -337,34 +116,17 @@ bool Foam::functionObjects::streamlines::read(const dictionary& dict)
Info<< type() << " " << name() << ":" << nl;
dict.lookup("fields") >> fields_;
UName_ = dict.lookupOrDefault("U", word("U"));
if (findIndex(fields_, UName_) == -1)
{
FatalIOErrorInFunction(dict)
<< "Velocity field for tracking " << UName_
<< " should be present in the list of fields " << fields_
<< exit(FatalIOError);
}
UName_ = dict.lookupOrDefault("U", word("U"));
writeAge_ = dict.lookupOrDefault<Switch>("writeAge", true);
// The trackForward entry is maintained here for backwards compatibility
if (!dict.found("direction") && dict.found("trackForward"))
{
trackDirection_ =
dict.lookup<bool>("trackForward")
? trackDirection::forward
: trackDirection::backward;
}
else
{
trackDirection_ = trackDirectionNames_[word(dict.lookup("direction"))];
}
trackDirection_ = trackDirectionNames_[word(dict.lookup("direction"))];
trackOutside_ = dict.lookupOrDefault<Switch>("outside", false);
dict.lookup("lifeTime") >> lifeTime_;
if (lifeTime_ < 1)
{
FatalErrorInFunction
@@ -372,10 +134,8 @@ bool Foam::functionObjects::streamlines::read(const dictionary& dict)
<< exit(FatalError);
}
bool subCycling = dict.found("nSubCycle");
bool fixedLength = dict.found("trackLength");
if (subCycling && fixedLength)
{
FatalIOErrorInFunction(dict)
@@ -384,33 +144,27 @@ bool Foam::functionObjects::streamlines::read(const dictionary& dict)
<< "trackLength')"
<< exit(FatalIOError);
}
nSubCycle_ = 1;
if (dict.readIfPresent("nSubCycle", nSubCycle_))
if (subCycling)
{
nSubCycle_ = max(dict.lookup<scalar>("nSubCycle"), 1);
trackLength_ = vGreat;
if (nSubCycle_ < 1)
{
nSubCycle_ = 1;
}
Info<< " automatic track length specified through"
<< " number of sub cycles : " << nSubCycle_ << nl << endl;
}
else
{
nSubCycle_ = 1;
dict.lookup("trackLength") >> trackLength_;
Info<< " fixed track length specified : "
<< trackLength_ << nl << endl;
}
interpolationScheme_ = dict.lookupOrDefault
(
"interpolationScheme",
interpolationCellPoint<scalar>::typeName
);
interpolationScheme_ =
dict.lookupOrDefault
(
"interpolationScheme",
interpolationCellPoint<scalar>::typeName
);
cloudName_ = dict.lookupOrDefault<word>("cloudName", "streamlines");
@@ -423,10 +177,8 @@ bool Foam::functionObjects::streamlines::read(const dictionary& dict)
meshSearchPtr_(),
dict.subDict("seedSampleSet")
);
sampledSetAxis_ = sampledSetPtr_->axis();
scalarFormatterPtr_ = setWriter<scalar>::New(dict.lookup("setFormat"));
vectorFormatterPtr_ = setWriter<vector>::New(dict.lookup("setFormat"));
formatterPtr_ = setWriter::New(dict.lookup("setFormat"), dict);
return true;
}
@@ -451,276 +203,382 @@ bool Foam::functionObjects::streamlines::write()
{
Info<< type() << " " << name() << " write:" << nl;
const Time& runTime = obr_.time();
// Create list of available fields
wordList fieldNames;
forAll(fields_, fieldi)
{
if
(
false
#define FoundTypeField(Type, nullArg) \
|| foundObject<VolField<Type>>(fields_[fieldi])
FOR_ALL_FIELD_TYPES(FoundTypeField)
#undef FoundTypeField
)
{
fieldNames.append(fields_[fieldi]);
}
else
{
cannotFindObject(fields_[fieldi]);
}
}
// Do all injection and tracking
track();
// Lookup fields and construct interpolators
#define DeclareTypeInterpolator(Type, nullArg) \
PtrList<interpolation<Type>> Type##Interp(fieldNames.size());
FOR_ALL_FIELD_TYPES(DeclareTypeInterpolator);
#undef DeclareTypeInterpolator
forAll(fieldNames, fieldi)
{
#define ConstructTypeInterpolator(Type, nullArg) \
if (mesh_.foundObject<VolField<Type>>(fieldNames[fieldi])) \
{ \
Type##Interp.set \
( \
fieldi, \
interpolation<Type>::New \
( \
interpolationScheme_, \
mesh_.lookupObject<VolField<Type>>(fieldNames[fieldi]) \
) \
); \
}
FOR_ALL_FIELD_TYPES(ConstructTypeInterpolator);
#undef ConstructTypeInterpolator
}
// Create a velocity interpolator if it is not already available
const label UIndex = findIndex(fieldNames, UName_);
tmpNrc<interpolation<vector>> UInterp(nullptr);
if (UIndex == -1)
{
UInterp =
tmpNrc<interpolation<vector>>
(
interpolation<vector>::New
(
interpolationScheme_,
mesh_.lookupObject<volVectorField>(UName_)
).ptr()
);
}
// Do tracking to create sampled data
DynamicField<point> allPositions;
DynamicField<label> allTracks;
DynamicField<label> allTrackParts;
DynamicField<scalar> allAges;
#define DeclareAllTypes(Type, nullArg) \
List<DynamicField<Type>> all##Type##s(fieldNames.size());
FOR_ALL_FIELD_TYPES(DeclareAllTypes);
#undef DeclareAllTypes
{
// Create a cloud and initialise with points from the sampled set
globalIndex gi(sampledSetPtr_().size());
streamlinesCloud particles
(
mesh_,
cloudName_,
IDLList<streamlinesParticle>()
);
forAll(sampledSetPtr_(), i)
{
particles.addParticle
(
new streamlinesParticle
(
mesh_,
sampledSetPtr_().positions()[i],
sampledSetPtr_().cells()[i],
lifeTime_,
gi.toGlobal(i)
)
);
}
// Report the number of successful seeds
const label nSeeds = returnReduce(particles.size(), sumOp<label>());
Info << " Seeded " << nSeeds << " particles" << endl;
// Create tracking data
streamlinesParticle::trackingData td
(
particles,
#define TypeInterpolatorParameter(Type, nullArg) \
Type##Interp,
FOR_ALL_FIELD_TYPES(TypeInterpolatorParameter)
#undef TypeInterpolatorParameter
UIndex != -1 ? vectorInterp[UIndex] : UInterp(),
trackDirection_ == trackDirection::forward,
trackOutside_,
nSubCycle_,
trackLength_,
allPositions,
allTracks,
allTrackParts,
allAges
#define AllTypesParameter(Type, nullArg) \
, all##Type##s
FOR_ALL_FIELD_TYPES(AllTypesParameter)
#undef AllTypesParameter
);
// Track
IDLList<streamlinesParticle> initialParticles;
if (trackDirection_ == trackDirection::both)
{
initialParticles = particles;
}
particles.move(particles, td, rootGreat);
if (trackDirection_ == trackDirection::both)
{
particles.IDLList<streamlinesParticle>::operator=(initialParticles);
td.trackForward_ = !td.trackForward_;
particles.move(particles, td, rootGreat);
}
}
// Gather data on the master
if (Pstream::parRun())
{
// Append slave tracks to master ones
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
globalIndex globalTrackIDs(allTracks_.size());
// Construct a distribution map to pull all to the master.
labelListList sendMap(Pstream::nProcs());
labelListList recvMap(Pstream::nProcs());
if (Pstream::master())
gatherAndFlatten(allPositions);
gatherAndFlatten(allTracks);
gatherAndFlatten(allTrackParts);
gatherAndFlatten(allAges);
forAll(fieldNames, fieldi)
{
// Master: receive all. My own first, then consecutive
// processors.
label trackI = 0;
forAll(recvMap, proci)
{
labelList& fromProc = recvMap[proci];
fromProc.setSize(globalTrackIDs.localSize(proci));
forAll(fromProc, i)
{
fromProc[i] = trackI++;
#define GatherAndFlattenAllTypes(Type, nullArg) \
if (Type##Interp.set(fieldi)) \
{ \
gatherAndFlatten(all##Type##s[fieldi]); \
}
FOR_ALL_FIELD_TYPES(GatherAndFlattenAllTypes);
#undef GatherAndFlattenAllTypes
}
}
// Report the total number of samples
Info<< " Sampled " << allPositions.size() << " locations" << endl;
// Bin-sort by track and trackPart to build an ordering
labelList order(allPositions.size());
if (Pstream::master())
{
const label nTracks = max(allTracks) + 1;
const label trackParti0 = min(allTrackParts);
const label trackParti1 = max(allTrackParts) + 1;
labelListList trackPartCounts
(
nTracks,
labelList(trackParti1 - trackParti0, 0)
);
forAll(allPositions, samplei)
{
const label tracki = allTracks[samplei];
const label trackParti = -trackParti0 + allTrackParts[samplei];
trackPartCounts[tracki][trackParti] ++;
}
label offset = 0;
labelListList trackPartOffsets
(
nTracks,
labelList(trackParti1 - trackParti0, 0)
);
forAll(trackPartOffsets, tracki)
{
forAll(trackPartOffsets[tracki], trackParti)
{
trackPartOffsets[tracki][trackParti] += offset;
offset += trackPartCounts[tracki][trackParti];
}
}
labelList& toMaster = sendMap[0];
toMaster.setSize(globalTrackIDs.localSize());
forAll(toMaster, i)
forAll(trackPartCounts, tracki)
{
toMaster[i] = i;
trackPartCounts[tracki] = 0;
}
const mapDistribute distMap
(
globalTrackIDs.size(),
move(sendMap),
move(recvMap)
);
// Distribute the track positions. Note: use scheduled comms
// to prevent buffering.
mapDistributeBase::distribute
(
Pstream::commsTypes::scheduled,
distMap.schedule(),
distMap.constructSize(),
distMap.subMap(),
false,
distMap.constructMap(),
false,
allTracks_,
flipOp()
);
// Distribute the ages
mapDistributeBase::distribute
(
Pstream::commsTypes::scheduled,
distMap.schedule(),
distMap.constructSize(),
distMap.subMap(),
false,
distMap.constructMap(),
false,
allAges_,
flipOp()
);
// Distribute the scalars
forAll(allScalars_, scalari)
forAll(allPositions, samplei)
{
allScalars_[scalari].shrink();
mapDistributeBase::distribute
(
Pstream::commsTypes::scheduled,
distMap.schedule(),
distMap.constructSize(),
distMap.subMap(),
false,
distMap.constructMap(),
false,
allScalars_[scalari],
flipOp()
);
allScalars_[scalari].setCapacity(allScalars_[scalari].size());
}
// Distribute the vectors
forAll(allVectors_, vectori)
{
allVectors_[vectori].shrink();
mapDistributeBase::distribute
(
Pstream::commsTypes::scheduled,
distMap.schedule(),
distMap.constructSize(),
distMap.subMap(),
false,
distMap.constructMap(),
false,
allVectors_[vectori],
flipOp()
);
allVectors_[vectori].setCapacity(allVectors_[vectori].size());
const label tracki = allTracks[samplei];
const label trackParti = -trackParti0 + allTrackParts[samplei];
order[samplei] =
trackPartOffsets[tracki][trackParti]
+ trackPartCounts[tracki][trackParti];
trackPartCounts[tracki][trackParti] ++;
}
}
//auto reportTrackParts = [&]()
//{
// Info<< nl;
// forAll(allPositions, samplei)
// {
// if
// (
// samplei == 0
// || allTracks[samplei] != allTracks[samplei - 1]
// || allTrackParts[samplei] != allTrackParts[samplei - 1]
// )
// {
// Info<< "track #" << allTracks[samplei]
// << " part #" << allTrackParts[samplei]
// << " from i=" << samplei << " to ";
// }
// if
// (
// samplei == allPositions.size() - 1
// || allTracks[samplei + 1] != allTracks[samplei]
// || allTrackParts[samplei + 1] != allTrackParts[samplei]
// )
// {
// Info<< "i=" << samplei << nl;
// }
// }
//};
label n = 0;
forAll(allTracks_, trackI)
//reportTrackParts();
// Reorder
if (Pstream::master())
{
n += allTracks_[trackI].size();
allPositions.rmap(allPositions, order);
allTracks.rmap(allTracks, order);
allTrackParts.rmap(allTrackParts, order);
allAges.rmap(allAges, order);
forAll(fieldNames, fieldi)
{
#define RMapAllTypes(Type, nullArg) \
if (Type##Interp.set(fieldi)) \
{ \
all##Type##s[fieldi].rmap(all##Type##s[fieldi], order); \
}
FOR_ALL_FIELD_TYPES(RMapAllTypes);
#undef RMapAllTypes
}
}
Info<< " Tracks:" << allTracks_.size() << nl
<< " Total samples:" << n
<< endl;
//reportTrackParts();
// Relabel tracks and track parts into track labels only, and join the
// forward and backward track parts that are connected to the seed
if (Pstream::master())
{
label samplei = 0, tracki = 0;
forAll(allPositions, samplej)
{
const label trackj = allTracks[samplej];
const label trackPartj = allTrackParts[samplej];
// Massage into form suitable for writers
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
allPositions[samplei] = allPositions[samplej];
allTracks[samplei] = tracki;
allTrackParts[samplei] = 0;
allAges[samplei] = allAges[samplej];
forAll(fieldNames, fieldi)
{
#define ShuffleUpAllTypes(Type, nullArg) \
if (Type##Interp.set(fieldi)) \
{ \
all##Type##s[fieldi][samplei] = \
all##Type##s[fieldi][samplej]; \
}
FOR_ALL_FIELD_TYPES(ShuffleUpAllTypes);
#undef ShuffleUpAllTypes
}
if (Pstream::master() && allTracks_.size())
const bool joinNewParts =
samplej != allPositions.size() - 1
&& trackPartj == -1
&& allTrackParts[samplej + 1] == 0;
if (!joinNewParts) samplei ++;
const bool newPart =
samplej == allPositions.size() - 1
|| trackj != allTracks[samplej + 1]
|| trackPartj != allTrackParts[samplej + 1];
if (!joinNewParts && newPart) tracki ++;
}
allPositions.resize(samplei);
allTracks.resize(samplei);
allTrackParts.resize(samplei);
allAges.resize(samplei);
forAll(fieldNames, fieldi)
{
#define ResizeAllTypes(Type, nullArg) \
if (Type##Interp.set(fieldi)) \
{ \
all##Type##s[fieldi].resize(samplei); \
}
FOR_ALL_FIELD_TYPES(ResizeAllTypes);
#undef ResizeAllTypes
}
}
//reportTrackParts();
// Write
if (Pstream::master() && allPositions.size())
{
// Make output directory
fileName vtkPath
fileName outputPath
(
runTime.globalPath()/writeFile::outputPrefix/"sets"/name()
mesh_.time().globalPath()/writeFile::outputPrefix/name()
);
if (mesh_.name() != fvMesh::defaultRegion)
{
vtkPath = vtkPath/mesh_.name();
outputPath = outputPath/mesh_.name();
}
vtkPath = vtkPath/mesh_.time().timeName();
mkDir(vtkPath);
outputPath = outputPath/mesh_.time().timeName();
mkDir(outputPath);
// Convert track positions
PtrList<coordSet> tracks(allTracks_.size());
forAll(allTracks_, trackI)
// Pass data to the formatter to write
const label nValueSets = fieldNames.size() + writeAge_;
wordList valueSetNames(nValueSets);
#define DeclareTypeValueSets(Type, nullArg) \
UPtrList<const Field<Type>> Type##ValueSets(nValueSets);
FOR_ALL_FIELD_TYPES(DeclareTypeValueSets);
#undef DeclareTypeValueSets
if (writeAge_)
{
tracks.set
(
trackI,
new coordSet
(
"track" + Foam::name(trackI),
sampledSetAxis_ //"xyz"
)
);
tracks[trackI].transfer(allTracks_[trackI]);
valueSetNames[0] = "age";
scalarValueSets.set(0, &allAges);
}
// Convert scalar values
if (allScalars_.size() > 0 || writeAge_)
forAll(fieldNames, fieldi)
{
List<List<scalarField>> ageAndScalarValues
(
allScalars_.size() + writeAge_
);
wordList ageAndScalarNames(allScalars_.size() + writeAge_);
valueSetNames[fieldi + writeAge_] = fieldNames[fieldi];
if (writeAge_)
{
DynamicList<scalarList>& allTrackAges = allAges_;
ageAndScalarValues[0].setSize(allTrackAges.size());
forAll(allTrackAges, trackI)
{
ageAndScalarValues[0][trackI].transfer
(
allTrackAges[trackI]
);
#define SetTypeValueSetPtr(Type, nullArg) \
if (Type##Interp.set(fieldi)) \
{ \
Type##ValueSets.set \
( \
fieldi + writeAge_, \
&all##Type##s[fieldi] \
); \
}
ageAndScalarNames[0] = "age";
}
forAll(allScalars_, scalari)
{
const label ageAndScalari = scalari + writeAge_;
DynamicList<scalarList>& allTrackVals = allScalars_[scalari];
ageAndScalarValues[ageAndScalari].setSize(allTrackVals.size());
forAll(allTrackVals, trackI)
{
ageAndScalarValues[ageAndScalari][trackI].transfer
(
allTrackVals[trackI]
);
}
ageAndScalarNames[ageAndScalari] = scalarNames_[scalari];
}
fileName vtkFile
(
vtkPath
/ scalarFormatterPtr_().getFileName
(
tracks[0],
ageAndScalarNames
)
);
Info<< " Writing data to " << vtkFile.path() << endl;
scalarFormatterPtr_().write
(
true, // writeTracks
tracks,
ageAndScalarNames,
ageAndScalarValues,
OFstream(vtkFile)()
);
}
// Convert vector values
if (allVectors_.size() > 0)
{
List<List<vectorField>> vectorValues(allVectors_.size());
forAll(allVectors_, vectori)
{
DynamicList<vectorList>& allTrackVals = allVectors_[vectori];
vectorValues[vectori].setSize(allTrackVals.size());
forAll(allTrackVals, trackI)
{
vectorValues[vectori][trackI].transfer
(
allTrackVals[trackI]
);
}
}
fileName vtkFile
(
vtkPath
/ vectorFormatterPtr_().getFileName
(
tracks[0],
vectorNames_
)
);
Info<< " Writing data to " << vtkFile.path() << endl;
vectorFormatterPtr_().write
(
true, // writeTracks
tracks,
vectorNames_,
vectorValues,
OFstream(vtkFile)()
);
FOR_ALL_FIELD_TYPES(SetTypeValueSetPtr);
#undef SetTypeValueSetPtr
}
formatterPtr_->write
(
outputPath,
"tracks",
coordSet(allTracks, word::null, allPositions),
valueSetNames
#define TypeValueSetsParameter(Type, nullArg) , Type##ValueSets
FOR_ALL_FIELD_TYPES(TypeValueSetsParameter)
#undef TypeValueSetsParameter
);
}
return true;

View File

@@ -50,12 +50,11 @@ Description
lifeTime 10000;
trackLength 1e-3;
nSubCycle 5;
cloudName particleTracks;
seedSampleSet
{
type uniform;
axis x; // distance;
type lineUniform;
axis xyz;
start (-0.0205 0.0001 0.00001);
end (-0.0205 0.0005 0.00001);
nPoints 100;
@@ -65,26 +64,27 @@
Usage
\table
Property | Description | Required | Default value
type | Type name: streamlines | yes |
setFormat | Output data type | yes |
U | Tracking velocity field name | no | U
direction | Direction in which to track | yes |
outside | Track outside of periodic meshes | no | no
fields | Fields to sample | yes |
writeTime | Write the flow time along the streamlines | no | no
lifetime | Maximum number of particle tracking steps | yes |
trackLength | Tracking segment length | no |
nSubCycle | Number of tracking steps per cell | no |
cloudName | Cloud name to use | yes |
seedSampleSet| Seeding method (see below)| yes |
Property | Description | Required | Default value
type | Type name: streamlines | yes |
setFormat | Output data type | yes |
U | Tracking velocity field name | no | U
direction | Direction in which to track | yes |
outside | Track outside of periodic meshes | no | no
fields | Fields to sample | yes |
writeTime | Write the flow time along the streamlines | no | no
lifetime | Maximum number of particle tracking steps | yes |
trackLength | Tracking segment length | no |
nSubCycle | Number of tracking steps per cell | no |
cloudName | Cloud name to use | no |
seedSampleSet | Seeding method (see below) | yes |
\endtable
Where \c seedSampleSet \c type is typically one of
Where the \c seedSampleSet \c type is typically one of
\plaintable
uniform | uniform particle seeding
cloud | cloud of points
triSurfaceMeshPointSet | points according to a tri-surface mesh
lineUniform | uniform particle seeding along a line
sphereRandom | random particle seeding within a sphere
boundaryRandom | random particle seeding on a number of patches
points | a specified set of locations
\endplaintable
Note
@@ -107,6 +107,7 @@ SourceFiles
#include "fvMeshFunctionObject.H"
#include "volFieldsFwd.H"
#include "DynamicList.H"
#include "DynamicField.H"
#include "scalarList.H"
#include "vectorList.H"
#include "setWriter.H"
@@ -183,15 +184,6 @@ private:
//- Optional specified name of particles
word cloudName_;
//- Type of seed
word seedSet_;
//- Names of scalar fields
wordList scalarNames_;
//- Names of vector fields
wordList vectorNames_;
//- Write the streamlines ages
Switch writeAge_;
@@ -204,36 +196,8 @@ private:
//- Seed set engine
autoPtr<sampledSet> sampledSetPtr_;
//- Axis of the sampled points to output
word sampledSetAxis_;
//- File writer for scalar data
autoPtr<setWriter<scalar>> scalarFormatterPtr_;
//- File writer for vector data
autoPtr<setWriter<vector>> vectorFormatterPtr_;
// Generated data
//- All tracks. Per particle the points it passed through
DynamicList<List<point>> allTracks_;
//- All ages. Per particle the age when it passed through the points
DynamicList<List<scalar>> allAges_;
//- Per scalarField, per particle, the sampled value.
List<DynamicList<scalarList>> allScalars_;
//- Per scalarField, per particle, the sampled value.
List<DynamicList<vectorList>> allVectors_;
//- Construct patch out of all wall patch faces
autoPtr<indirectPrimitivePatch> wallPatch() const;
//- Do all seeding and tracking
void track();
//- File writer
autoPtr<setWriter> formatterPtr_;
public:

File: streamlinesParticle.C

@@ -45,35 +45,46 @@ Foam::vector Foam::streamlinesParticle::interpolateFields
<< "Cell:" << celli << abort(FatalError);
}
sampledScalars_.setSize(td.vsInterp_.size());
forAll(td.vsInterp_, scalarI)
{
const scalar s =
td.vsInterp_[scalarI].interpolate(position, celli, facei);
sampledScalars_[scalarI].append
(
td.trackOutside_ ? transform_.invTransform(s) : s
);
}
bool interpolatedU = false;
vector U = vector::uniform(NaN);
sampledVectors_.setSize(td.vvInterp_.size());
forAll(td.vvInterp_, vectorI)
forAll(td.scalarInterp_, fieldi)
{
const vector v =
td.vvInterp_[vectorI].interpolate(position, celli, facei);
#define InterpolateType(Type, nullArg) \
if (td.Type##Interp_.set(fieldi)) \
{ \
const Type s = \
td.Type##Interp_[fieldi].interpolate \
( \
position, \
celli, \
facei \
); \
\
sampled##Type##s_.setSize(td.Type##Interp_.size()); \
sampled##Type##s_[fieldi].append \
( \
td.trackOutside_ ? transform_.invTransform(s) : s \
); \
}
FOR_ALL_FIELD_TYPES(InterpolateType);
#undef InterpolateType
if (vectorI == td.UIndex_)
{
U = v;
}
sampledVectors_[vectorI].append
if
(
td.trackOutside_ ? transform_.invTransform(v) : v
);
td.vectorInterp_.set(fieldi)
&& &td.vectorInterp_[fieldi] == &td.UInterp_
)
{
interpolatedU = true;
U = sampledvectors_[fieldi].last();
}
}
// Interpolate the velocity if it has not already been done
if (!interpolatedU)
{
U = td.UInterp_.interpolate(position, celli, facei);
}
return U;
@@ -82,30 +93,34 @@ Foam::vector Foam::streamlinesParticle::interpolateFields
void Foam::streamlinesParticle::endTrack(trackingData& td)
{
{
td.allPositions_.append(vectorList());
vectorList& top = td.allPositions_.last();
top.transfer(sampledPositions_);
}
const label n = sampledPositions_.size();
{
td.allTimes_.append(scalarList());
scalarList& top = td.allTimes_.last();
top.transfer(sampledTimes_);
}
const label trackPartIndex =
td.trackForward_ ? trackPartIndex_ : -1 - trackPartIndex_;
forAll(sampledScalars_, i)
{
td.allScalars_[i].append(scalarList());
scalarList& top = td.allScalars_[i].last();
top.transfer(sampledScalars_[i]);
}
if (!td.trackForward_) reverse(sampledPositions_);
td.allPositions_.append(sampledPositions_);
sampledPositions_.clear();
forAll(sampledVectors_, i)
td.allTracks_.append(List<label>(n, trackIndex_));
td.allTrackParts_.append(List<label>(n, trackPartIndex));
trackPartIndex_ ++;
if (!td.trackForward_) reverse(sampledAges_);
td.allAges_.append(sampledAges_);
sampledAges_.clear();
forAll(td.scalarInterp_, fieldi)
{
td.allVectors_[i].append(vectorList());
vectorList& top = td.allVectors_[i].last();
top.transfer(sampledVectors_[i]);
#define EndTrackType(Type, nullArg) \
if (td.Type##Interp_.set(fieldi)) \
{ \
if (!td.trackForward_) reverse(sampled##Type##s_[fieldi]); \
td.all##Type##s_[fieldi].append(sampled##Type##s_[fieldi]); \
sampled##Type##s_[fieldi].clear(); \
}
FOR_ALL_FIELD_TYPES(EndTrackType);
#undef EndTrackType
}
}
@@ -117,13 +132,16 @@ Foam::streamlinesParticle::streamlinesParticle
const polyMesh& mesh,
const vector& position,
const label celli,
const label lifeTime
const label lifeTime,
const label trackIndex
)
:
particle(mesh, position, celli),
lifeTime_(lifeTime),
transform_(transformer::I),
age_(0)
trackIndex_(trackIndex),
trackPartIndex_(0),
age_(0),
transform_(transformer::I)
{}
@@ -138,22 +156,19 @@ Foam::streamlinesParticle::streamlinesParticle
{
if (readFields)
{
List<scalarList> sampledScalars;
List<vectorList> sampledVectors;
is >> lifeTime_ >> trackIndex_ >> trackPartIndex_ >> age_
>> transform_ >> sampledPositions_ >> sampledAges_;
is >> lifeTime_ >> transform_ >> age_ >> sampledPositions_
>> sampledTimes_ >> sampledScalars >> sampledVectors;
sampledScalars_.setSize(sampledScalars.size());
forAll(sampledScalars, i)
{
sampledScalars_[i].transfer(sampledScalars[i]);
}
sampledVectors_.setSize(sampledVectors.size());
forAll(sampledVectors, i)
{
sampledVectors_[i].transfer(sampledVectors[i]);
}
#define ReadSampledTypes(Type, nullArg) \
List<List<Type>> sampled##Type##s; \
is >> sampled##Type##s; \
sampled##Type##s_.setSize(sampled##Type##s.size()); \
forAll(sampled##Type##s, i) \
{ \
sampled##Type##s_[i].transfer(sampled##Type##s[i]); \
}
FOR_ALL_FIELD_TYPES(ReadSampledTypes);
#undef ReadSampledTypes
}
// Check state of Istream
@@ -172,12 +187,16 @@ Foam::streamlinesParticle::streamlinesParticle
:
particle(p),
lifeTime_(p.lifeTime_),
transform_(p.transform_),
trackIndex_(p.trackIndex_),
trackPartIndex_(p.trackPartIndex_),
age_(p.age_),
transform_(p.transform_),
sampledPositions_(p.sampledPositions_),
sampledTimes_(p.sampledTimes_),
sampledScalars_(p.sampledScalars_),
sampledVectors_(p.sampledVectors_)
sampledAges_(p.sampledAges_)
#define SampledTypesInit(Type, nullArg) \
, sampled##Type##s_(p.sampled##Type##s_)
FOR_ALL_FIELD_TYPES(SampledTypesInit)
#undef SampledTypesInit
{}
@@ -213,7 +232,7 @@ bool Foam::streamlinesParticle::move
? transform_.invTransformPosition(position())
: position()
);
sampledTimes_.append(age_);
sampledAges_.append(age_);
vector U = interpolateFields(td, position(), cell(), face());
if (!td.trackForward_)
@@ -284,7 +303,7 @@ bool Foam::streamlinesParticle::move
? transform_.invTransformPosition(position())
: position()
);
sampledTimes_.append(age_);
sampledAges_.append(age_);
interpolateFields(td, position(), cell(), face());
if (debug)
@@ -464,11 +483,33 @@ void Foam::streamlinesParticle::readFields(Cloud<streamlinesParticle>& c)
);
c.checkFieldIOobject(c, lifeTime);
IOField<label> trackIndex
(
c.fieldIOobject("trackIndex", IOobject::MUST_READ),
valid
);
c.checkFieldIOobject(c, trackIndex);
IOField<label> trackPartIndex
(
c.fieldIOobject("trackPartIndex", IOobject::MUST_READ),
valid
);
c.checkFieldIOobject(c, trackPartIndex);
IOField<scalar> age
(
c.fieldIOobject("age", IOobject::MUST_READ),
valid
);
c.checkFieldIOobject(c, age);
transformerIOList transform
(
c.fieldIOobject("transform", IOobject::MUST_READ),
valid
);
//c.checkFieldIOobject(c, transform);
vectorFieldIOField sampledPositions
(
@@ -477,20 +518,23 @@ void Foam::streamlinesParticle::readFields(Cloud<streamlinesParticle>& c)
);
c.checkFieldIOobject(c, sampledPositions);
scalarFieldIOField sampledTimes
scalarFieldIOField sampledAges
(
c.fieldIOobject("sampledTimes", IOobject::MUST_READ),
c.fieldIOobject("sampledAges", IOobject::MUST_READ),
valid
);
c.checkFieldIOobject(c, sampledTimes);
c.checkFieldIOobject(c, sampledAges);
label i = 0;
forAllIter(Cloud<streamlinesParticle>, c, iter)
{
iter().lifeTime_ = lifeTime[i];
iter().trackIndex_ = trackIndex[i];
iter().trackPartIndex_ = trackPartIndex[i];
iter().age_ = age[i];
iter().transform_ = transform[i];
iter().sampledPositions_.transfer(sampledPositions[i]);
iter().sampledTimes_.transfer(sampledTimes[i]);
iter().sampledAges_.transfer(sampledAges[i]);
i++;
}
}
@@ -508,6 +552,24 @@ void Foam::streamlinesParticle::writeFields(const Cloud<streamlinesParticle>& c)
np
);
IOList<label> trackIndex
(
c.fieldIOobject("trackIndex", IOobject::NO_READ),
np
);
IOList<label> trackPartIndex
(
c.fieldIOobject("trackPartIndex", IOobject::NO_READ),
np
);
IOField<scalar> age
(
c.fieldIOobject("age", IOobject::NO_READ),
np
);
transformerIOList transform
(
c.fieldIOobject("transform", IOobject::NO_READ),
@@ -520,9 +582,9 @@ void Foam::streamlinesParticle::writeFields(const Cloud<streamlinesParticle>& c)
np
);
scalarFieldIOField sampledTimes
scalarFieldIOField sampledAges
(
c.fieldIOobject("sampledTimes", IOobject::NO_READ),
c.fieldIOobject("sampledAges", IOobject::NO_READ),
np
);
@@ -530,15 +592,22 @@ void Foam::streamlinesParticle::writeFields(const Cloud<streamlinesParticle>& c)
forAllConstIter(Cloud<streamlinesParticle>, c, iter)
{
lifeTime[i] = iter().lifeTime_;
trackIndex[i] = iter().trackIndex_;
trackPartIndex[i] = iter().trackPartIndex_;
age[i] = iter().age_;
transform[i] = iter().transform_;
sampledPositions[i] = iter().sampledPositions_;
sampledTimes[i] = iter().sampledTimes_;
sampledAges[i] = iter().sampledAges_;
i++;
}
lifeTime.write(np > 0);
trackIndex.write(np > 0);
trackPartIndex.write(np > 0);
age.write(np > 0);
transform.write(np > 0);
sampledPositions.write(np > 0);
sampledTimes.write(np > 0);
sampledAges.write(np > 0);
}
@@ -546,14 +615,19 @@ void Foam::streamlinesParticle::writeFields(const Cloud<streamlinesParticle>& c)
Foam::Ostream& Foam::operator<<(Ostream& os, const streamlinesParticle& p)
{
os << static_cast<const particle&>(p)
<< token::SPACE << p.lifeTime_
<< token::SPACE << p.transform_
<< token::SPACE << p.trackIndex_
<< token::SPACE << p.trackPartIndex_
<< token::SPACE << p.age_
<< token::SPACE << p.transform_
<< token::SPACE << p.sampledPositions_
<< token::SPACE << p.sampledTimes_
<< token::SPACE << p.sampledScalars_
<< token::SPACE << p.sampledVectors_;
<< token::SPACE << p.sampledAges_
#define WriteSampledTypes(Type, nullArg) \
<< token::SPACE << p.sampled##Type##s_
FOR_ALL_FIELD_TYPES(WriteSampledTypes);
#undef WriteSampledTypes
// Check state of Ostream
os.check("Ostream& operator<<(Ostream&, const streamlinesParticle&)");

View File

@@ -41,6 +41,7 @@ SourceFiles
#include "autoPtr.H"
#include "interpolation.H"
#include "vectorList.H"
#include "DynamicField.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
@@ -72,11 +73,12 @@ public:
// Public data
const PtrList<interpolation<scalar>>& vsInterp_;
#define DeclareTypeInterpolator(Type, nullArg) \
const PtrList<interpolation<Type>>& Type##Interp_;
FOR_ALL_FIELD_TYPES(DeclareTypeInterpolator);
#undef DeclareTypeInterpolator
const PtrList<interpolation<vector>>& vvInterp_;
const label UIndex_;
const interpolation<vector>& UInterp_;
bool trackForward_;
@@ -86,13 +88,20 @@ public:
const scalar trackLength_;
DynamicList<vectorList>& allPositions_;
label tracki;
DynamicList<scalarList>& allTimes_;
DynamicField<point>& allPositions_;
List<DynamicList<scalarList>>& allScalars_;
DynamicField<label>& allTracks_;
List<DynamicList<vectorList>>& allVectors_;
DynamicField<label>& allTrackParts_;
DynamicField<scalar>& allAges_;
#define DeclareAllTypes(Type, nullArg) \
List<DynamicField<Type>>& all##Type##s_;
FOR_ALL_FIELD_TYPES(DeclareAllTypes);
#undef DeclareAllTypes
// Constructors
@@ -101,31 +110,43 @@ public:
trackingData
(
streamlinesCloud& cloud,
const PtrList<interpolation<scalar>>& vsInterp,
const PtrList<interpolation<vector>>& vvInterp,
const label UIndex,
#define TypeInterpolatorArg(Type, nullArg) \
const PtrList<interpolation<Type>>& Type##Interp,
FOR_ALL_FIELD_TYPES(TypeInterpolatorArg)
#undef TypeInterpolatorArg
const interpolation<vector>& UInterp,
const bool trackForward,
const bool trackOutside,
const label nSubCycle,
const scalar trackLength,
DynamicList<List<point>>& allPositions,
DynamicList<List<scalar>>& allTimes,
List<DynamicList<scalarList>>& allScalars,
List<DynamicList<vectorList>>& allVectors
DynamicField<point>& allPositions,
DynamicField<label>& allTracks,
DynamicField<label>& allTrackParts,
DynamicField<scalar>& allAges
#define AllTypesArg(Type, nullArg) \
, List<DynamicField<Type>>& all##Type##s
FOR_ALL_FIELD_TYPES(AllTypesArg)
#undef AllTypesArg
)
:
particle::trackingData(cloud),
vsInterp_(vsInterp),
vvInterp_(vvInterp),
UIndex_(UIndex),
#define TypeInterpolatorInit(Type, nullArg) \
Type##Interp_(Type##Interp),
FOR_ALL_FIELD_TYPES(TypeInterpolatorInit)
#undef TypeInterpolatorInit
UInterp_(UInterp),
trackForward_(trackForward),
trackOutside_(trackOutside),
nSubCycle_(nSubCycle),
trackLength_(trackLength),
allPositions_(allPositions),
allTimes_(allTimes),
allScalars_(allScalars),
allVectors_(allVectors)
allTracks_(allTracks),
allTrackParts_(allTrackParts),
allAges_(allAges)
#define AllTypesInit(Type, nullArg) \
, all##Type##s_(all##Type##s)
FOR_ALL_FIELD_TYPES(AllTypesInit)
#undef AllTypesInit
{}
};
@@ -137,23 +158,29 @@ private:
//- Lifetime of particle. Particle dies when reaches 0.
label lifeTime_;
//- Current compound transform
transformer transform_;
//- Index of the track
label trackIndex_;
//- Index of the part of the track
label trackPartIndex_;
//- Age of the particle
scalar age_;
//- Current compound transform
transformer transform_;
//- Sampled positions
DynamicList<point> sampledPositions_;
DynamicField<point> sampledPositions_;
//- Sampled times
DynamicList<scalar> sampledTimes_;
//- Sampled ages
DynamicField<scalar> sampledAges_;
//- Sampled scalars
List<DynamicList<scalar>> sampledScalars_;
//- Sampled vectors
List<DynamicList<vector>> sampledVectors_;
//- Sampled types
#define DeclareSampledTypes(Type, nullArg) \
List<DynamicField<Type>> sampled##Type##s_;
FOR_ALL_FIELD_TYPES(DeclareSampledTypes);
#undef DeclareSampledTypes
// Private Member Functions
@@ -181,7 +208,8 @@ public:
const polyMesh& c,
const vector& position,
const label celli,
const label lifeTime
const label lifeTime,
const label trackIndex
);
//- Construct from Istream