foamToEnsightParts updated

- handle new cloud locations, which were missed before the release
  (see the illustrative layout sketch after this list)
  - handle multiple clouds
  - more efficient checking of fields etc.
  - write the case file at the end, so that we can potentially
    do something more intelligent about the time set handling
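
As a hedged illustration of the "new cloud locations" handled here (the cloud and field names below are invented, and <mask> stands for whatever ensightFile::mask() expands to):

  old layout, one unnamed cloud per time directory:
      <case>/<time>/lagrangian/positions
      <case>/<time>/lagrangian/U

  new layout, one subdirectory per named cloud:
      <case>/<time>/lagrangian/kinematicCloud/positions
      <case>/<time>/lagrangian/kinematicCloud/U

  converter output, as referenced from the case file:
      <ensightDir>/data/<mask>/lagrangian/kinematicCloud/positions
      <ensightDir>/data/<mask>/lagrangian/kinematicCloud/U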
Mark Olesen
2008-08-05 20:32:23 +02:00
parent a9b8a507e7
commit 6ffd5a3358
10 changed files with 655 additions and 473 deletions

View File

@@ -1,19 +0,0 @@
// check for lagrangian/positions information in the final directory
bool hasLagrangian = false;
if (timeDirs.size() > 1)
{
IOobject io
(
"positions",
timeDirs[timeDirs.size() - 1].name(),
"lagrangian",
mesh,
IOobject::NO_READ
);
if (io.headerOk())
{
hasLagrangian = true;
}
}

View File

@@ -1,15 +0,0 @@
// check that the spray variable is present for this time
//
bool hasSprayField = true;
{
IOobject ioHeader
(
fieldName,
mesh.time().timeName(),
"lagrangian",
mesh,
IOobject::NO_READ
);
hasSprayField = ioHeader.headerOk();
}

View File

@@ -1,23 +0,0 @@
// check that the variable is present for all times
//
bool hasValidField = true;
{
for (label i=0; i < timeDirs.size() && hasValidField; ++i)
{
if (fieldName.size() > 2 && fieldName(fieldName.size() - 2, 2) == "_0")
{
hasValidField = false;
break;
}
IOobject ioHeader
(
fieldName,
timeDirs[i].name(),
mesh,
IOobject::NO_READ
);
hasValidField = ioHeader.headerOk();
}
}

View File

@@ -1,91 +0,0 @@
// write time values to case file
{
scalar timeCorrection = 0;
if (timeDirs[0].value() < 0)
{
timeCorrection = - timeDirs[0].value();
Info<< "Correcting time values. Adding " << timeCorrection << endl;
}
caseFile.setf(ios_base::scientific, ios_base::floatfield);
caseFile.precision(5);
// time set 1 - geometry and volume fields
if (fieldFileNumbers.size())
{
caseFile
<< "time set: " << 1 << nl
<< "number of steps: " << fieldFileNumbers.size() << nl
<< "filename numbers:" << nl;
label count = 0;
forAll (fieldFileNumbers, i)
{
caseFile
<< " " << setw(12) << fieldFileNumbers[i];
if (++count % 6 == 0)
{
caseFile << nl;
}
}
caseFile
<< nl << "time values:" << nl;
count = 0;
forAll (fieldFileNumbers, i)
{
caseFile
<< " " << setw(12)
<< timeIndices[fieldFileNumbers[i]] + timeCorrection;
if (++count % 6 == 0)
{
caseFile << nl;
}
}
caseFile << nl << nl;
}
// time set 2 - lagrangian fields
if (hasLagrangian && sprayFileNumbers.size())
{
caseFile
<< "time set: " << 2 << nl
<< "number of steps: " << sprayFileNumbers.size() << nl
<< "filename numbers:" << nl;
label count = 0;
forAll (sprayFileNumbers, i)
{
caseFile
<< " " << setw(12) << sprayFileNumbers[i];
if (++count % 6 == 0)
{
caseFile << nl;
}
}
caseFile
<< nl << "time values:" << nl;
count = 0;
forAll (sprayFileNumbers, i)
{
caseFile
<< " " << setw(12)
<< timeIndices[sprayFileNumbers[i]] + timeCorrection;
if (++count % 6 == 0)
{
caseFile << nl;
}
}
caseFile << nl << nl;
}
caseFile << "# end" << nl;
}

View File

@@ -0,0 +1,224 @@
// write time values to case file
scalar timeCorrection = 0;
if (timeDirs[0].value() < 0)
{
timeCorrection = - timeDirs[0].value();
Info<< "Correcting time values. Adding " << timeCorrection << endl;
}
// the case file is always ASCII
Info << "write case: " << caseFileName.c_str() << endl;
OFstream caseFile(ensightDir/caseFileName, IOstream::ASCII);
caseFile.setf(ios_base::left);
caseFile.setf(ios_base::scientific, ios_base::floatfield);
caseFile.precision(5);
caseFile
<< "FORMAT" << nl
<< setw(16) << "type:" << "ensight gold" << nl << nl;
if (hasMovingMesh)
{
caseFile
<< "GEOMETRY" << nl
<< setw(16) << "model: 1" << (dataMask/geometryName).c_str() << nl;
}
else
{
caseFile
<< "GEOMETRY" << nl
<< setw(16) << "model:" << geometryName << nl;
}
// add information for clouds
// multiple clouds currently require the same time index
forAllConstIter(HashTable<HashTable<word> >, cloudFields, cloudIter)
{
const word& cloudName = cloudIter.key();
caseFile
<< setw(16) << "measured: 2"
<< fileName(dataMask/"lagrangian"/cloudName/"positions").c_str()
<< nl;
}
caseFile
<< nl << "VARIABLE" << nl;
forAllConstIter(HashTable<word>, volumeFields, fieldIter)
{
const word& fieldName = fieldIter.key();
const word& fieldType = fieldIter();
string ensightType;
if (fieldType == volScalarField::typeName)
{
ensightType = "scalar";
}
else if (fieldType == volVectorField::typeName)
{
ensightType = "vector";
}
else if (fieldType == volSphericalTensorField::typeName)
{
ensightType = "tensor symm";
}
else if (fieldType == volSymmTensorField::typeName)
{
ensightType = "tensor symm";
}
else if (fieldType == volTensorField::typeName)
{
ensightType = "tensor asym";
}
if (ensightType.size())
{
ensightCaseEntry
(
caseFile,
ensightType,
fieldName,
dataMask
);
}
}
// TODO: allow similar/different time-steps for each cloud
label cloudNo = 0;
forAllConstIter(HashTable<HashTable<word> >, cloudFields, cloudIter)
{
const word& cloudName = cloudIter.key();
forAllConstIter(HashTable<word>, cloudIter(), fieldIter)
{
const word& fieldName = fieldIter.key();
const word& fieldType = fieldIter();
string ensightType;
if (fieldType == scalarIOField::typeName)
{
ensightType = "scalar";
}
else if (fieldType == vectorIOField::typeName)
{
ensightType = "vector";
}
else if (fieldType == tensorIOField::typeName)
{
ensightType = "tensor";
}
if (ensightType.size())
{
ensightCaseEntry
(
caseFile,
ensightType,
fieldName,
dataMask,
"lagrangian"/cloudName,
cloudNo,
2
);
}
}
cloudNo++;
}
// add time values
caseFile << nl << "TIME" << nl;
// time set 1 - geometry and volume fields
if (fieldTimesUsed.size())
{
caseFile
<< "time set: " << 1 << nl
<< "number of steps: " << fieldTimesUsed.size() << nl
<< "filename numbers:" << nl;
label count = 0;
forAll (fieldTimesUsed, i)
{
caseFile
<< " " << setw(12) << fieldTimesUsed[i];
if (++count % 6 == 0)
{
caseFile << nl;
}
}
caseFile
<< nl << "time values:" << nl;
count = 0;
forAll (fieldTimesUsed, i)
{
caseFile
<< " " << setw(12)
<< timeIndices[fieldTimesUsed[i]] + timeCorrection;
if (++count % 6 == 0)
{
caseFile << nl;
}
}
caseFile << nl << nl;
}
// TODO: allow similar/different time-steps for each cloud
cloudNo = 0;
forAllConstIter(HashTable<DynamicList<label> >, cloudTimesUsed, cloudIter)
{
// const word& cloudName = cloudIter.key();
const DynamicList<label>& timesUsed = cloudIter();
if (timesUsed.size() && cloudNo == 0)
{
caseFile
<< "time set: " << 2 << nl
<< "number of steps: " << timesUsed.size() << nl
<< "filename numbers:" << nl;
label count = 0;
forAll (timesUsed, i)
{
caseFile
<< " " << setw(12) << timesUsed[i];
if (++count % 6 == 0)
{
caseFile << nl;
}
}
caseFile
<< nl << "time values:" << nl;
count = 0;
forAll (timesUsed, i)
{
caseFile
<< " " << setw(12)
<< timeIndices[timesUsed[i]] + timeCorrection;
if (++count % 6 == 0)
{
caseFile << nl;
}
}
caseFile << nl << nl;
cloudNo++;
}
}
caseFile << "# end" << nl;

View File

@@ -22,7 +22,6 @@ License
along with OpenFOAM; if not, write to the Free Software Foundation,
Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
$Date: 2008/04/02 11:37:10 $
\*---------------------------------------------------------------------------*/
#include "ensightOutputFunctions.H"
@@ -41,36 +40,51 @@ namespace Foam
// * * * * * * * * * * * * * * * Global Functions * * * * * * * * * * * * * //
template<class Type>
void ensightCaseEntry
(
OFstream& caseFile,
const IOobject& fieldObject,
const string& ensightType,
const word& fieldName,
const fileName& dataMask,
bool measured
const fileName& local,
const label cloudNo,
const label timeSet
)
{
caseFile.setf(ios_base::left);
if (measured)
fileName dirName(dataMask);
if (local.size())
{
dirName = dirName/local;
}
if (cloudNo >= 0)
{
label ts = 1;
if (timeSet > ts)
{
ts = timeSet;
}
// prefix variables with 'c' (cloud)
caseFile
<< pTraits<Type>::typeName
<< " per measured node: 2 "
<< ensightType.c_str()
<< " per measured node: " << ts << " "
<< setw(15)
<< ("s" + fieldObject.name()).c_str()
<< ("c" + Foam::name(cloudNo) + fieldName).c_str()
<< " "
<< (dataMask/"lagrangian"/fieldObject.name()).c_str()
<< (dirName/fieldName).c_str()
<< nl;
}
else
{
caseFile
<< pTraits<Type>::typeName
<< " per element: "
<< setw(15) << fieldObject.name()
<< ensightType.c_str()
<< " per element: "
<< setw(15) << fieldName
<< " "
<< (dataMask/fieldObject.name()).c_str()
<< (dirName/fieldName).c_str()
<< nl;
}
}
@@ -81,16 +95,17 @@ void ensightParticlePositions
const polyMesh& mesh,
const fileName& dataDir,
const fileName& subDir,
const word& cloudName,
IOstream::streamFormat format
)
{
Cloud<passiveParticle> parcels(mesh);
Cloud<passiveParticle> parcels(mesh, cloudName, false);
fileName lagrangianDir = subDir/"lagrangian";
fileName postFileName = lagrangianDir/"positions";
fileName cloudDir = subDir/"lagrangian"/cloudName;
fileName postFileName = cloudDir/"positions";
// the ITER/lagrangian subdirectory must exist
mkDir(dataDir/lagrangianDir);
mkDir(dataDir/cloudDir);
ensightFile os(dataDir/postFileName, format);
// tag binary format (just like geometry files)
@@ -139,20 +154,22 @@ void ensightParticlePositions
template<class Type>
void ensightSprayField
void ensightLagrangianField
(
const IOobject& fieldObject,
const fileName& dataDir,
const fileName& subDir,
const word& cloudName,
IOstream::streamFormat format
)
{
Info<< " " << fieldObject.name() << flush;
fileName lagrangianDir = subDir/"lagrangian";
fileName postFileName = lagrangianDir/fieldObject.name();
fileName cloudDir = subDir/"lagrangian"/cloudName;
fileName postFileName = cloudDir/fieldObject.name();
string title = postFileName + " with " + pTraits<Type>::typeName + " values";
string title =
postFileName + " with " + pTraits<Type>::typeName + " values";
ensightFile os(dataDir/postFileName, format);
os.write(title);
@@ -225,9 +242,9 @@ void ensightVolField
);
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // end namespace Foam
} // namespace Foam
// ************************************************************************* //

View File

@@ -44,13 +44,15 @@ namespace Foam
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
template<class Type>
void ensightCaseEntry
(
OFstream& caseFile,
const IOobject& fieldObject,
const string& ensightType,
const word& fieldName,
const fileName& dataMask,
bool measured = false
const fileName& local=fileName::null,
const label cloudNo=-1,
const label timeSet=1
);
@@ -59,17 +61,19 @@ void ensightParticlePositions
const polyMesh& mesh,
const fileName& dataDir,
const fileName& subDir,
const word& cloudName,
IOstream::streamFormat format
);
//- write spray parcels
//- write lagrangian parcels
template<class Type>
void ensightSprayField
void ensightLagrangianField
(
const IOobject& fieldObject,
const fileName& dataDir,
const fileName& subDir,
const word& cloudName,
IOstream::streamFormat format
);
@@ -87,7 +91,7 @@ void ensightVolField
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // end namespace Foam
} // namespace Foam
#ifdef NoRepository
# include "ensightOutputFunctions.C"

View File

@@ -0,0 +1,88 @@
// check the final time directory for
// 1. volume fields
HashTable<word> volumeFields;
// 2. the fields for each cloud:
HashTable<HashTable<word> > cloudFields;
if (timeDirs.size() > 1)
{
IOobjectList objs(mesh, timeDirs[timeDirs.size()-1].name());
forAllConstIter(IOobjectList, objs, fieldIter)
{
const IOobject& obj = *fieldIter();
if
(
obj.headerClassName() == volScalarField::typeName
|| obj.headerClassName() == volVectorField::typeName
)
{
// Add field and field type
volumeFields.insert
(
obj.name(),
obj.headerClassName()
);
}
}
// now check for lagrangian/<cloudName>
fileNameList cloudDirs = readDir
(
runTime.path()
/ timeDirs[timeDirs.size() - 1].name()
/ regionPrefix
/ "lagrangian",
fileName::DIRECTORY
);
forAll(cloudDirs, cloudI)
{
const word& cloudName = cloudDirs[cloudI];
// Create a new hash table for each cloud
cloudFields.insert(cloudName, HashTable<word>());
// Identify the new cloud in the hash table
HashTable<HashTable<word> >::iterator cloudIter =
cloudFields.find(cloudName);
IOobjectList cloudObjs
(
mesh,
timeDirs[timeDirs.size() - 1].name(),
"lagrangian"/cloudName
);
bool hasPositions = false;
forAllConstIter(IOobjectList, cloudObjs, fieldIter)
{
const IOobject obj = *fieldIter();
if (obj.name() == "positions")
{
hasPositions = true;
}
else
{
// Add field and field type
cloudIter().insert
(
obj.name(),
obj.headerClassName()
);
}
}
// drop this cloud if it has no positions
if (!hasPositions)
{
cloudFields.erase(cloudIter);
}
}
}
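
A hedged example of what the two tables might hold after this scan (names invented; the cloud entries store the headerClassName of the IOField, assumed here to be "scalarField" and "vectorField"):

volumeFields:
    p -> volScalarField
    U -> volVectorField

cloudFields:
    kinematicCloud:
        d -> scalarField
        U -> vectorField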

View File

@@ -71,20 +71,26 @@ int main(int argc, char *argv[])
argList::noParallel();
argList::validOptions.insert("ascii", "");
const label nTypes = 2;
const word fieldTypes[] =
const word volFieldTypes[] =
{
volScalarField::typeName,
volVectorField::typeName
volVectorField::typeName,
volSphericalTensorField::typeName,
volSymmTensorField::typeName,
volTensorField::typeName,
word::null
};
const label nSprayFieldTypes = 2;
const word sprayFieldTypes[] =
{
scalarIOField::typeName,
vectorIOField::typeName
vectorIOField::typeName,
tensorIOField::typeName,
word::null
};
const char* geometryName = "geometry";
# include "setRootCase.H"
# include "createTime.H"
@@ -104,14 +110,26 @@ int main(int argc, char *argv[])
fileName dataMask = fileName("data")/ensightFile::mask();
// Ensight and Ensight/data directories must exist
// do not remove old data - we might wish to convert new results
// or a particular time interval
if (dir(ensightDir))
{
rmDir(ensightDir);
Info<<"Warning: reusing existing directory" << nl
<< " " << ensightDir << endl;
}
mkDir(ensightDir);
mkDir(dataDir);
# include "createMesh.H"
# include "createNamedMesh.H"
// Mesh instance (region0 gets filtered out)
fileName regionPrefix;
if (regionName != polyMesh::defaultRegion)
{
regionPrefix = regionName;
}
// Construct the list of ensight parts for the entire mesh
ensightParts partsList(mesh);
@@ -125,53 +143,24 @@ int main(int argc, char *argv[])
}
# include "checkHasMovingMesh.H"
# include "checkHasLagrangian.H"
# include "findFields.H"
# include "validateFields.H"
// only take the objects that exists at the end of the calculation
IOobjectList objects(mesh, timeDirs[timeDirs.size()-1].name());
IOobjectList sprayObjects(mesh, timeDirs[timeDirs.size()-1].name(), "lagrangian");
// write single geometry or one per time step
fileName geometryFileName("geometry");
if (hasMovingMesh)
{
geometryFileName = dataMask/geometryFileName;
}
// the case file is always ASCII
Info << "write case: " << caseFileName.c_str() << endl;
OFstream caseFile(ensightDir/caseFileName, IOstream::ASCII);
caseFile.setf(ios_base::left);
caseFile
<< "FORMAT" << nl
<< setw(16) << "type:" << "ensight gold" << nl << nl
<< "GEOMETRY" << nl
<< setw(16) << "model: 1" << geometryFileName.c_str() << nl;
if (hasLagrangian)
{
caseFile
<< setw(16) << "measured: 2"
<< fileName(dataMask/"lagrangian"/"positions").c_str() << nl;
}
caseFile
<< nl << "VARIABLE" << nl;
label nFieldTime = timeDirs.size();
if (nFieldTime < 0)
{
nFieldTime = 0;
}
List<label> fieldFileNumbers(nFieldTime);
List<label> sprayFileNumbers(nFieldTime);
// map used times used
// map times used
Map<scalar> timeIndices;
nFieldTime = 0;
label nSprayTime = 0;
// Track the time indices used by the volume fields
DynamicList<label> fieldTimesUsed;
// Track the time indices used by each cloud
HashTable<DynamicList<label> > cloudTimesUsed;
// Create a new DynamicList for each cloud
forAllConstIter(HashTable<HashTable<word> >, cloudFields, cloudIter)
{
cloudTimesUsed.insert(cloudIter.key(), DynamicList<label>());
}
forAll(timeDirs, timeI)
{
@@ -179,7 +168,8 @@ int main(int argc, char *argv[])
# include "getTimeIndex.H"
fieldFileNumbers[nFieldTime++] = timeIndex;
// remember the time index
fieldTimesUsed.append(timeIndex);
// the data/ITER subdirectory must exist
fileName subDir = ensightFile::subDir(timeIndex);
@@ -195,300 +185,203 @@ int main(int argc, char *argv[])
# include "moveMesh.H"
if (nFieldTime == 1 || mesh.moving())
if (timeI == 0 || mesh.moving())
{
if (hasMovingMesh)
{
geometryFileName = dataDir/subDir/"geometry";
}
if (mesh.moving())
{
partsList.recalculate(mesh);
}
ensightGeoFile geoFile(ensightDir/geometryFileName, format);
fileName geomDir;
if (hasMovingMesh)
{
geomDir = dataDir/subDir;
}
ensightGeoFile geoFile(ensightDir/geomDir/geometryName, format);
partsList.writeGeometry(geoFile);
Info << nl;
}
Info<< "write volume field: " << flush;
Info<< "write volume field (" << flush;
for (label i=0; i < nTypes; i++)
forAllConstIter(HashTable<word>, volumeFields, fieldIter)
{
wordList fieldNames = objects.names(fieldTypes[i]);
const word& fieldName = fieldIter.key();
const word& fieldType = fieldIter();
forAll (fieldNames, fieldI)
IOobject fieldObject
(
fieldName,
mesh.time().timeName(),
mesh,
IOobject::MUST_READ,
IOobject::NO_WRITE
);
if (fieldType == volScalarField::typeName)
{
word fieldName = fieldNames[fieldI];
ensightVolField<scalar>
(
partsList,
fieldObject,
mesh,
dataDir,
subDir,
format
);
# include "checkHasValidField.H"
}
else if (fieldType == volVectorField::typeName)
{
ensightVolField<vector>
(
partsList,
fieldObject,
mesh,
dataDir,
subDir,
format
);
if (!hasValidField)
}
else if (fieldType == volSphericalTensorField::typeName)
{
ensightVolField<sphericalTensor>
(
partsList,
fieldObject,
mesh,
dataDir,
subDir,
format
);
}
else if (fieldType == volSymmTensorField::typeName)
{
ensightVolField<symmTensor>
(
partsList,
fieldObject,
mesh,
dataDir,
subDir,
format
);
}
else if (fieldType == volTensorField::typeName)
{
ensightVolField<tensor>
(
partsList,
fieldObject,
mesh,
dataDir,
subDir,
format
);
}
}
Info<< " )" << endl;
// check for clouds
forAllConstIter(HashTable<HashTable<word> >, cloudFields, cloudIter)
{
const word& cloudName = cloudIter.key();
if (!dir(runTime.timePath()/regionPrefix/"lagrangian"/cloudName))
{
continue;
}
IOobjectList cloudObjs
(
mesh,
runTime.timeName(),
"lagrangian"/cloudName
);
// check that the positions field is present for this time
if (cloudObjs.lookup("positions"))
{
ensightParticlePositions
(
mesh,
dataDir,
subDir,
cloudName,
format
);
}
else
{
continue;
}
Info<< "write " << cloudName << " (" << flush;
forAllConstIter(HashTable<word>, cloudIter(), fieldIter)
{
const word& fieldName = fieldIter.key();
const word& fieldType = fieldIter();
IOobject *fieldObject = cloudObjs.lookup(fieldName);
if (!fieldObject)
{
Info<< "missing "
<< runTime.timeName()/"lagrangian"/cloudName/fieldName
<< endl;
continue;
}
IOobject fieldObject
(
fieldName,
mesh.time().timeName(),
mesh,
IOobject::MUST_READ,
IOobject::NO_WRITE
);
if (fieldTypes[i] == volScalarField::typeName)
if (fieldType == scalarIOField::typeName)
{
if (nFieldTime == 1)
{
ensightCaseEntry<scalar>
(
caseFile,
fieldObject,
dataMask
);
}
ensightVolField<scalar>
ensightLagrangianField<scalar>
(
partsList,
fieldObject,
mesh,
*fieldObject,
dataDir,
subDir,
cloudName,
format
);
}
else if (fieldTypes[i] == volVectorField::typeName)
else if (fieldType == vectorIOField::typeName)
{
if (nFieldTime == 1)
{
ensightCaseEntry<vector>
(
caseFile,
fieldObject,
dataMask
);
}
ensightVolField<vector>
ensightLagrangianField<vector>
(
partsList,
fieldObject,
mesh,
*fieldObject,
dataDir,
subDir,
cloudName,
format
);
}
else if (fieldTypes[i] == volSphericalTensorField::typeName)
else if (fieldType == tensorIOField::typeName)
{
if (nFieldTime == 1)
{
ensightCaseEntry<sphericalTensor>
(
caseFile,
fieldObject,
dataMask
);
}
ensightVolField<sphericalTensor>
ensightLagrangianField<tensor>
(
partsList,
fieldObject,
mesh,
dataDir,
subDir,
format
);
}
else if (fieldTypes[i] == volSymmTensorField::typeName)
{
if (nFieldTime == 1)
{
ensightCaseEntry<symmTensor>
(
caseFile,
fieldObject,
dataMask
);
}
ensightVolField<symmTensor>
(
partsList,
fieldObject,
mesh,
dataDir,
subDir,
format
);
}
else if (fieldTypes[i] == volTensorField::typeName)
{
if (nFieldTime == 1)
{
ensightCaseEntry<tensor>
(
caseFile,
fieldObject,
dataMask
);
}
ensightVolField<tensor>
(
partsList,
fieldObject,
mesh,
*fieldObject,
dataDir,
subDir,
cloudName,
format
);
}
}
}
Info<< endl;
Info<< " )" << endl;
if (hasLagrangian)
{
// check that the positions field is present for this time
{
IOobject ioHeader
(
"positions",
mesh.time().timeName(),
"lagrangian",
mesh,
IOobject::NO_READ
);
if (ioHeader.headerOk())
{
sprayFileNumbers[nSprayTime++] = timeIndex;
}
}
Info<< "write spray field: " << flush;
ensightParticlePositions
(
mesh,
dataDir,
subDir,
format
);
for (label i=0; i < nSprayFieldTypes; i++)
{
wordList fieldNames = sprayObjects.names(sprayFieldTypes[i]);
forAll (fieldNames, fieldI)
{
word fieldName = fieldNames[fieldI];
# include "checkHasSprayField.H"
if (!hasSprayField)
{
continue;
}
IOobject fieldObject
(
fieldName,
mesh.time().timeName(),
"lagrangian",
mesh,
IOobject::MUST_READ,
IOobject::NO_WRITE
);
if (sprayFieldTypes[i] == scalarIOField::typeName)
{
if (nSprayTime == 1)
{
ensightCaseEntry<scalar>
(
caseFile,
fieldObject,
dataMask,
true
);
}
ensightSprayField<scalar>
(
fieldObject,
dataDir,
subDir,
format
);
}
else if (sprayFieldTypes[i] == vectorIOField::typeName)
{
if (nSprayTime == 1)
{
ensightCaseEntry<vector>
(
caseFile,
fieldObject,
dataMask,
true
);
}
ensightSprayField<vector>
(
fieldObject,
dataDir,
subDir,
format
);
}
else if (sprayFieldTypes[i] == tensorIOField::typeName)
{
if (nSprayTime == 1)
{
ensightCaseEntry<tensor>
(
caseFile,
fieldObject,
dataMask,
true
);
}
ensightSprayField<tensor>
(
fieldObject,
dataDir,
subDir,
format
);
}
}
}
Info<< endl;
// remember the time index
cloudTimesUsed[cloudName].append(timeIndex);
}
}
fieldFileNumbers.setSize(nFieldTime);
sprayFileNumbers.setSize(nSprayTime);
// add time values
caseFile << nl << "TIME" << nl;
# include "ensightCaseTimes.H"
# include "ensightOutputCase.H"
Info<< "\nEnd\n"<< endl;

View File

@@ -0,0 +1,104 @@
// ignore special fields or fields that we don't handle
//
forAllIter(HashTable<word>, volumeFields, fieldIter)
{
const word& fieldName = fieldIter.key();
const word& fieldType = fieldIter();
if (fieldName.size() > 2 && fieldName(fieldName.size() - 2, 2) == "_0")
{
volumeFields.erase(fieldIter);
}
else
{
// remove types that we don't handle:
bool invalid = true;
for (label typeI=0; invalid && volFieldTypes[typeI].size(); ++typeI)
{
if (fieldType == volFieldTypes[typeI])
{
invalid = false;
}
}
if (invalid)
{
Info<< "ignoring " << fieldType << ": " << fieldName << endl;
volumeFields.erase(fieldIter);
}
}
}
// verify that the variable is present for all times
//
for (label i=0; volumeFields.size() && i < timeDirs.size(); ++i)
{
IOobjectList objs(mesh, timeDirs[i].name());
forAllIter(HashTable<word>, volumeFields, fieldIter)
{
const word& fieldName = fieldIter.key();
if (!objs.found(fieldName))
{
volumeFields.erase(fieldIter);
}
}
}
// ignore fields that we don't handle
//
forAllIter(HashTable<HashTable<word> >, cloudFields, cloudIter)
{
const word& cloudName = cloudIter.key();
forAllIter(HashTable<word>, cloudIter(), fieldIter)
{
const word& fieldName = fieldIter.key();
const word& fieldType = fieldIter();
// remove types that we don't handle:
bool invalid = true;
for (label typeI=0; invalid && sprayFieldTypes[typeI].size(); ++typeI)
{
if (fieldType == sprayFieldTypes[typeI])
{
invalid = false;
}
}
if (invalid)
{
Info<< "ignoring " << fieldType << ": " << fieldName << endl;
cloudIter().erase(fieldIter);
}
}
if (!cloudIter().size())
{
Info<< "removing cloud " << cloudName<< endl;
cloudFields.erase(cloudIter);
}
}
// DEBUGGING
// Info<<"final fields (";
// forAllConstIter(HashTable<word>, volumeFields, fieldIter)
// {
// Info<< " " << fieldIter.key();
// }
//
// Info<< " )\n";
//
// forAllConstIter(HashTable<HashTable<word> >, cloudFields, cloudIter)
// {
// Info<<"final fields for lagrangian/" << cloudIter.key() << " (";
// forAllConstIter(HashTable<word>, cloudIter(), fieldIter)
// {
// Info<< " " << fieldIter.key();
// }
// Info<< " )\n";
// }