Merge branch 'develop' of develop.openfoam.com:Development/OpenFOAM-plus into develop

This commit is contained in:
mattijs
2017-09-18 08:43:21 +01:00
54 changed files with 1147 additions and 706 deletions

View File

@ -65,7 +65,7 @@ int main(int argc, char *argv[])
#include "createTime.H"
#include "createDynamicFvMesh.H"
#include "initContinuityErrs.H"
#include "createControl.H"
pimpleControl pimple(mesh);
#include "createTimeControls.H"
#include "createDyMControls.H"
#include "createFields.H"

View File

@ -46,6 +46,13 @@
phiHbyA += phig;
if (adjustFringe)
{
fvc::makeRelative(phiHbyA, U);
oversetAdjustPhi(phiHbyA, U);
fvc::makeAbsolute(phiHbyA, U);
}
// Update the pressure BCs to ensure flux consistency
constrainPressure(p_rgh, U, phiHbyA, rAUf, MRF);
@ -72,7 +79,8 @@
"minGradP",
fvc::reconstruct((phig - p_rghEqn.flux())/rAUf)
);
U = HbyA + rAU*cellMask*minGradP;
//U = HbyA + rAU*cellMask*minGradP;
U = fvc::reconstruct(phi);
U.correctBoundaryConditions();
fvOptions.correct(U);
}

View File

@ -11,5 +11,6 @@ moveMeshOuterCorrectors =
massFluxInterpolation =
pimple.dict().lookupOrDefault("massFluxInterpolation", false);
ddtCorr =
pimple.dict().lookupOrDefault("ddtCorr", true);
ddtCorr = pimple.dict().lookupOrDefault("ddtCorr", true);
adjustFringe = pimple.dict().lookupOrDefault("oversetAdjustPhi", false);

View File

@ -41,10 +41,15 @@ using namespace Foam;
autoPtr<triSurface> loadSurface
(
const Foam::Time& runTime,
const fileName& surfName
const fileName& surfName,
const scalar scaleFactor
)
{
Info<< "Reading surface " << surfName << endl;
Info<< "Reading surface " << surfName << nl;
if (scaleFactor > 0)
{
Info<<"Scaling : " << scaleFactor << nl;
}
const fileName fallback =
runTime.constantPath()/triSurfaceMesh::meshSubDir/surfName;
@ -52,11 +57,11 @@ autoPtr<triSurface> loadSurface
autoPtr<triSurface> surfPtr;
if (isFile(surfName))
{
surfPtr.set(new triSurface(surfName));
surfPtr.set(new triSurface(surfName, scaleFactor));
}
else if (isFile(fallback))
{
surfPtr.set(new triSurface(fallback));
surfPtr.set(new triSurface(fallback, scaleFactor));
}
else
{
@ -102,6 +107,12 @@ int main(int argc, char *argv[])
"mergeTol",
"merge points (and edges) using the specified tolerance"
);
argList::addOption
(
"scale",
"factor",
"geometry scaling factor"
);
#include "addDictOption.H"
@ -117,16 +128,18 @@ int main(int argc, char *argv[])
#include "setRootCase.H"
#include "createTime.H"
const scalar scaleFactor = args.optionLookupOrDefault<scalar>("scale", -1);
const word outputFile(args.executable() + ".obj");
const fileName surf1Name(args[1]);
triSurface surf1 = loadSurface(runTime, surf1Name)();
triSurface surf1 = loadSurface(runTime, surf1Name, scaleFactor)();
Info<< surf1Name << " statistics:" << endl;
surf1.writeStats(Info);
Info<< endl;
const fileName surf2Name(args[2]);
triSurface surf2 = loadSurface(runTime, surf2Name)();
triSurface surf2 = loadSurface(runTime, surf2Name, scaleFactor)();
Info<< surf2Name << " statistics:" << endl;
surf2.writeStats(Info);
Info<< endl;

View File

@ -0,0 +1,3 @@
Test-surfaceMeshConvert.C
EXE = $(FOAM_APPBIN)/Test-surfaceMeshConvert

View File

@ -22,17 +22,16 @@ License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
Application
surfaceMeshConvertTesting
Test-surfaceMeshConvert
Group
grpSurfaceUtilities
Description
Converts from one surface mesh format to another, but primarily
used for testing functionality.
Test conversions from one surface mesh format to another.
Usage
\b surfaceMeshConvertTesting inputFile outputFile [OPTION]
\b Test-surfaceMeshConvert inputFile outputFile [OPTION]
Options:
- \par -clean

View File

@ -91,7 +91,7 @@ int main(int argc, char *argv[])
// ~~~~~~~~~~~~~~~~~~~~~
fileFormats::FIREMeshWriter::binary = !args.optionFound("ascii");
// default: rescale from [m] to [mm]
// Default: no rescaling
scalar scaleFactor = 1;
if (args.optionReadIfPresent("scale", scaleFactor))
{

View File

@ -43,8 +43,8 @@ Usage
or -yawPitchRoll (yawdegrees pitchdegrees rolldegrees)
or -rollPitchYaw (rolldegrees pitchdegrees yawdegrees)
-scale vector
Scales the points by the given vector.
-scale scalar|vector
Scales the points by the given scalar or vector.
The any or all of the three options may be specified and are processed
in the above order.
@ -182,9 +182,9 @@ int main(int argc, char *argv[])
argList::addOption
(
"scale",
"vector",
"scale by the specified amount - eg, '(0.001 0.001 0.001)' for a "
"uniform [mm] to [m] scaling"
"scalar | vector",
"scale by the specified amount - eg, for a uniform [mm] to [m] scaling "
"use either (0.001 0.001 0.001)' or simply '0.001'"
);
#include "addRegionOption.H"
@ -262,10 +262,10 @@ int main(int argc, char *argv[])
<< " pitch " << v.y() << nl
<< " yaw " << v.z() << nl;
// Convert to radians
// degToRad
v *= pi/180.0;
quaternion R(quaternion::rotationSequence::XYZ, v);
const quaternion R(quaternion::rotationSequence::XYZ, v);
Info<< "Rotating points by quaternion " << R << endl;
points = transform(R, points);
@ -282,16 +282,10 @@ int main(int argc, char *argv[])
<< " pitch " << v.y() << nl
<< " roll " << v.z() << nl;
// Convert to radians
// degToRad
v *= pi/180.0;
scalar yaw = v.x();
scalar pitch = v.y();
scalar roll = v.z();
quaternion R = quaternion(vector(0, 0, 1), yaw);
R *= quaternion(vector(0, 1, 0), pitch);
R *= quaternion(vector(1, 0, 0), roll);
const quaternion R(quaternion::rotationSequence::ZYX, v);
Info<< "Rotating points by quaternion " << R << endl;
points = transform(R, points);
@ -302,13 +296,34 @@ int main(int argc, char *argv[])
}
}
if (args.optionReadIfPresent("scale", v))
if (args.optionFound("scale"))
{
Info<< "Scaling points by " << v << endl;
// Use readList to handle single or multiple values
const List<scalar> scaling = args.optionReadList<scalar>("scale");
points.replace(vector::X, v.x()*points.component(vector::X));
points.replace(vector::Y, v.y()*points.component(vector::Y));
points.replace(vector::Z, v.z()*points.component(vector::Z));
if (scaling.size() == 1)
{
Info<< "Scaling points uniformly by " << scaling[0] << nl;
points *= scaling[0];
}
else if (scaling.size() == 3)
{
Info<< "Scaling points by ("
<< scaling[0] << " "
<< scaling[1] << " "
<< scaling[2] << ")" << nl;
points.replace(vector::X, scaling[0]*points.component(vector::X));
points.replace(vector::Y, scaling[1]*points.component(vector::Y));
points.replace(vector::Z, scaling[2]*points.component(vector::Z));
}
else
{
FatalError
<< "-scale with 1 or 3 components only" << nl
<< "given: " << args["scale"] << endl
<< exit(FatalError);
}
}
// Set the precision of the points data to 10

View File

@ -160,13 +160,59 @@ public:
const std::string& datasetName
);
//- Add names to array selection
template<class StringType>
static label addToArray
(
vtkDataArraySelection* select,
const std::string& prefix,
const UList<StringType>& names
);
//- Add names to array selection
template<class StringType>
static label addToArray
(
vtkDataArraySelection* select,
const UList<StringType>& names,
const std::string& suffix = string::null
);
//- Add objects of Type to array selection
template<class Type>
static label addToSelection
(
vtkDataArraySelection* select,
const std::string& prefix,
const IOobjectList& objects
);
//- Add objects of Type to array selection
template<class Type>
static label addToSelection
(
vtkDataArraySelection* select,
const IOobjectList& objects,
const std::string& prefix = string::null
const std::string& suffix = string::null
);
//- Add objects of Type to array selection
template<class Type>
static label addToSelection
(
vtkDataArraySelection* select,
const std::string& prefix,
const HashTable<wordHashSet>& objects
);
//- Add objects of Type to array selection
template<class Type>
static label addToSelection
(
vtkDataArraySelection* select,
const HashTable<wordHashSet>& objects,
const std::string& suffix = string::null
);

View File

@ -28,29 +28,119 @@ License
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
template<class StringType>
Foam::label Foam::foamPvCore::addToArray
(
    vtkDataArraySelection *select,
    const std::string& prefix,
    const UList<StringType>& names
)
{
    // Register each name with the VTK array selection, prepending
    // the prefix (when one was supplied) to every entry.
    for (const auto& name : names)
    {
        if (prefix.empty())
        {
            select->AddArray(name.c_str());
        }
        else
        {
            select->AddArray((prefix + name).c_str());
        }
    }

    // Number of entries added to the selection
    return names.size();
}
template<class StringType>
Foam::label Foam::foamPvCore::addToArray
(
    vtkDataArraySelection *select,
    const UList<StringType>& names,
    const std::string& suffix
)
{
    // Register each name with the VTK array selection, appending
    // the suffix (when one was supplied) to every entry.
    for (const auto& name : names)
    {
        if (suffix.empty())
        {
            select->AddArray(name.c_str());
        }
        else
        {
            select->AddArray((name + suffix).c_str());
        }
    }

    // Number of entries added to the selection
    return names.size();
}
template<class Type>
Foam::label Foam::foamPvCore::addToSelection
(
    vtkDataArraySelection *select,
    const std::string& prefix,
    const IOobjectList& objects
)
{
    // Names of all objects of the requested Type, in sorted order,
    // each added to the selection with the given prefix.
    const auto sorted(objects.sortedNames(Type::typeName));

    return addToArray(select, prefix, sorted);
}
template<class Type>
Foam::label Foam::foamPvCore::addToSelection
(
vtkDataArraySelection *select,
const IOobjectList& objects,
const std::string& prefix
const std::string& suffix
)
{
const wordList names = objects.sortedNames(Type::typeName);
return addToArray(select, objects.sortedNames(Type::typeName), suffix);
}
forAll(names, i)
template<class Type>
Foam::label Foam::foamPvCore::addToSelection
(
vtkDataArraySelection *select,
const std::string& prefix,
const HashTable<wordHashSet>& objects
)
{
auto iter = objects.cfind(Type::typeName);
if (iter.found())
{
if (prefix.empty())
{
select->AddArray(names[i].c_str());
}
else
{
select->AddArray((prefix + names[i]).c_str());
}
return addToArray(select, prefix, iter.object().sortedToc());
}
return names.size();
return 0;
}
template<class Type>
Foam::label Foam::foamPvCore::addToSelection
(
    vtkDataArraySelection *select,
    const HashTable<wordHashSet>& objects,
    const std::string& suffix
)
{
    // Locate the entry for this Type; nothing to add when absent.
    const auto iter = objects.cfind(Type::typeName);

    if (!iter.found())
    {
        return 0;
    }

    // Add the sorted names (with optional suffix) to the selection
    return addToArray(select, iter.object().sortedToc(), suffix);
}

View File

@ -155,7 +155,10 @@ class vtkPVFoam
vtkSmartPointer<dataType> getCopy() const
{
auto copy = vtkSmartPointer<dataType>::New();
copy->ShallowCopy(vtkgeom);
if (vtkgeom)
{
copy->ShallowCopy(vtkgeom);
}
return copy;
}
@ -163,7 +166,10 @@ class vtkPVFoam
void reuse()
{
dataset = vtkSmartPointer<dataType>::New();
dataset->ShallowCopy(vtkgeom);
if (vtkgeom)
{
dataset->ShallowCopy(vtkgeom);
}
}
//- Set the geometry and make a shallow copy to dataset

View File

@ -85,11 +85,11 @@ Foam::wordList Foam::vtkPVFoam::getZoneNames
wordList names(zmesh.size());
label nZone = 0;
forAll(zmesh, zonei)
for (const auto& zn : zmesh)
{
if (!zmesh[zonei].empty())
if (!zn.empty())
{
names[nZone++] = zmesh[zonei].name();
names[nZone++] = zn.name();
}
}
names.setSize(nZone);
@ -100,8 +100,6 @@ Foam::wordList Foam::vtkPVFoam::getZoneNames
Foam::wordList Foam::vtkPVFoam::getZoneNames(const word& zoneType) const
{
wordList names;
// mesh not loaded - read from file
IOobject ioObj
(
@ -119,14 +117,17 @@ Foam::wordList Foam::vtkPVFoam::getZoneNames(const word& zoneType) const
false
);
wordList names;
if (ioObj.typeHeaderOk<cellZoneMesh>(false, false))
{
zonesEntries zones(ioObj);
names.setSize(zones.size());
forAll(zones, zonei)
label nZone = 0;
for (const auto& zn : zones)
{
names[zonei] = zones[zonei].keyword();
names[nZone++] = zn.keyword();
}
}
@ -167,7 +168,7 @@ void Foam::vtkPVFoam::updateInfoLagrangian
<< " " << dbPtr_->timePath()/cloud::prefix << endl;
}
// use the db directly since this might be called without a mesh,
// Use the db directly since this might be called without a mesh,
// but the region must get added back in
fileName lagrangianPrefix(cloud::prefix);
if (meshRegion_ != polyMesh::defaultRegion)
@ -175,22 +176,23 @@ void Foam::vtkPVFoam::updateInfoLagrangian
lagrangianPrefix = meshRegion_/cloud::prefix;
}
// Search for list of lagrangian objects for this time
fileNameList cloudDirs
(
readDir(dbPtr_->timePath()/lagrangianPrefix, fileName::DIRECTORY)
);
// List of lagrangian objects across all times
HashSet<fileName> names;
for (const instant& t : dbPtr_().times())
{
names.insert
(
readDir
(
dbPtr_->path()/t.name()/lagrangianPrefix,
fileName::DIRECTORY
)
);
}
rangeLagrangian_.reset(select->GetNumberOfArrays());
forAll(cloudDirs, cloudi)
{
// Add cloud to GUI list
select->AddArray
(
("lagrangian/" + cloudDirs[cloudi]).c_str()
);
++rangeLagrangian_;
}
rangeLagrangian_ += addToArray(select, "lagrangian/", names.sortedToc());
if (debug)
{
@ -257,39 +259,26 @@ void Foam::vtkPVFoam::updateInfoPatches
const polyPatch& pp = patches[patchId];
if (pp.size())
{
enabledEntries.insert
(
"patch/" + pp.name()
);
enabledEntries.insert("patch/" + pp.name());
}
}
}
}
// Sort group names
Foam::sort(displayNames);
for (const auto& name : displayNames)
{
select->AddArray(name.c_str());
++rangePatches_;
}
Foam::sort(displayNames); // Sorted group names
rangePatches_ += addToArray(select, displayNames);
// Add (non-zero) patches to the list of mesh parts
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if (!reader_->GetShowGroupsOnly())
{
forAll(patches, patchi)
for (const polyPatch& pp : patches)
{
const polyPatch& pp = patches[patchi];
if (pp.size())
{
// Add patch to GUI list
select->AddArray
(
("patch/" + pp.name()).c_str()
);
select->AddArray(("patch/" + pp.name()).c_str());
++rangePatches_;
}
}
@ -339,9 +328,9 @@ void Foam::vtkPVFoam::updateInfoPatches
&& patchDict.readIfPresent("inGroups", groupNames)
)
{
forAll(groupNames, groupI)
for (const auto& groupName : groupNames)
{
groups(groupNames[groupI]).insert(patchi);
groups(groupName).insert(patchi);
}
}
}
@ -370,21 +359,13 @@ void Foam::vtkPVFoam::updateInfoPatches
{
for (auto patchId : patchIDs)
{
enabledEntries.insert
(
"patch/" + names[patchId]
);
enabledEntries.insert("patch/" + names[patchId]);
}
}
}
// Sort group names
Foam::sort(displayNames);
for (const auto& name : displayNames)
{
select->AddArray(name.c_str());
++rangePatches_;
}
Foam::sort(displayNames); // Sorted group names
rangePatches_ += addToArray(select, displayNames);
// Add (non-zero) patches to the list of mesh parts
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -396,10 +377,7 @@ void Foam::vtkPVFoam::updateInfoPatches
// Valid patch if nFace > 0 - add patch to GUI list
if (sizes[patchi])
{
select->AddArray
(
("patch/" + names[patchi]).c_str()
);
select->AddArray(("patch/" + names[patchi]).c_str());
++rangePatches_;
}
}
@ -430,74 +408,44 @@ void Foam::vtkPVFoam::updateInfoZones
<< " [meshPtr=" << (meshPtr_ ? "set" : "null") << "]" << endl;
}
wordList namesLst;
//
// cellZones information
// ~~~~~~~~~~~~~~~~~~~~~
if (meshPtr_)
// cellZones
{
namesLst = getZoneNames(meshPtr_->cellZones());
}
else
{
namesLst = getZoneNames("cellZones");
}
rangeCellZones_.reset(select->GetNumberOfArrays());
forAll(namesLst, elemI)
{
select->AddArray
const wordList names =
(
("cellZone/" + namesLst[elemI]).c_str()
meshPtr_
? getZoneNames(meshPtr_->cellZones())
: getZoneNames("cellZones")
);
++rangeCellZones_;
rangeCellZones_.reset(select->GetNumberOfArrays());
rangeCellZones_ += addToArray(select, "cellZone/", names);
}
//
// faceZones information
// ~~~~~~~~~~~~~~~~~~~~~
if (meshPtr_)
// faceZones
{
namesLst = getZoneNames(meshPtr_->faceZones());
}
else
{
namesLst = getZoneNames("faceZones");
}
rangeFaceZones_.reset(select->GetNumberOfArrays());
forAll(namesLst, elemI)
{
select->AddArray
const wordList names =
(
("faceZone/" + namesLst[elemI]).c_str()
meshPtr_
? getZoneNames(meshPtr_->faceZones())
: getZoneNames("faceZones")
);
++rangeFaceZones_;
rangeFaceZones_.reset(select->GetNumberOfArrays());
rangeFaceZones_ += addToArray(select, "faceZone/", names);
}
//
// pointZones information
// ~~~~~~~~~~~~~~~~~~~~~~
if (meshPtr_)
// pointZones
{
namesLst = getZoneNames(meshPtr_->pointZones());
}
else
{
namesLst = getZoneNames("pointZones");
}
rangePointZones_.reset(select->GetNumberOfArrays());
forAll(namesLst, elemI)
{
select->AddArray
const wordList names =
(
("pointZone/" + namesLst[elemI]).c_str()
meshPtr_
? getZoneNames(meshPtr_->pointZones())
: getZoneNames("pointZones")
);
++rangePointZones_;
rangePointZones_.reset(select->GetNumberOfArrays());
rangePointZones_ += addToArray(select, "pointZone/", names);
}
if (debug)
@ -525,14 +473,14 @@ void Foam::vtkPVFoam::updateInfoSets
// Add names of sets. Search for last time directory with a sets
// subdirectory. Take care not to search beyond the last mesh.
word facesInstance = dbPtr_().findInstance
const word facesInstance = dbPtr_().findInstance
(
meshDir_,
"faces",
IOobject::READ_IF_PRESENT
);
word setsInstance = dbPtr_().findInstance
const word setsInstance = dbPtr_().findInstance
(
meshDir_/"sets",
word::null,
@ -540,7 +488,7 @@ void Foam::vtkPVFoam::updateInfoSets
facesInstance
);
IOobjectList objects(dbPtr_(), setsInstance, meshDir_/"sets");
const IOobjectList objects(dbPtr_(), setsInstance, meshDir_/"sets");
if (debug)
{
@ -553,24 +501,24 @@ void Foam::vtkPVFoam::updateInfoSets
rangeCellSets_ += addToSelection<cellSet>
(
select,
objects,
"cellSet/"
"cellSet/",
objects
);
rangeFaceSets_.reset(select->GetNumberOfArrays());
rangeFaceSets_ += addToSelection<faceSet>
(
select,
objects,
"faceSet/"
"faceSet/",
objects
);
rangePointSets_.reset(select->GetNumberOfArrays());
rangePointSets_ += addToSelection<pointSet>
(
select,
objects,
"pointSet/"
"pointSet/",
objects
);
if (debug)
@ -594,21 +542,20 @@ void Foam::vtkPVFoam::updateInfoLagrangianFields
HashSet<string> enabled = getSelectedArraySet(select);
select->RemoveAllArrays();
// TODO - currently only get fields from ONE cloud
// have to decide if the second set of fields get mixed in
// or dealt with separately
const arrayRange& range = rangeLagrangian_;
if (range.empty())
{
return;
}
// Add Lagrangian fields even if particles are not enabled?
const int partId = range.start();
const word cloudName = getReaderPartName(partId);
// Reuse the previously determined cloud information.
DynamicList<word> cloudNames(range.size());
for (auto partId : range)
{
cloudNames.append(getReaderPartName(partId));
}
// use the db directly since this might be called without a mesh,
// Use the db directly since this might be called without a mesh,
// but the region must get added back in
fileName lagrangianPrefix(cloud::prefix);
if (meshRegion_ != polyMesh::defaultRegion)
@ -616,19 +563,37 @@ void Foam::vtkPVFoam::updateInfoLagrangianFields
lagrangianPrefix = meshRegion_/cloud::prefix;
}
IOobjectList objects
(
dbPtr_(),
dbPtr_().timeName(),
lagrangianPrefix/cloudName
);
// List of lagrangian fields across all clouds and all times.
// ParaView displays "(partial)" after field names that only apply
// to some of the clouds.
HashTable<wordHashSet> fields;
addToSelection<IOField<label>>(select, objects);
addToSelection<IOField<scalar>>(select, objects);
addToSelection<IOField<vector>>(select, objects);
addToSelection<IOField<sphericalTensor>>(select, objects);
addToSelection<IOField<symmTensor>>(select, objects);
addToSelection<IOField<tensor>>(select, objects);
for (const instant& t : dbPtr_().times())
{
for (const auto& cloudName : cloudNames)
{
const HashTable<wordHashSet> localFields =
IOobjectList
(
dbPtr_(),
t.name(),
lagrangianPrefix/cloudName
).classes();
forAllConstIters(localFields, iter)
{
fields(iter.key()) |= iter.object();
}
}
}
// Known/supported field-types
addToSelection<IOField<label>>(select, fields);
addToSelection<IOField<scalar>>(select, fields);
addToSelection<IOField<vector>>(select, fields);
addToSelection<IOField<sphericalTensor>>(select, fields);
addToSelection<IOField<symmTensor>>(select, fields);
addToSelection<IOField<tensor>>(select, fields);
// Restore the enabled selections
setSelectedArrayEntries(select, enabled);

View File

@ -71,6 +71,12 @@ int main(int argc, char *argv[])
"mergeRegions",
"combine regions from both surfaces"
);
argList::addOption
(
"scale",
"factor",
"geometry scaling factor on input surfaces"
);
argList args(argc, argv);
@ -81,6 +87,8 @@ int main(int argc, char *argv[])
const bool addPoint = args.optionFound("points");
const bool mergeRegions = args.optionFound("mergeRegions");
const scalar scaleFactor = args.optionLookupOrDefault<scalar>("scale", -1);
if (addPoint)
{
Info<< "Reading a surface and adding points from a file"
@ -117,8 +125,12 @@ int main(int argc, char *argv[])
<< "Writing : " << outFileName << nl << endl;
}
const triSurface surface1(inFileName1);
if (scaleFactor > 0)
{
Info<< "Scaling : " << scaleFactor << nl;
}
const triSurface surface1(inFileName1, scaleFactor);
Info<< "Surface1:" << endl;
surface1.writeStats(Info);
Info<< endl;
@ -131,7 +143,7 @@ int main(int argc, char *argv[])
if (addPoint)
{
IFstream pointsFile(args["points"]);
pointField extraPoints(pointsFile);
const pointField extraPoints(pointsFile);
Info<< "Additional Points:" << extraPoints.size() << endl;
@ -139,17 +151,16 @@ int main(int argc, char *argv[])
label pointi = pointsAll.size();
pointsAll.setSize(pointsAll.size() + extraPoints.size());
forAll(extraPoints, i)
for (const auto& pt : extraPoints)
{
pointsAll[pointi++] = extraPoints[i];
pointsAll[pointi++] = pt;
}
combinedSurf = triSurface(surface1, surface1.patches(), pointsAll);
}
else
{
const triSurface surface2(inFileName2);
const triSurface surface2(inFileName2, scaleFactor);
Info<< "Surface2:" << endl;
surface2.writeStats(Info);
Info<< endl;
@ -165,21 +176,19 @@ int main(int argc, char *argv[])
label pointi = 0;
// Copy points1 into pointsAll
forAll(points1, point1i)
for (const auto& pt : points1)
{
pointsAll[pointi++] = points1[point1i];
pointsAll[pointi++] = pt;
}
// Add surface2 points
forAll(points2, point2i)
for (const auto& pt : points2)
{
pointsAll[pointi++] = points2[point2i];
pointsAll[pointi++] = pt;
}
label trianglei = 0;
// Determine map for both regions
label nNewPatches = 0;
labelList patch1Map(surface1.patches().size());
@ -192,17 +201,17 @@ int main(int argc, char *argv[])
forAll(surface1.patches(), i)
{
const word& name = surface1.patches()[i].name();
HashTable<label>::iterator iter = nameToPatch.find(name);
auto iter = nameToPatch.find(name);
label combinedi;
if (iter == nameToPatch.end())
if (iter.found())
{
combinedi = nameToPatch.size();
nameToPatch.insert(name, combinedi);
combinedi = iter.object();
}
else
{
combinedi = iter();
combinedi = nameToPatch.size();
nameToPatch.insert(name, combinedi);
}
patch1Map[i] = combinedi;
}
@ -212,17 +221,17 @@ int main(int argc, char *argv[])
forAll(surface2.patches(), i)
{
const word& name = surface2.patches()[i].name();
HashTable<label>::iterator iter = nameToPatch.find(name);
auto iter = nameToPatch.find(name);
label combinedi;
if (iter == nameToPatch.end())
if (iter.found())
{
combinedi = nameToPatch.size();
nameToPatch.insert(name, combinedi);
combinedi = iter.object();
}
else
{
combinedi = iter();
combinedi = nameToPatch.size();
nameToPatch.insert(name, combinedi);
}
patch2Map[i] = combinedi;
}
@ -245,11 +254,9 @@ int main(int argc, char *argv[])
}
// Copy triangles1 into trianglesAll
forAll(surface1, facei)
for (const labelledTri& tri : surface1)
{
const labelledTri& tri = surface1[facei];
labelledTri& destTri = facesAll[trianglei++];
destTri.triFace::operator=(tri);
@ -257,10 +264,8 @@ int main(int argc, char *argv[])
}
// Add (renumbered) surface2 triangles
forAll(surface2, facei)
for (const labelledTri& tri : surface2)
{
const labelledTri& tri = surface2[facei];
labelledTri& destTri = facesAll[trianglei++];
destTri[0] = tri[0] + points1.size();
destTri[1] = tri[1] + points1.size();

View File

@ -1517,6 +1517,12 @@ int main(int argc, char *argv[])
argList::validArgs.append("surfaceFile1");
argList::validArgs.append("surfaceFile2");
argList::addOption
(
"scale",
"factor",
"Geometry scaling factor (both surfaces)"
);
argList::addBoolOption
(
"surf1Baffle",
@ -1587,6 +1593,10 @@ int main(int argc, char *argv[])
}
// Scale factor for both surfaces:
const scalar scaleFactor
= args.optionLookupOrDefault<scalar>("scale", -1);
const word surf1Name(args[2]);
Info<< "Reading surface " << surf1Name << endl;
triSurfaceMesh surf1
@ -1599,6 +1609,11 @@ int main(int argc, char *argv[])
runTime
)
);
if (scaleFactor > 0)
{
Info<< "Scaling : " << scaleFactor << nl;
surf1.scalePoints(scaleFactor);
}
Info<< surf1Name << " statistics:" << endl;
surf1.writeStats(Info);
@ -1616,6 +1631,11 @@ int main(int argc, char *argv[])
runTime
)
);
if (scaleFactor > 0)
{
Info<< "Scaling : " << scaleFactor << nl;
surf2.scalePoints(scaleFactor);
}
Info<< surf2Name << " statistics:" << endl;
surf2.writeStats(Info);
@ -1627,7 +1647,7 @@ int main(int argc, char *argv[])
edgeIntersections edgeCuts1;
edgeIntersections edgeCuts2;
bool invertedSpace = args.optionFound("invertedSpace");
const bool invertedSpace = args.optionFound("invertedSpace");
if (invertedSpace && validActions[action] == booleanSurface::DIFFERENCE)
{
@ -1736,9 +1756,7 @@ int main(int argc, char *argv[])
const extendedFeatureEdgeMesh& feMesh = feMeshPtr();
feMesh.writeStats(Info);
feMesh.write();
feMesh.writeObj(feMesh.path()/sFeatFileName);
{

View File

@ -60,7 +60,13 @@ int main(int argc, char *argv[])
argList::addBoolOption
(
"noClean",
"suppress surface checking/cleanup on the input surface"
"Suppress surface checking/cleanup on the input surface"
);
argList::addOption
(
"scale",
"factor",
"Input geometry scaling factor"
);
argList args(argc, argv);
@ -77,7 +83,12 @@ int main(int argc, char *argv[])
Info<< "Reading surface from " << inFileName << " ..." << nl << endl;
triSurface surf(inFileName);
triSurface surf
(
inFileName,
args.optionLookupOrDefault<scalar>("scale", -1)
);
surf.writeStats(Info);
if (!args.optionFound("noClean"))
@ -90,7 +101,7 @@ int main(int argc, char *argv[])
while (true)
{
label nEdgeCollapse = collapseEdge(surf, minLen);
const label nEdgeCollapse = collapseEdge(surf, minLen);
if (nEdgeCollapse == 0)
{
@ -99,7 +110,7 @@ int main(int argc, char *argv[])
}
while (true)
{
label nSplitEdge = collapseBase(surf, minLen, minQuality);
const label nSplitEdge = collapseBase(surf, minLen, minQuality);
if (nSplitEdge == 0)
{

View File

@ -28,7 +28,7 @@ Group
grpSurfaceUtilities
Description
Surface coarsening using `bunnylod'
Surface coarsening using 'bunnylod'
Reference:
\verbatim
@ -75,6 +75,13 @@ int main(int argc, char *argv[])
argList::validArgs.append("surfaceFile");
argList::validArgs.append("reductionFactor");
argList::validArgs.append("output surfaceFile");
argList::addOption
(
"scale",
"factor",
"input geometry scaling factor"
);
argList args(argc, argv);
const fileName inFileName = args[1];
@ -90,40 +97,39 @@ int main(int argc, char *argv[])
<< exit(FatalError);
}
Info<< "Input surface :" << inFileName << endl
<< "Reduction factor:" << reduction << endl
<< "Output surface :" << outFileName << endl << endl;
const scalar scaleFactor = args.optionLookupOrDefault<scalar>("scale", -1);
const triSurface surf(inFileName);
Info<< "Input surface :" << inFileName << nl
<< "Scaling factor :" << scaleFactor << nl
<< "Reduction factor:" << reduction << nl
<< "Output surface :" << outFileName << nl
<< endl;
const triSurface surf(inFileName, scaleFactor);
Info<< "Surface:" << endl;
surf.writeStats(Info);
Info<< endl;
::List< ::Vector> vert; // global list of vertices
::List< ::tridata> tri; // global list of triangles
::List<::Vector> vert; // global list of vertices
::List<::tridata> tri; // global list of triangles
// Convert triSurface to progmesh format. Note: can use global point
// numbering since surface read in from file.
const pointField& pts = surf.points();
forAll(pts, ptI)
for (const point& pt : pts)
{
const point& pt = pts[ptI];
vert.Add( ::Vector(pt.x(), pt.y(), pt.z()));
vert.Add(::Vector(pt.x(), pt.y(), pt.z()));
}
forAll(surf, facei)
for (const labelledTri& f : surf)
{
const labelledTri& f = surf[facei];
tridata td;
td.v[0]=f[0];
td.v[1]=f[1];
td.v[2]=f[2];
td.v[0] = f[0];
td.v[1] = f[1];
td.v[2] = f[2];
tri.Add(td);
}
@ -133,20 +139,20 @@ int main(int argc, char *argv[])
::ProgressiveMesh(vert,tri,collapse_map,permutation);
// rearrange the vertex list
::List< ::Vector> temp_list;
for (int i=0;i<vert.num;i++)
::List<::Vector> temp_list;
for (int i=0; i<vert.num; i++)
{
temp_list.Add(vert[i]);
}
for (int i=0;i<vert.num;i++)
for (int i=0; i<vert.num; i++)
{
vert[permutation[i]]=temp_list[i];
vert[permutation[i]] = temp_list[i];
}
// update the changes in the entries in the triangle list
for (int i=0;i<tri.num;i++)
for (int i=0; i<tri.num; i++)
{
for (int j=0;j<3;j++)
for (int j=0; j<3; j++)
{
tri[i].v[j] = permutation[tri[i].v[j]];
}

View File

@ -74,24 +74,24 @@ int main(int argc, char *argv[])
argList::addBoolOption
(
"clean",
"perform some surface checking/cleanup on the input surface"
"Perform some surface checking/cleanup on the input surface"
);
argList::addBoolOption
(
"group",
"reorder faces into groups; one per region"
"Reorder faces into groups; one per region"
);
argList::addOption
(
"scale",
"factor",
"geometry scaling factor - default is 1"
"Input geometry scaling factor"
);
argList::addOption
(
"writePrecision",
"label",
"write to output with the specified precision"
"Write to output with the specified precision"
);
argList args(argc, argv);
@ -116,8 +116,10 @@ int main(int argc, char *argv[])
<< exit(FatalError);
}
const scalar scaleFactor = args.optionLookupOrDefault<scalar>("scale", -1);
Info<< "Reading : " << importName << endl;
triSurface surf(importName);
triSurface surf(importName, scaleFactor);
Info<< "Read surface:" << endl;
surf.writeStats(Info);
@ -144,13 +146,6 @@ int main(int argc, char *argv[])
}
Info<< "writing " << exportName;
scalar scaleFactor = 0;
if (args.optionReadIfPresent("scale", scaleFactor) && scaleFactor > 0)
{
Info<< " with scaling " << scaleFactor;
surf.scalePoints(scaleFactor);
}
Info<< endl;
surf.write(exportName, sortByRegion);

View File

@ -339,8 +339,15 @@ int main(int argc, char *argv[])
<< " writeObj=" << writeObj
<< " writeVTK=" << writeVTK << nl;
scalar scaleFactor = -1;
// Allow rescaling of the surface points (eg, mm -> m)
if (surfaceDict.readIfPresent("scale", scaleFactor) && scaleFactor > 0)
{
Info<<"Scaling : " << scaleFactor << nl;
}
// Load a single file, or load and combine multiple selected files
autoPtr<triSurface> surfPtr = loader.load(loadingOption);
autoPtr<triSurface> surfPtr = loader.load(loadingOption, scaleFactor);
if (!surfPtr.valid() || surfPtr().empty())
{
FatalErrorInFunction

View File

@ -50,6 +50,9 @@ outputName1
surfaces (surface1.stl surface2.nas);
// mm -> m scaling
// scale 0.001;
// Generate additional intersection features (none | self | region)
intersectionMethod self;

View File

@ -123,9 +123,8 @@ int main(int argc, char *argv[])
Info<< " " << points[f[fp]] << "\n";
}
Info<< endl;
Info<< "End\n" << endl;
Info<< nl
<< "End\n" << endl;
return 0;
}

View File

@ -90,7 +90,7 @@ int main(int argc, char *argv[])
vector refPt = Zero;
bool calcAroundRefPt = args.optionReadIfPresent("referencePoint", refPt);
triSurface surf(surfFileName);
const triSurface surf(surfFileName);
scalar m = 0.0;
vector cM = Zero;

View File

@ -44,7 +44,7 @@ Usage
Specify a feature angle
E.g. inflate surface by 2cm with 1.5 safety factor:
E.g. inflate surface by 20mm with 1.5 safety factor:
surfaceInflate DTC-scaled.obj 0.02 1.5 -featureAngle 45 -nSmooth 2
\*---------------------------------------------------------------------------*/

View File

@ -1,3 +0,0 @@
surfaceMeshConvertTesting.C
EXE = $(FOAM_APPBIN)/surfaceMeshConvertTesting

View File

@ -87,7 +87,7 @@ int main(int argc, char *argv[])
(
"scale",
"factor",
"geometry scaling factor - default is 1"
"input geometry scaling factor"
);
argList::addBoolOption
(
@ -119,10 +119,10 @@ int main(int argc, char *argv[])
// use UnsortedMeshedSurface, not MeshedSurface to maintain ordering
UnsortedMeshedSurface<face> surf(importName);
scalar scaling = 0;
if (args.optionReadIfPresent("scale", scaling) && scaling > 0)
const scalar scaling = args.optionLookupOrDefault<scalar>("scale", -1);
if (scaling > 0)
{
Info<< " -scale " << scaling << endl;
Info<< " -scale " << scaling << nl;
surf.scalePoints(scaling);
}

View File

@ -63,6 +63,12 @@ int main(int argc, char *argv[])
"usePierceTest",
"determine orientation by counting number of intersections"
);
argList::addOption
(
"scale",
"factor",
"input geometry scaling factor"
);
argList args(argc, argv);
@ -86,11 +92,14 @@ int main(int argc, char *argv[])
Info<< "outside" << endl;
}
const scalar scaling = args.optionLookupOrDefault<scalar>("scale", -1);
if (scaling > 0)
{
Info<< "Input scaling: " << scaling << nl;
}
// Load surface
triSurface surf(surfFileName);
triSurface surf(surfFileName, scaling);
bool anyFlipped = false;
@ -118,11 +127,11 @@ int main(int argc, char *argv[])
if (anyFlipped)
{
Info<< "Flipped orientation of (part of) surface." << endl;
Info<< "Flipped orientation of (part of) surface." << nl;
}
else
{
Info<< "Did not flip orientation of any triangle of surface." << endl;
Info<< "Did not flip orientation of any triangle of surface." << nl;
}
Info<< "Writing new surface to " << outFileName << endl;

View File

@ -46,31 +46,47 @@ using namespace Foam;
int main(int argc, char *argv[])
{
argList::addNote
(
"Merge points on surface if they are within absolute distance [m]."
);
argList::noParallel();
argList::validArgs.append("surfaceFile");
argList::validArgs.append("merge distance");
argList::validArgs.append("output surfaceFile");
argList::addOption
(
"scale",
"factor",
"input geometry scaling factor"
);
argList args(argc, argv);
const fileName surfFileName = args[1];
const scalar mergeTol = args.argRead<scalar>(2);
const fileName outFileName = args[3];
Info<< "Reading surface from " << surfFileName << " ..." << endl;
Info<< "Merging points within " << mergeTol << " metre." << endl;
const scalar scaling = args.optionLookupOrDefault<scalar>("scale", -1);
triSurface surf1(surfFileName);
Info<< "Reading surface from " << surfFileName << " ..." << nl
<< "Merging points within " << mergeTol << " metre." << nl;
if (scaling > 0)
{
Info<< "input scaling " << scaling << nl;
}
Info<< "Original surface:" << endl;
const triSurface surf1(surfFileName, scaling);
Info<< "Original surface:" << nl;
surf1.writeStats(Info);
triSurface cleanSurf(surf1);
while (true)
{
label nOldVert = cleanSurf.nPoints();
const label nOldVert = cleanSurf.nPoints();
cleanSurf = triSurfaceTools::mergePoints(cleanSurf, mergeTol);

View File

@ -105,7 +105,8 @@ int main(int argc, char *argv[])
argList::addNote
(
"Redistribute a triSurface. "
"The specified surface must be located in the constant/triSurface directory"
"The specified surface must be located in the constant/triSurface "
"directory"
);
argList::validArgs.append("triSurfaceMesh");

View File

@ -85,9 +85,9 @@ int main(int argc, char *argv[])
argList::addOption
(
"scale",
"vector",
"scale by the specified amount - eg, '(0.001 0.001 0.001)' for a "
"uniform [mm] to [m] scaling"
"scalar | vector",
"scale by the specified amount - eg, for a uniform [mm] to [m] scaling "
"use either '(0.001 0.001 0.001)' or simply '0.001'"
);
argList::addOption
(
@ -138,7 +138,7 @@ int main(int argc, char *argv[])
n1n2[0] /= mag(n1n2[0]);
n1n2[1] /= mag(n1n2[1]);
tensor T = rotationTensor(n1n2[0], n1n2[1]);
const tensor T = rotationTensor(n1n2[0], n1n2[1]);
Info<< "Rotating points by " << T << endl;
@ -151,10 +151,10 @@ int main(int argc, char *argv[])
<< " pitch " << v.y() << nl
<< " yaw " << v.z() << nl;
// Convert to radians
// degToRad
v *= pi/180.0;
quaternion R(quaternion::rotationSequence::XYZ, v);
const quaternion R(quaternion::rotationSequence::XYZ, v);
Info<< "Rotating points by quaternion " << R << endl;
points = transform(R, points);
@ -166,29 +166,43 @@ int main(int argc, char *argv[])
<< " pitch " << v.y() << nl
<< " roll " << v.z() << nl;
// Convert to radians
// degToRad
v *= pi/180.0;
scalar yaw = v.x();
scalar pitch = v.y();
scalar roll = v.z();
quaternion R = quaternion(vector(0, 0, 1), yaw);
R *= quaternion(vector(0, 1, 0), pitch);
R *= quaternion(vector(1, 0, 0), roll);
const quaternion R(quaternion::rotationSequence::ZYX, v);
Info<< "Rotating points by quaternion " << R << endl;
points = transform(R, points);
}
if (args.optionReadIfPresent("scale", v))
if (args.optionFound("scale"))
{
Info<< "Scaling points by " << v << endl;
// Use readList to handle single or multiple values
const List<scalar> scaling = args.optionReadList<scalar>("scale");
points.replace(vector::X, v.x()*points.component(vector::X));
points.replace(vector::Y, v.y()*points.component(vector::Y));
points.replace(vector::Z, v.z()*points.component(vector::Z));
if (scaling.size() == 1)
{
Info<< "Scaling points uniformly by " << scaling[0] << nl;
points *= scaling[0];
}
else if (scaling.size() == 3)
{
Info<< "Scaling points by ("
<< scaling[0] << " "
<< scaling[1] << " "
<< scaling[2] << ")" << nl;
points.replace(vector::X, scaling[0]*points.component(vector::X));
points.replace(vector::Y, scaling[1]*points.component(vector::Y));
points.replace(vector::Z, scaling[2]*points.component(vector::Z));
}
else
{
FatalError
<< "-scale with 1 or 3 components only" << nl
<< "given: " << args["scale"] << endl
<< exit(FatalError);
}
}
surf1.movePoints(points);

View File

@ -224,7 +224,6 @@ _of_complete_cache_[surfaceInertia]="-case -density -referencePoint | -noFunctio
_of_complete_cache_[surfaceInflate]="-case -featureAngle -nSmooth | -checkSelfIntersection -debug -noFunctionObjects -srcDoc -doc -help"
_of_complete_cache_[surfaceLambdaMuSmooth]="-featureFile | -srcDoc -doc -help"
_of_complete_cache_[surfaceMeshConvert]="-case -dict -from -scaleIn -scaleOut -to | -clean -noFunctionObjects -tri -srcDoc -doc -help"
_of_complete_cache_[surfaceMeshConvertTesting]="-case -scale | -clean -noFunctionObjects -orient -stdout -surfMesh -testModify -triFace -triSurface -unsorted -srcDoc -doc -help"
_of_complete_cache_[surfaceMeshExport]="-case -dict -from -name -scaleIn -scaleOut -to | -clean -noFunctionObjects -srcDoc -doc -help"
_of_complete_cache_[surfaceMeshImport]="-case -dict -from -name -scaleIn -scaleOut -to | -clean -noFunctionObjects -srcDoc -doc -help"
_of_complete_cache_[surfaceMeshInfo]="-case -scale | -areas -noFunctionObjects -xml -srcDoc -doc -help"

View File

@ -125,7 +125,8 @@ public:
// Constructors
//- Create a writer object with given output scaling
//- Create a writer object with given output scaling.
// Treats a zero or negative scale factor as unity scaling.
meshWriter
(
const polyMesh&,

View File

@ -97,7 +97,8 @@ public:
// Constructors
//- Prepare for writing, optionally with scaling
//- Prepare for writing, optionally with scaling.
// Treats a zero or negative scale factor as unity scaling.
FIREMeshWriter(const polyMesh&, const scalar scaleFactor = 1.0);

View File

@ -34,28 +34,33 @@ Foam::fileFormats::NASCore::NASCore()
// * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * * //
Foam::scalar Foam::fileFormats::NASCore::parseNASCoord
(
const string& s
)
Foam::scalar Foam::fileFormats::NASCore::parseNASCoord(const string& s)
{
size_t expSign = s.find_last_of("+-");
scalar value = 0;
if (expSign != string::npos && expSign > 0 && !isspace(s[expSign-1]))
const size_t expSign = s.find_last_of("+-");
if (expSign != std::string::npos && expSign > 0 && !isspace(s[expSign-1]))
{
scalar mantissa = readScalar(IStringStream(s.substr(0, expSign))());
scalar exponent = readScalar(IStringStream(s.substr(expSign+1))());
scalar exponent = 0;
// Parse as per strtod/strtof - allowing trailing space or [Ee]
readScalar(s.substr(0, expSign).c_str(), value); // mantissa
readScalar(s.substr(expSign+1).c_str(), exponent);
if (s[expSign] == '-')
{
exponent = -exponent;
}
return mantissa * pow(10, exponent);
value *= ::pow(10, exponent);
}
else
{
return readScalar(IStringStream(s)());
readScalar(s.c_str(), value);
}
return value;
}

View File

@ -55,8 +55,8 @@ public:
// Public Member Functions
//- Do weird things to extract number
static scalar parseNASCoord(const string&);
//- Extract numbers from things like "-2.358-8" (same as "-2.358e-8")
static scalar parseNASCoord(const string& s);
// Constructors

View File

@ -361,6 +361,23 @@ Foam::label Foam::isoCutCell::calcSubCell
// Cell cut at least at one face
cellStatus_ = 0;
calcIsoFaceCentreAndArea();
// In the rare but occurring cases where a cell is only touched at a
// point or a line the isoFaceArea_ will have zero length and here the
// cell should be treated as either completely empty or full.
if (mag(isoFaceArea_) < 10*SMALL)
{
if (fullySubFaces_.empty())
{
// Cell fully above isosurface
cellStatus_ = 1;
}
else
{
// Cell fully below isosurface
cellStatus_ = -1;
}
}
}
else if (fullySubFaces_.empty())
{

View File

@ -69,23 +69,32 @@ const Foam::Enum
>
Foam::functionObjects::fieldValues::surfaceFieldValue::operationTypeNames_
{
// Normal operations
{ operationType::opNone, "none" },
{ operationType::opMin, "min" },
{ operationType::opMax, "max" },
{ operationType::opSum, "sum" },
{ operationType::opWeightedSum, "weightedSum" },
{ operationType::opSumMag, "sumMag" },
{ operationType::opSumDirection, "sumDirection" },
{ operationType::opSumDirectionBalance, "sumDirectionBalance" },
{ operationType::opAverage, "average" },
{ operationType::opWeightedAverage, "weightedAverage" },
{ operationType::opAreaAverage, "areaAverage" },
{ operationType::opWeightedAreaAverage, "weightedAreaAverage" },
{ operationType::opAreaIntegrate, "areaIntegrate" },
{ operationType::opWeightedAreaIntegrate, "weightedAreaIntegrate" },
{ operationType::opMin, "min" },
{ operationType::opMax, "max" },
{ operationType::opCoV, "CoV" },
{ operationType::opAreaNormalAverage, "areaNormalAverage" },
{ operationType::opAreaNormalIntegrate, "areaNormalIntegrate" },
// Using weighting
{ operationType::opWeightedSum, "weightedSum" },
{ operationType::opWeightedAverage, "weightedAverage" },
{ operationType::opWeightedAreaAverage, "weightedAreaAverage" },
{ operationType::opWeightedAreaIntegrate, "weightedAreaIntegrate" },
// Using absolute weighting
{ operationType::opAbsWeightedSum, "absWeightedSum" },
{ operationType::opAbsWeightedAverage, "absWeightedAverage" },
{ operationType::opAbsWeightedAreaAverage, "absWeightedAreaAverage" },
{ operationType::opAbsWeightedAreaIntegrate, "absWeightedAreaIntegrate" },
};
const Foam::Enum
@ -244,7 +253,7 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
{
if (facePatchId_[i] != -1)
{
label patchi = facePatchId_[i];
const label patchi = facePatchId_[i];
globalFacesIs[i] += mesh_.boundaryMesh()[patchi].start();
}
}
@ -279,9 +288,8 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
// My own data first
{
const faceList& fcs = allFaces[Pstream::myProcNo()];
forAll(fcs, i)
for (const face& f : fcs)
{
const face& f = fcs[i];
face& newF = faces[nFaces++];
newF.setSize(f.size());
forAll(f, fp)
@ -291,9 +299,9 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
}
const pointField& pts = allPoints[Pstream::myProcNo()];
forAll(pts, i)
for (const point& pt : pts)
{
points[nPoints++] = pts[i];
points[nPoints++] = pt;
}
}
@ -303,9 +311,8 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
if (proci != Pstream::myProcNo())
{
const faceList& fcs = allFaces[proci];
forAll(fcs, i)
for (const face& f : fcs)
{
const face& f = fcs[i];
face& newF = faces[nFaces++];
newF.setSize(f.size());
forAll(f, fp)
@ -315,9 +322,9 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
}
const pointField& pts = allPoints[proci];
forAll(pts, i)
for (const point& pt : pts)
{
points[nPoints++] = pts[i];
points[nPoints++] = pt;
}
}
}
@ -343,9 +350,9 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
}
points.transfer(newPoints);
forAll(faces, i)
for (face& f : faces)
{
inplaceRenumber(oldToNew, faces[i]);
inplaceRenumber(oldToNew, f);
}
}
}
@ -447,17 +454,19 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::totalArea() const
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
bool Foam::functionObjects::fieldValues::surfaceFieldValue::needsSf() const
bool Foam::functionObjects::fieldValues::surfaceFieldValue::usesSf() const
{
// Many operations use the Sf field
// Only a few operations do not require the Sf field
switch (operation_)
{
case opNone:
case opMin:
case opMax:
case opSum:
case opSumMag:
case opAverage:
case opMin:
case opMax:
case opSumDirection:
case opSumDirectionBalance:
{
return false;
}
@ -469,26 +478,6 @@ bool Foam::functionObjects::fieldValues::surfaceFieldValue::needsSf() const
}
bool Foam::functionObjects::fieldValues::surfaceFieldValue::needsWeight() const
{
// Only a few operations use weight field
switch (operation_)
{
case opWeightedSum:
case opWeightedAverage:
case opWeightedAreaAverage:
case opWeightedAreaIntegrate:
{
return true;
}
default:
{
return false;
}
}
}
void Foam::functionObjects::fieldValues::surfaceFieldValue::initialise
(
const dictionary& dict
@ -582,19 +571,32 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::initialise
weightFieldName_ = "none";
if (needsWeight())
if (usesWeight())
{
if (regionType_ == stSampledSurface)
{
FatalIOErrorInFunction(dict)
<< "Cannot use weighted operation '"
<< operationTypeNames_[operation_]
<< "' for sampledSurface"
<< exit(FatalIOError);
}
if (dict.readIfPresent("weightField", weightFieldName_))
{
if (regionType_ == stSampledSurface)
{
FatalIOErrorInFunction(dict)
<< "Cannot use weightField for sampledSurface"
<< exit(FatalIOError);
}
Info<< " weight field = " << weightFieldName_ << nl;
}
else
{
// Suggest possible alternative unweighted operation?
FatalIOErrorInFunction(dict)
<< "The '" << operationTypeNames_[operation_]
<< "' operation is missing a weightField." << nl
<< "Either provide the weightField, "
<< "use weightField 'none' to suppress weighting," << nl
<< "or use a different operation."
<< exit(FatalIOError);
}
}
// Backwards compatibility for v1612+ and older
@ -660,10 +662,10 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::writeFileHeader
os << tab << "Area";
}
forAll(fields_, i)
for (const word& fieldName : fields_)
{
os << tab << operationTypeNames_[operation_]
<< "(" << fields_[i] << ")";
<< "(" << fieldName << ")";
}
os << endl;
@ -684,12 +686,12 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::processValues
{
case opSumDirection:
{
vector n(dict_.lookup("direction"));
const vector n(dict_.lookup("direction"));
return gSum(pos(values*(Sf & n))*mag(values));
}
case opSumDirectionBalance:
{
vector n(dict_.lookup("direction"));
const vector n(dict_.lookup("direction"));
const scalarField nv(values*(Sf & n));
return gSum(pos(nv)*mag(values) - neg(nv)*mag(values));
@ -754,10 +756,17 @@ Foam::tmp<Foam::scalarField>
Foam::functionObjects::fieldValues::surfaceFieldValue::weightingFactor
(
const Field<scalar>& weightField
)
) const
{
// pass through
return weightField;
if (usesMag())
{
return mag(weightField);
}
else
{
// pass through
return weightField;
}
}
@ -767,16 +776,21 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::weightingFactor
(
const Field<scalar>& weightField,
const vectorField& Sf
)
) const
{
// scalar * Area
if (returnReduce(weightField.empty(), andOp<bool>()))
{
// No weight field - revert to unweighted form
return mag(Sf);
}
else if (usesMag())
{
return mag(weightField * mag(Sf));
}
else
{
return weightField * mag(Sf);
return (weightField * mag(Sf));
}
}
@ -787,16 +801,21 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::weightingFactor
(
const Field<vector>& weightField,
const vectorField& Sf
)
) const
{
// vector (dot) Area
if (returnReduce(weightField.empty(), andOp<bool>()))
{
// No weight field - revert to unweighted form
return mag(Sf);
}
else if (usesMag())
{
return mag(weightField & Sf);
}
else
{
return weightField & Sf;
return (weightField & Sf);
}
}
@ -915,7 +934,7 @@ bool Foam::functionObjects::fieldValues::surfaceFieldValue::write()
// Many operations use the Sf field
vectorField Sf;
if (needsSf())
if (usesSf())
{
if (regionType_ == stSurface)
{

View File

@ -103,22 +103,26 @@ Usage
The \c operation is one of:
\plaintable
none | no operation
sum | sum
weightedSum | weighted sum
sumMag | sum of component magnitudes
sumDirection | sum values which are positive in given direction
sumDirectionBalance | sum of balance of values in given direction
average | ensemble average
weightedAverage | weighted average
areaAverage | area-weighted average
weightedAreaAverage | weighted area average
areaIntegrate | area integral
weightedAreaIntegrate | weighted area integral
min | minimum
max | maximum
sum | sum
sumMag | sum of component magnitudes
sumDirection | sum values that are positive in given direction
sumDirectionBalance | sum of balance of values in given direction
average | ensemble average
areaAverage | area-weighted average
areaIntegrate | area integral
CoV | coefficient of variation: standard deviation/mean
areaNormalAverage| area-weighted average in face normal direction
areaNormalIntegrate | area-weighted integral in face normal direction
weightedSum | weighted sum
weightedAverage | weighted average
weightedAreaAverage | weighted area average
weightedAreaIntegrate | weighted area integral
absWeightedSum | sum using absolute weighting
absWeightedAverage | average using absolute weighting
absWeightedAreaAverage | area average using absolute weighting
absWeightedAreaIntegrate | area integral using absolute weighting
\endplaintable
Note
@ -190,36 +194,73 @@ public:
//- Region type enumeration
enum regionTypes
{
stFaceZone,
stPatch,
stSurface,
stSampledSurface
stFaceZone, //!< Calculate on a faceZone
stPatch, //!< Calculate on a patch
stSurface, //!< Calculate with fields on a surfMesh
stSampledSurface //!< Sample onto surface and calculate
};
//- Region type names
static const Enum<regionTypes> regionTypeNames_;
//- Bitmask values for operation variants
enum operationVariant
{
typeBase = 0, //!< Base operation
typeWeighted = 0x100, //!< Operation using weighting
typeAbsolute = 0x200, //!< Operation using mag (eg, for weighting)
};
//- Operation type enumeration
enum operationType
{
opNone, //!< None
opSum, //!< Sum
opWeightedSum, //!< Weighted sum
opSumMag, //!< Magnitude of sum
// Normal operations
opNone = 0, //!< No operation
opMin, //!< Minimum value
opMax, //!< Maximum value
opSum, //!< Sum of values
opSumMag, //!< Sum of component magnitudes
opSumDirection, //!< Sum in a given direction
opSumDirectionBalance, //!< Sum in a given direction for multiple
opAverage, //!< Average
opWeightedAverage, //!< Weighted average
//! Sum of balance of values in given direction
opSumDirectionBalance,
opAverage, //!< Ensemble average
opAreaAverage, //!< Area average
opWeightedAreaAverage, //!< Weighted area average
opAreaIntegrate, //!< Area integral
opWeightedAreaIntegrate, //!< Weighted area integral
opMin, //!< Minimum
opMax, //!< Maximum
opCoV, //!< Coefficient of variation
opAreaNormalAverage, //!< Area average in normal direction
opAreaNormalIntegrate //!< Area integral in normal direction
opAreaNormalIntegrate, //!< Area integral in normal direction
// Weighted variants
//! Weighted sum
opWeightedSum = (opSum | typeWeighted),
//! Weighted average
opWeightedAverage = (opAverage | typeWeighted),
//! Weighted area average
opWeightedAreaAverage = (opAreaAverage | typeWeighted),
//! Weighted area integral
opWeightedAreaIntegrate = (opAreaIntegrate | typeWeighted),
// Variants using absolute weighting
//! Sum using abs weighting
opAbsWeightedSum = (opWeightedSum | typeAbsolute),
//! Average using abs weighting
opAbsWeightedAverage = (opWeightedAverage | typeAbsolute),
//! Area average using abs weighting
opAbsWeightedAreaAverage = (opWeightedAreaAverage | typeAbsolute),
//! Area integral using abs weighting
opAbsWeightedAreaIntegrate =
(opWeightedAreaIntegrate | typeAbsolute),
};
//- Operation type names
@ -229,8 +270,8 @@ public:
//- Post-operation type enumeration
enum postOperationType
{
postOpNone,
postOpSqrt
postOpNone, //!< No additional operation after calculation
postOpSqrt //!< Perform sqrt after normal operation
};
//- Operation type names
@ -325,11 +366,19 @@ protected:
//- Return the local true/false list representing the face flip map
inline const boolList& faceFlip() const;
//- True if the specified operation needs a surface Sf
bool needsSf() const;
//- True if the operation needs a surface Sf
bool usesSf() const;
//- True if the specified operation needs a weight-field
bool needsWeight() const;
//- True if the operation variant uses mag
inline bool usesMag() const;
//- True if the operation variant uses a weight-field
inline bool usesWeight() const;
//- True if operation variant uses a weight-field that is available.
// Checks for availability on any processor.
template<class WeightType>
inline bool canWeight(const Field<WeightType>& weightField) const;
//- Initialise, e.g. face addressing
void initialise(const dictionary& dict);
@ -365,6 +414,7 @@ protected:
const Field<WeightType>& weightField
) const;
//- Filter a surface field according to faceIds
template<class Type>
tmp<Field<Type>> filterField
@ -379,20 +429,24 @@ protected:
const GeometricField<Type, fvPatchField, volMesh>& field
) const;
//- Weighting factor
//- Weighting factor.
// Possibly applies mag() depending on the operation type.
template<class WeightType>
static tmp<scalarField> weightingFactor
tmp<scalarField> weightingFactor
(
const Field<WeightType>& weightField
);
) const;
//- Weighting factor, weight field with the area
//- Weighting factor, weight field with the area.
// Possibly applies mag() depending on the operation type.
// Reverts to mag(Sf) if the weight field is not available.
template<class WeightType>
static tmp<scalarField> weightingFactor
tmp<scalarField> weightingFactor
(
const Field<WeightType>& weightField,
const vectorField& Sf
);
) const;
//- Templated helper function to output field values
@ -489,7 +543,7 @@ template<>
tmp<scalarField> surfaceFieldValue::weightingFactor
(
const Field<scalar>& weightField
);
) const;
//- Specialisation for scalar - scalar * Area
@ -498,7 +552,7 @@ tmp<scalarField> surfaceFieldValue::weightingFactor
(
const Field<scalar>& weightField,
const vectorField& Sf
);
) const;
//- Specialisation for vector - vector (dot) Area
@ -507,7 +561,7 @@ tmp<scalarField> surfaceFieldValue::weightingFactor
(
const Field<vector>& weightField,
const vectorField& Sf
);
) const;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

View File

@ -23,7 +23,6 @@ License
\*---------------------------------------------------------------------------*/
#include "surfaceFieldValue.H"
#include "Time.H"
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
@ -51,6 +50,22 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::faceFlip() const
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
inline bool
Foam::functionObjects::fieldValues::surfaceFieldValue::usesMag() const
{
// Operation specifically tagged to use mag
return (operation_ & typeAbsolute);
}
inline bool
Foam::functionObjects::fieldValues::surfaceFieldValue::usesWeight() const
{
// Operation specifically tagged to require a weight field
return (operation_ & typeWeighted);
}
inline const Foam::functionObjects::fieldValues::surfaceFieldValue::regionTypes&
Foam::functionObjects::fieldValues::surfaceFieldValue::regionType() const
{

View File

@ -33,6 +33,20 @@ License
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
template<class WeightType>
inline bool Foam::functionObjects::fieldValues::surfaceFieldValue::canWeight
(
const Field<WeightType>& weightField
) const
{
return
(
usesWeight()
&& returnReduce(!weightField.empty(), orOp<bool>()) // On some processor
);
}
template<class Type>
bool Foam::functionObjects::fieldValues::surfaceFieldValue::validField
(
@ -142,23 +156,14 @@ processSameTypeValues
{
break;
}
case opSum:
case opMin:
{
result = gSum(values);
result = gMin(values);
break;
}
case opWeightedSum:
case opMax:
{
if (returnReduce(weightField.empty(), andOp<bool>()))
{
result = gSum(values);
}
else
{
tmp<scalarField> weight(weightingFactor(weightField));
result = gSum(weight*values);
}
result = gMax(values);
break;
}
case opSumMag:
@ -166,6 +171,23 @@ processSameTypeValues
result = gSum(cmptMag(values));
break;
}
case opSum:
case opWeightedSum:
case opAbsWeightedSum:
{
if (canWeight(weightField))
{
tmp<scalarField> weight(weightingFactor(weightField));
result = gSum(weight*values);
}
else
{
// Unweighted form
result = gSum(values);
}
break;
}
case opSumDirection:
case opSumDirectionBalance:
{
@ -178,62 +200,56 @@ processSameTypeValues
break;
}
case opAverage:
{
const label n = returnReduce(values.size(), sumOp<label>());
result = gSum(values)/(scalar(n) + ROOTVSMALL);
break;
}
case opWeightedAverage:
case opAbsWeightedAverage:
{
if (returnReduce(weightField.empty(), andOp<bool>()))
{
const label n = returnReduce(values.size(), sumOp<label>());
result = gSum(values)/(scalar(n) + ROOTVSMALL);
}
else
if (canWeight(weightField))
{
const scalarField factor(weightingFactor(weightField));
result = gSum(factor*values)/(gSum(factor) + ROOTVSMALL);
}
else
{
// Unweighted form
const label n = returnReduce(values.size(), sumOp<label>());
result = gSum(values)/(scalar(n) + ROOTVSMALL);
}
break;
}
case opAreaAverage:
{
const scalarField factor(mag(Sf));
result = gSum(factor*values)/gSum(factor);
break;
}
case opWeightedAreaAverage:
case opAbsWeightedAreaAverage:
{
const scalarField factor(weightingFactor(weightField, Sf));
if (canWeight(weightField))
{
const scalarField factor(weightingFactor(weightField, Sf));
result = gSum(factor*values)/gSum(factor + ROOTVSMALL);
result = gSum(factor*values)/gSum(factor + ROOTVSMALL);
}
else
{
// Unweighted form
const scalarField factor(mag(Sf));
result = gSum(factor*values)/gSum(factor);
}
break;
}
case opAreaIntegrate:
{
const scalarField factor(mag(Sf));
result = gSum(factor*values);
break;
}
case opWeightedAreaIntegrate:
case opAbsWeightedAreaIntegrate:
{
const scalarField factor(weightingFactor(weightField, Sf));
result = gSum(factor*values);
break;
}
case opMin:
{
result = gMin(values);
break;
}
case opMax:
{
result = gMax(values);
if (canWeight(weightField))
{
tmp<scalarField> factor(weightingFactor(weightField, Sf));
result = gSum(factor*values);
}
else
{
// Unweighted form
tmp<scalarField> factor(mag(Sf));
result = gSum(factor*values);
}
break;
}
case opCoV:
@ -245,8 +261,8 @@ processSameTypeValues
for (direction d=0; d < pTraits<Type>::nComponents; ++d)
{
tmp<scalarField> vals = values.component(d);
scalar mean = component(meanValue, d);
tmp<scalarField> vals(values.component(d));
const scalar mean = component(meanValue, d);
scalar& res = setComponent(result, d);
res =
@ -286,8 +302,10 @@ Foam::tmp<Foam::scalarField>
Foam::functionObjects::fieldValues::surfaceFieldValue::weightingFactor
(
const Field<WeightType>& weightField
)
) const
{
// The scalar form is specialized.
// For other types always need mag() to generate a scalar field.
return mag(weightField);
}
@ -302,10 +320,8 @@ Foam::label Foam::functionObjects::fieldValues::surfaceFieldValue::writeAll
{
label nProcessed = 0;
forAll(fields_, i)
for (const word& fieldName : fields_)
{
const word& fieldName = fields_[i];
if
(
writeValues<scalar>(fieldName, Sf, weightField, surfToWrite)
@ -437,8 +453,8 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::filterField
forAll(values, i)
{
label facei = faceId_[i];
label patchi = facePatchId_[i];
const label facei = faceId_[i];
const label patchi = facePatchId_[i];
if (patchi >= 0)
{
values[i] = field.boundaryField()[patchi][facei];
@ -472,8 +488,8 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::filterField
forAll(values, i)
{
label facei = faceId_[i];
label patchi = facePatchId_[i];
const label facei = faceId_[i];
const label patchi = facePatchId_[i];
if (patchi >= 0)
{
values[i] = field.boundaryField()[patchi][facei];

View File

@ -49,32 +49,35 @@ const Foam::Enum
>
Foam::functionObjects::fieldValues::volFieldValue::operationTypeNames_
{
// Normal operations
{ operationType::opNone, "none" },
{ operationType::opSum, "sum" },
{ operationType::opWeightedSum, "weightedSum" },
{ operationType::opSumMag, "sumMag" },
{ operationType::opAverage, "average" },
{ operationType::opWeightedAverage, "weightedAverage" },
{ operationType::opVolAverage, "volAverage" },
{ operationType::opWeightedVolAverage, "weightedVolAverage" },
{ operationType::opVolIntegrate, "volIntegrate" },
{ operationType::opWeightedVolIntegrate, "weightedVolIntegrate" },
{ operationType::opMin, "min" },
{ operationType::opMax, "max" },
{ operationType::opSum, "sum" },
{ operationType::opSumMag, "sumMag" },
{ operationType::opAverage, "average" },
{ operationType::opVolAverage, "volAverage" },
{ operationType::opVolIntegrate, "volIntegrate" },
{ operationType::opCoV, "CoV" },
// Using weighting
{ operationType::opWeightedSum, "weightedSum" },
{ operationType::opWeightedAverage, "weightedAverage" },
{ operationType::opWeightedVolAverage, "weightedVolAverage" },
{ operationType::opWeightedVolIntegrate, "weightedVolIntegrate" },
};
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
bool Foam::functionObjects::fieldValues::volFieldValue::needsVol() const
bool Foam::functionObjects::fieldValues::volFieldValue::usesVol() const
{
// Only some operations need the cell volume
// Only a few operations require the cell volume
switch (operation_)
{
case opVolAverage:
case opWeightedVolAverage:
case opVolIntegrate:
case opWeightedVolAverage:
case opWeightedVolIntegrate:
case opCoV:
return true;
@ -85,20 +88,23 @@ bool Foam::functionObjects::fieldValues::volFieldValue::needsVol() const
}
bool Foam::functionObjects::fieldValues::volFieldValue::needsWeight() const
bool Foam::functionObjects::fieldValues::volFieldValue::usesWeight() const
{
// Only a few operations use weight field
switch (operation_)
{
case opWeightedSum:
case opWeightedAverage:
case opWeightedVolAverage:
case opWeightedVolIntegrate:
return true;
// Operation specifically tagged to require a weight field
return (operation_ & typeWeighted);
}
default:
return false;
}
bool Foam::functionObjects::fieldValues::volFieldValue::canWeight
(
const scalarField& weightField
) const
{
return
(
usesWeight()
&& returnReduce(!weightField.empty(), orOp<bool>()) // On some processor
);
}
@ -108,12 +114,23 @@ void Foam::functionObjects::fieldValues::volFieldValue::initialise
)
{
weightFieldName_ = "none";
if (needsWeight())
if (usesWeight())
{
if (dict.readIfPresent("weightField", weightFieldName_))
{
Info<< " weight field = " << weightFieldName_;
}
else
{
// Suggest possible alternative unweighted operation?
FatalIOErrorInFunction(dict)
<< "The '" << operationTypeNames_[operation_]
<< "' operation is missing a weightField." << nl
<< "Either provide the weightField, "
<< "use weightField 'none' to suppress weighting," << nl
<< "or use a different operation."
<< exit(FatalIOError);
}
}
Info<< nl << endl;
@ -133,10 +150,10 @@ void Foam::functionObjects::fieldValues::volFieldValue::writeFileHeader
writeCommented(os, "Time");
forAll(fields_, fieldi)
for (const word& fieldName : fields_)
{
os << tab << operationTypeNames_[operation_]
<< "(" << fields_[fieldi] << ")";
<< "(" << fieldName << ")";
}
os << endl;
@ -151,10 +168,8 @@ Foam::label Foam::functionObjects::fieldValues::volFieldValue::writeAll
{
label nProcessed = 0;
forAll(fields_, i)
for (const word& fieldName : fields_)
{
const word& fieldName = fields_[i];
if
(
writeValues<scalar>(fieldName, V, weightField)
@ -245,7 +260,7 @@ bool Foam::functionObjects::fieldValues::volFieldValue::write()
// Only some operations need the cell volume
scalarField V;
if (needsVol())
if (usesVol())
{
V = filterField(fieldValue::mesh_.V());
}

View File

@ -81,18 +81,18 @@ Usage
The \c operation is one of:
\plaintable
none | No operation
sum | Sum
weightedSum | Weighted sum
sumMag | Sum of component magnitudes
average | Ensemble average
weightedAverage | Weighted average
volAverage | Volume weighted average
weightedVolAverage | Weighted volume average
volIntegrate | Volume integral
weightedVolIntegrate | Weighted volume integral
min | Minimum
max | Maximum
sum | Sum
sumMag | Sum of component magnitudes
average | Ensemble average
volAverage | Volume weighted average
volIntegrate | Volume integral
CoV | Coefficient of variation: standard deviation/mean
weightedSum | Weighted sum
weightedAverage | Weighted average
weightedVolAverage | Weighted volume average
weightedVolIntegrate | Weighted volume integral
\endplaintable
See also
@ -134,22 +134,41 @@ public:
// Public data types
//- Bitmask values for operation variants
enum operationVariant
{
typeBase = 0, //!< Base operation
typeWeighted = 0x100, //!< Operation using weighting
};
//- Operation type enumeration
enum operationType
{
opNone, //!< None
opSum, //!< Sum
opWeightedSum, //!< Weighted sum
opSumMag, //!< Magnitude of sum
opAverage, //!< Average
opWeightedAverage, //!< Weighted average
opVolAverage, //!< Volume average
opWeightedVolAverage, //!< Weighted volume average
opVolIntegrate, //!< Volume integral
opWeightedVolIntegrate, //!< Weighted volume integral
// Normal operations
opNone = 0, //!< No operation
opMin, //!< Minimum
opMax, //!< Maximum
opCoV //!< Coefficient of variation
opSum, //!< Sum
opSumMag, //!< Magnitude of sum
opAverage, //!< Average
opVolAverage, //!< Volume average
opVolIntegrate, //!< Volume integral
opCoV, //!< Coefficient of variation
// Weighted variants
//! Weighted sum
opWeightedSum = (opSum | typeWeighted),
//! Weighted average
opWeightedAverage = (opAverage | typeWeighted),
//! Weighted volume average
opWeightedVolAverage = (opVolAverage | typeWeighted),
//! Weighted volume integral
opWeightedVolIntegrate = (opVolIntegrate | typeWeighted),
};
//- Operation type names
@ -169,11 +188,15 @@ protected:
// Protected Member Functions
//- True if the specified operation needs the cell-volume
bool needsVol() const;
//- True if the operation needs the cell-volume
bool usesVol() const;
//- True if the specified operation needs a weight-field
bool needsWeight() const;
//- True if the operation variant uses a weight-field
bool usesWeight() const;
//- True if operation variant uses a weight-field that is available.
// Checks for availability on any processor.
inline bool canWeight(const scalarField& weightField) const;
//- Initialise, e.g. cell addressing
void initialise(const dictionary& dict);

View File

@ -82,81 +82,8 @@ Type Foam::functionObjects::fieldValues::volFieldValue::processValues
Type result = Zero;
switch (operation_)
{
case opSum:
case opNone:
{
result = gSum(values);
break;
}
case opWeightedSum:
{
if (returnReduce(weightField.empty(), andOp<bool>()))
{
result = gSum(values);
}
else
{
result = gSum(weightField*values);
}
break;
}
case opSumMag:
{
result = gSum(cmptMag(values));
break;
}
case opAverage:
{
const label n = returnReduce(values.size(), sumOp<label>());
result = gSum(values)/(scalar(n) + ROOTVSMALL);
break;
}
case opWeightedAverage:
{
if (returnReduce(weightField.empty(), andOp<bool>()))
{
const label n = returnReduce(values.size(), sumOp<label>());
result = gSum(values)/(scalar(n) + ROOTVSMALL);
}
else
{
result =
gSum(weightField*values)/(gSum(weightField) + ROOTVSMALL);
}
break;
}
case opVolAverage:
{
result = gSum(values*V)/gSum(V);
break;
}
case opWeightedVolAverage:
{
if (returnReduce(weightField.empty(), andOp<bool>()))
{
result = gSum(V*values)/(gSum(V) + ROOTVSMALL);
}
else
{
result = gSum(weightField*V*values)
/(gSum(weightField*V) + ROOTVSMALL);
}
break;
}
case opVolIntegrate:
{
result = gSum(V*values);
break;
}
case opWeightedVolIntegrate:
{
if (returnReduce(weightField.empty(), andOp<bool>()))
{
result = gSum(V*values);
}
else
{
result = gSum(weightField*V*values);
}
break;
}
case opMin:
@ -169,6 +96,70 @@ Type Foam::functionObjects::fieldValues::volFieldValue::processValues
result = gMax(values);
break;
}
case opSumMag:
{
result = gSum(cmptMag(values));
break;
}
case opSum:
case opWeightedSum:
{
if (canWeight(weightField))
{
result = gSum(weightField*values);
}
else
{
// Unweighted form
result = gSum(values);
}
break;
}
case opAverage:
case opWeightedAverage:
{
if (canWeight(weightField))
{
result =
gSum(weightField*values)/(gSum(weightField) + ROOTVSMALL);
}
else
{
// Unweighted form
const label n = returnReduce(values.size(), sumOp<label>());
result = gSum(values)/(scalar(n) + ROOTVSMALL);
}
break;
}
case opVolAverage:
case opWeightedVolAverage:
{
if (canWeight(weightField))
{
result = gSum(weightField*V*values)
/(gSum(weightField*V) + ROOTVSMALL);
}
else
{
// Unweighted form
result = gSum(V*values)/(gSum(V) + ROOTVSMALL);
}
break;
}
case opVolIntegrate:
case opWeightedVolIntegrate:
{
if (canWeight(weightField))
{
result = gSum(weightField*V*values);
}
else
{
// Unweighted form
result = gSum(V*values);
}
break;
}
case opCoV:
{
const scalar sumV = gSum(V);
@ -177,8 +168,8 @@ Type Foam::functionObjects::fieldValues::volFieldValue::processValues
for (direction d=0; d < pTraits<Type>::nComponents; ++d)
{
scalarField vals(values.component(d));
scalar mean = component(meanValue, d);
tmp<scalarField> vals(values.component(d));
const scalar mean = component(meanValue, d);
scalar& res = setComponent(result, d);
res = sqrt(gSum(V*sqr(vals - mean))/sumV)/(mean + ROOTVSMALL);
@ -186,8 +177,6 @@ Type Foam::functionObjects::fieldValues::volFieldValue::processValues
break;
}
case opNone:
{}
}
return result;

View File

@ -3,7 +3,7 @@
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011-2016 OpenFOAM Foundation
\\/ M anipulation |
\\/ M anipulation | Copyright (C) 2017 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -435,7 +435,14 @@ Foam::surfaceToCell::surfaceToCell
),
nearDist_(readScalar(dict.lookup("nearDistance"))),
curvature_(readScalar(dict.lookup("curvature"))),
surfPtr_(new triSurface(surfName_)),
surfPtr_
(
new triSurface
(
surfName_,
dict.lookupOrDefault<scalar>("scale", -1)
)
),
querySurfPtr_(new triSurfaceSearch(*surfPtr_)),
IOwnPtrs_(true)
{

View File

@ -3,7 +3,7 @@
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011-2016 OpenFOAM Foundation
\\/ M anipulation |
\\/ M anipulation | Copyright (C) 2017 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -59,7 +59,7 @@ void Foam::surfaceToPoint::combine(topoSet& set, const bool add) const
{
cpuTime timer;
triSurface surf(surfName_);
triSurface surf(surfName_, scale_);
Info<< " Read surface from " << surfName_
<< " in = "<< timer.cpuTimeIncrement() << " s" << endl << endl;
@ -131,6 +131,7 @@ Foam::surfaceToPoint::surfaceToPoint
:
topoSetSource(mesh),
surfName_(surfName),
scale_(1.0),
nearDist_(nearDist),
includeInside_(includeInside),
includeOutside_(includeOutside)
@ -147,6 +148,7 @@ Foam::surfaceToPoint::surfaceToPoint
:
topoSetSource(mesh),
surfName_(fileName(dict.lookup("file")).expand()),
scale_(dict.lookupOrDefault<scalar>("scale", 1.0)),
nearDist_(readScalar(dict.lookup("nearDistance"))),
includeInside_(readBool(dict.lookup("includeInside"))),
includeOutside_(readBool(dict.lookup("includeOutside")))
@ -163,6 +165,7 @@ Foam::surfaceToPoint::surfaceToPoint
:
topoSetSource(mesh),
surfName_(checkIs(is)),
scale_(1.0),
nearDist_(readScalar(checkIs(is))),
includeInside_(readBool(checkIs(is))),
includeOutside_(readBool(checkIs(is)))

View File

@ -67,6 +67,9 @@ class surfaceToPoint
//- Name of surface file
const fileName surfName_;
//- Optional scaling for surface
const scalar scale_;
//- If > 0 : include points with distance to surface less than nearDist.
const scalar nearDist_;

View File

@ -218,7 +218,8 @@ Foam::label Foam::triSurfaceLoader::select(const wordReList& matcher)
Foam::autoPtr<Foam::triSurface> Foam::triSurfaceLoader::load
(
const enum loadingOption opt
const enum loadingOption opt,
const scalar scaleFactor
) const
{
autoPtr<triSurface> output;
@ -229,7 +230,8 @@ Foam::autoPtr<Foam::triSurface> Foam::triSurfaceLoader::load
}
else if (selected_.size() == 1)
{
output.set(new triSurface(directory_/selected_[0]));
// Use scaling (if any)
output.set(new triSurface(directory_/selected_[0], scaleFactor));
triSurface& surf = output();
@ -392,6 +394,12 @@ Foam::autoPtr<Foam::triSurface> Foam::triSurfaceLoader::load
}
}
// Apply scaling (if any)
if (scaleFactor > VSMALL)
{
points *= scaleFactor;
}
output.set(new triSurface(faces, patches, points, true));
return output;

View File

@ -154,9 +154,12 @@ public:
label select(const wordReList& matcher);
//- Load a single file, or load and combine multiple selected files
// Optionally scale the surface(s) on input, with a zero or negative
// scale factor treated as no scaling.
autoPtr<triSurface> load
(
const enum loadingOption opt = loadingOption::OFFSET_REGION
const enum loadingOption opt = loadingOption::OFFSET_REGION,
const scalar scaleFactor = -1
) const;
};

View File

@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2016 OpenCFD Ltd.
\\ / A nd | Copyright (C) 2016-2017 OpenCFD Ltd.
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@ -58,10 +58,10 @@ void Foam::surfMeshSamplers::checkOutNames
{
objectRegistry& reg = const_cast<objectRegistry&>(registry);
forAll(names, namei)
for (const word& fldName : names)
{
objectRegistry::iterator iter = reg.find(names[namei]);
if (iter != reg.end())
objectRegistry::iterator iter = reg.find(fldName);
if (iter.found())
{
registry.checkOut(*iter());
}
@ -156,10 +156,8 @@ bool Foam::surfMeshSamplers::execute()
DynamicList<word> added(derivedNames_.size());
DynamicList<word> cleanup(derivedNames_.size());
forAll(derivedNames_, namei)
for (const word& derivedName : derivedNames_)
{
const word& derivedName = derivedNames_[namei];
if (derivedName == "rhoU")
{
added.append(derivedName);
@ -190,20 +188,48 @@ bool Foam::surfMeshSamplers::execute()
{
cleanup.append(derivedName);
db.store
(
new volScalarField
const volScalarField& p =
mesh_.lookupObject<volScalarField>("p");
if (p.dimensions() == dimPressure)
{
db.store
(
derivedName,
// pTotal = p + U^2 / 2
new volScalarField
(
mesh_.lookupObject<volScalarField>("p")
+ 0.5
* mesh_.lookupObject<volScalarField>("rho")
* magSqr(mesh_.lookupObject<volVectorField>("U"))
derivedName,
// pTotal = p + rho U^2 / 2
(
p
+ 0.5
* mesh_.lookupObject<volScalarField>("rho")
* magSqr
(
mesh_.lookupObject<volVectorField>("U")
)
)
)
)
);
);
}
else
{
db.store
(
new volScalarField
(
derivedName,
// pTotal = p + U^2 / 2
(
p
+ 0.5
* magSqr
(
mesh_.lookupObject<volVectorField>("U")
)
)
)
);
}
}
}
else
@ -226,10 +252,8 @@ bool Foam::surfMeshSamplers::execute()
const wordList fields = acceptable.sortedToc();
if (!fields.empty())
{
forAll(*this, surfI)
for (surfMeshSampler& s : surfaces())
{
surfMeshSampler& s = operator[](surfI);
// Potentially monitor the update for writing geometry?
if (s.needsUpdate())
{
@ -258,21 +282,20 @@ bool Foam::surfMeshSamplers::write()
wordReList select(fieldSelection_.size() + derivedNames_.size());
label nElem = 0;
forAll(fieldSelection_, i)
for (const auto& item : fieldSelection_)
{
select[nElem++] = fieldSelection_[i];
select[nElem++] = item;
}
forAll(derivedNames_, i)
for (const auto& derivedName : derivedNames_)
{
select[nElem++] = derivedNames_[i];
select[nElem++] = derivedName;
}
// avoid duplicate entries
select = wordRes::uniq(select);
forAll(*this, surfI)
for (const surfMeshSampler& s : surfaces())
{
const surfMeshSampler& s = operator[](surfI);
s.write(select);
}
@ -317,10 +340,8 @@ bool Foam::surfMeshSamplers::read(const dictionary& dict)
if (this->size())
{
Info<< "Reading surface description:" << nl;
forAll(*this, surfI)
for (surfMeshSampler& s : surfaces())
{
surfMeshSampler& s = operator[](surfI);
Info<< " " << s.name() << nl;
if (createOnRead)
{
@ -370,9 +391,9 @@ void Foam::surfMeshSamplers::readUpdate(const polyMesh::readUpdateState state)
bool Foam::surfMeshSamplers::needsUpdate() const
{
forAll(*this, surfI)
for (const surfMeshSampler& s : surfaces())
{
if (operator[](surfI).needsUpdate())
if (s.needsUpdate())
{
return true;
}
@ -386,9 +407,9 @@ bool Foam::surfMeshSamplers::expire()
{
bool justExpired = false;
forAll(*this, surfI)
for (surfMeshSampler& s : surfaces())
{
if (operator[](surfI).expire())
if (s.expire())
{
justExpired = true;
}
@ -407,9 +428,9 @@ bool Foam::surfMeshSamplers::update()
}
bool updated = false;
forAll(*this, surfI)
for (surfMeshSampler& s : surfaces())
{
if (operator[](surfI).update())
if (s.update())
{
updated = true;
}

View File

@ -134,6 +134,18 @@ class surfMeshSamplers
const UList<word>& names
);
//- Access the sampling surfaces
inline const PtrList<surfMeshSampler>& surfaces() const
{
return static_cast<const PtrList<surfMeshSampler>&>(*this);
}
//- Access the sampling surfaces
inline PtrList<surfMeshSampler>& surfaces()
{
return static_cast<PtrList<surfMeshSampler>&>(*this);
}
//- Filter acceptable fields types
template<class Type>

View File

@ -35,6 +35,7 @@ Description
\*---------------------------------------------------------------------------*/
#include "triSurface.H"
#include "NASCore.H"
#include "IFstream.H"
#include "StringStream.H"
@ -46,25 +47,9 @@ namespace Foam
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
// Do weird things to extract number
static scalar parseNASCoord(const string& s)
static inline scalar parseNASCoord(const string& s)
{
size_t expSign = s.find_last_of("+-");
if (expSign != string::npos && expSign > 0 && !isspace(s[expSign-1]))
{
scalar mantissa = readScalar(IStringStream(s.substr(0, expSign))());
scalar exponent = readScalar(IStringStream(s.substr(expSign+1))());
if (s[expSign] == '-')
{
exponent = -exponent;
}
return mantissa*pow(10, exponent);
}
else
{
return readScalar(IStringStream(s)());
}
return fileFormats::NASCore::parseNASCoord(s);
}
@ -300,8 +285,8 @@ bool triSurface::readNAS(const fileName& fName)
// GRID* 126 0 -5.55999875E+02 -5.68730474E+02
// * 2.14897901E+02
label index =
readLabel(IStringStream(readNASToken(line, 8, linei))());
readNASToken(line, 8, linei);
readLabel(IStringStream(readNASToken(line, 16, linei))());
readNASToken(line, 16, linei);
scalar x = parseNASCoord(readNASToken(line, 16, linei));
scalar y = parseNASCoord(readNASToken(line, 16, linei));

View File

@ -760,17 +760,19 @@ Foam::triSurface::triSurface
}
Foam::triSurface::triSurface(const fileName& name)
Foam::triSurface::triSurface(const fileName& name, const scalar scaleFactor)
:
ParentType(List<Face>(), pointField()),
patches_(),
sortedEdgeFacesPtr_(nullptr),
edgeOwnerPtr_(nullptr)
{
word ext = name.ext();
const word ext = name.ext();
read(name, ext);
scalePoints(scaleFactor);
setDefaultPatches();
}
@ -886,8 +888,8 @@ void Foam::triSurface::movePoints(const pointField& newPoints)
void Foam::triSurface::scalePoints(const scalar scaleFactor)
{
// avoid bad scaling
if (scaleFactor > 0 && scaleFactor != 1.0)
// Avoid bad scaling
if (scaleFactor > VSMALL && scaleFactor != 1.0)
{
// Remove all geometry dependent data
clearTopology();

View File

@ -264,46 +264,55 @@ public:
//- Construct from triangles, patches, points.
triSurface
(
const List<labelledTri>&,
const geometricSurfacePatchList&,
const pointField&
const List<labelledTri>& triangles,
const geometricSurfacePatchList& patches,
const pointField& points
);
//- Construct from triangles, patches, points. Reuse storage.
triSurface
(
List<labelledTri>&,
const geometricSurfacePatchList&,
pointField&,
List<labelledTri>& triangles,
const geometricSurfacePatchList& patches,
pointField& points,
const bool reuse
);
//- Construct by transferring (triangles, points) components.
triSurface
(
const Xfer<List<labelledTri>>&,
const geometricSurfacePatchList&,
const Xfer<List<point>>&
const Xfer<List<labelledTri>>& triangles,
const geometricSurfacePatchList& patches,
const Xfer<List<point>>& points
);
//- Construct from triangles, points. Set patch names to default.
triSurface(const List<labelledTri>&, const pointField&);
triSurface
(
const List<labelledTri>& triangles,
const pointField& points
);
//- Construct from triangles, points. Set region to 0 and default
// patchName.
triSurface(const triFaceList&, const pointField&);
triSurface
(
const triFaceList& triangles,
const pointField& points
);
//- Construct from file name (uses extension to determine type)
triSurface(const fileName&);
//- Construct from file name (uses extension to determine type).
// Optional (positive, non-zero) point scaling is possible.
triSurface(const fileName& name, const scalar scaleFactor = -1);
//- Construct from Istream
triSurface(Istream&);
triSurface(Istream& is);
//- Construct from objectRegistry
triSurface(const Time& d);
//- Construct as copy
triSurface(const triSurface&);
triSurface(const triSurface& ts);
//- Destructor
@ -382,10 +391,10 @@ public:
// Edit
//- Move points
virtual void movePoints(const pointField&);
virtual void movePoints(const pointField& newPoints);
//- Scale points. A non-positive factor is ignored
virtual void scalePoints(const scalar);
//- Scale points. A non-positive factor is ignored.
virtual void scalePoints(const scalar scaleFactor);
//- Check/remove duplicate/degenerate triangles
void checkTriangles(const bool verbose);
@ -447,13 +456,13 @@ public:
// Write
//- Write to Ostream in simple FOAM format
void write(Ostream&) const;
void write(Ostream& os) const;
//- Generic write routine. Chooses writer based on extension.
void write(const fileName&, const bool sortByRegion = false) const;
//- Write to database
void write(const Time&) const;
void write(const Time& d) const;
//- Write some statistics
void writeStats(Ostream& os) const;

View File

@ -57,6 +57,11 @@ oversetInterpolation
method inverseDistance;
}
oversetInterpolationRequired
{
alpha.water;
}
fluxRequired
{
default no;

View File

@ -85,6 +85,8 @@ PIMPLE
ddtCorr yes;
correctPhi no;
oversetAdjustPhi no;
moveMeshOuterCorrectors no;
turbOnFinalIterOnly no;
}