Merge branch 'develop' of develop.openfoam.com:Development/OpenFOAM-plus into develop
@@ -65,7 +65,7 @@ int main(int argc, char *argv[])
     #include "createTime.H"
     #include "createDynamicFvMesh.H"
     #include "initContinuityErrs.H"
-    #include "createControl.H"
+    pimpleControl pimple(mesh);
     #include "createTimeControls.H"
     #include "createDyMControls.H"
     #include "createFields.H"

@@ -46,6 +46,13 @@

     phiHbyA += phig;

+    if (adjustFringe)
+    {
+        fvc::makeRelative(phiHbyA, U);
+        oversetAdjustPhi(phiHbyA, U);
+        fvc::makeAbsolute(phiHbyA, U);
+    }
+
     // Update the pressure BCs to ensure flux consistency
     constrainPressure(p_rgh, U, phiHbyA, rAUf, MRF);

@@ -72,7 +79,8 @@
         "minGradP",
         fvc::reconstruct((phig - p_rghEqn.flux())/rAUf)
     );
-    U = HbyA + rAU*cellMask*minGradP;
+    //U = HbyA + rAU*cellMask*minGradP;
+    U = fvc::reconstruct(phi);
     U.correctBoundaryConditions();
     fvOptions.correct(U);
 }

@@ -11,5 +11,6 @@ moveMeshOuterCorrectors =
 massFluxInterpolation =
     pimple.dict().lookupOrDefault("massFluxInterpolation", false);

-ddtCorr =
-    pimple.dict().lookupOrDefault("ddtCorr", true);
+ddtCorr = pimple.dict().lookupOrDefault("ddtCorr", true);
+adjustFringe = pimple.dict().lookupOrDefault("oversetAdjustPhi", false);
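Note: the controls above are read with lookupOrDefault from the solver's PIMPLE dictionary. A minimal sketch of how a case might set them in system/fvSolution follows; the entry names and defaults come from the calls in the diff, while the surrounding dictionary layout (and the other PIMPLE entries) is illustrative only:

PIMPLE
{
    nOuterCorrectors    1;
    nCorrectors         2;

    // Optional entries handled by createDyMControls.H above (defaults shown)
    massFluxInterpolation   false;
    ddtCorr                 true;
    oversetAdjustPhi        false;  // true activates the adjustFringe flux correction
}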
@@ -41,10 +41,15 @@ using namespace Foam;
 autoPtr<triSurface> loadSurface
 (
     const Foam::Time& runTime,
-    const fileName& surfName
+    const fileName& surfName,
+    const scalar scaleFactor
 )
 {
-    Info<< "Reading surface " << surfName << endl;
+    Info<< "Reading surface " << surfName << nl;
+    if (scaleFactor > 0)
+    {
+        Info<<"Scaling : " << scaleFactor << nl;
+    }

     const fileName fallback =
         runTime.constantPath()/triSurfaceMesh::meshSubDir/surfName;

@@ -52,11 +57,11 @@ autoPtr<triSurface> loadSurface
     autoPtr<triSurface> surfPtr;
     if (isFile(surfName))
     {
-        surfPtr.set(new triSurface(surfName));
+        surfPtr.set(new triSurface(surfName, scaleFactor));
     }
     else if (isFile(fallback))
     {
-        surfPtr.set(new triSurface(fallback));
+        surfPtr.set(new triSurface(fallback, scaleFactor));
     }
     else
     {

@@ -102,6 +107,12 @@ int main(int argc, char *argv[])
         "mergeTol",
         "merge points (and edges) using the specified tolerance"
     );
+    argList::addOption
+    (
+        "scale",
+        "factor",
+        "geometry scaling factor"
+    );

     #include "addDictOption.H"

@@ -117,16 +128,18 @@ int main(int argc, char *argv[])
     #include "setRootCase.H"
     #include "createTime.H"

+    const scalar scaleFactor = args.optionLookupOrDefault<scalar>("scale", -1);
+
     const word outputFile(args.executable() + ".obj");

     const fileName surf1Name(args[1]);
-    triSurface surf1 = loadSurface(runTime, surf1Name)();
+    triSurface surf1 = loadSurface(runTime, surf1Name, scaleFactor)();
     Info<< surf1Name << " statistics:" << endl;
     surf1.writeStats(Info);
     Info<< endl;

     const fileName surf2Name(args[2]);
-    triSurface surf2 = loadSurface(runTime, surf2Name)();
+    triSurface surf2 = loadSurface(runTime, surf2Name, scaleFactor)();
     Info<< surf2Name << " statistics:" << endl;
     surf2.writeStats(Info);
     Info<< endl;
applications/test/surfaceMeshConvert/Make/files (new file, 3 additions)
@@ -0,0 +1,3 @@
+Test-surfaceMeshConvert.C
+
+EXE = $(FOAM_APPBIN)/Test-surfaceMeshConvert
@@ -22,17 +22,16 @@ License
     along with OpenFOAM.  If not, see <http://www.gnu.org/licenses/>.

 Application
-    surfaceMeshConvertTesting
+    Test-surfaceMeshConvert

 Group
     grpSurfaceUtilities

 Description
-    Converts from one surface mesh format to another, but primarily
-    used for testing functionality.
+    Test conversions from one surface mesh format to another.

 Usage
-    \b surfaceMeshConvertTesting inputFile outputFile [OPTION]
+    \b Test-surfaceMeshConvert inputFile outputFile [OPTION]

     Options:
     - \par -clean

@@ -91,7 +91,7 @@ int main(int argc, char *argv[])
     // ~~~~~~~~~~~~~~~~~~~~~
     fileFormats::FIREMeshWriter::binary = !args.optionFound("ascii");

-    // default: rescale from [m] to [mm]
+    // Default: no rescaling
     scalar scaleFactor = 1;
     if (args.optionReadIfPresent("scale", scaleFactor))
     {
@@ -43,8 +43,8 @@ Usage
     or -yawPitchRoll (yawdegrees pitchdegrees rolldegrees)
     or -rollPitchYaw (rolldegrees pitchdegrees yawdegrees)

-    -scale vector
-        Scales the points by the given vector.
+    -scale scalar|vector
+        Scales the points by the given scalar or vector.

     The any or all of the three options may be specified and are processed
     in the above order.

@@ -182,9 +182,9 @@ int main(int argc, char *argv[])
     argList::addOption
     (
         "scale",
-        "vector",
-        "scale by the specified amount - eg, '(0.001 0.001 0.001)' for a "
-        "uniform [mm] to [m] scaling"
+        "scalar | vector",
+        "scale by the specified amount - eg, for a uniform [mm] to [m] scaling "
+        "use either (0.001 0.001 0.001)' or simply '0.001'"
     );

     #include "addRegionOption.H"

@@ -262,10 +262,10 @@ int main(int argc, char *argv[])
            << " pitch " << v.y() << nl
            << " yaw " << v.z() << nl;

-        // Convert to radians
+        // degToRad
         v *= pi/180.0;

-        quaternion R(quaternion::rotationSequence::XYZ, v);
+        const quaternion R(quaternion::rotationSequence::XYZ, v);

         Info<< "Rotating points by quaternion " << R << endl;
         points = transform(R, points);

@@ -282,16 +282,10 @@ int main(int argc, char *argv[])
            << " pitch " << v.y() << nl
            << " roll " << v.z() << nl;

-        // Convert to radians
+        // degToRad
         v *= pi/180.0;

-        scalar yaw = v.x();
-        scalar pitch = v.y();
-        scalar roll = v.z();
-
-        quaternion R = quaternion(vector(0, 0, 1), yaw);
-        R *= quaternion(vector(0, 1, 0), pitch);
-        R *= quaternion(vector(1, 0, 0), roll);
+        const quaternion R(quaternion::rotationSequence::ZYX, v);

         Info<< "Rotating points by quaternion " << R << endl;
         points = transform(R, points);

@@ -302,13 +296,34 @@ int main(int argc, char *argv[])
         }
     }

-    if (args.optionReadIfPresent("scale", v))
+    if (args.optionFound("scale"))
     {
-        Info<< "Scaling points by " << v << endl;
-
-        points.replace(vector::X, v.x()*points.component(vector::X));
-        points.replace(vector::Y, v.y()*points.component(vector::Y));
-        points.replace(vector::Z, v.z()*points.component(vector::Z));
+        // Use readList to handle single or multiple values
+        const List<scalar> scaling = args.optionReadList<scalar>("scale");
+
+        if (scaling.size() == 1)
+        {
+            Info<< "Scaling points uniformly by " << scaling[0] << nl;
+            points *= scaling[0];
+        }
+        else if (scaling.size() == 3)
+        {
+            Info<< "Scaling points by ("
+                << scaling[0] << " "
+                << scaling[1] << " "
+                << scaling[2] << ")" << nl;
+
+            points.replace(vector::X, scaling[0]*points.component(vector::X));
+            points.replace(vector::Y, scaling[1]*points.component(vector::Y));
+            points.replace(vector::Z, scaling[2]*points.component(vector::Z));
+        }
+        else
+        {
+            FatalError
+                << "-scale with 1 or 3 components only" << nl
+                << "given: " << args["scale"] << endl
+                << exit(FatalError);
+        }
     }

     // Set the precision of the points data to 10
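Note: the updated help text above says -scale now accepts either a single scalar or a vector; the option/region context suggests this is the transformPoints-style utility. A hypothetical invocation (values illustrative only):

# uniform [mm] -> [m] scaling
transformPoints -scale 0.001

# equivalent per-component form
transformPoints -scale '(0.001 0.001 0.001)'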
@@ -160,13 +160,59 @@ public:
             const std::string& datasetName
         );

+        //- Add names to array selection
+        template<class StringType>
+        static label addToArray
+        (
+            vtkDataArraySelection* select,
+            const std::string& prefix,
+            const UList<StringType>& names
+        );
+
+        //- Add names to array selection
+        template<class StringType>
+        static label addToArray
+        (
+            vtkDataArraySelection* select,
+            const UList<StringType>& names,
+            const std::string& suffix = string::null
+        );
+
+        //- Add objects of Type to array selection
+        template<class Type>
+        static label addToSelection
+        (
+            vtkDataArraySelection* select,
+            const std::string& prefix,
+            const IOobjectList& objects
+        );
+
         //- Add objects of Type to array selection
         template<class Type>
         static label addToSelection
         (
             vtkDataArraySelection* select,
             const IOobjectList& objects,
-            const std::string& prefix = string::null
+            const std::string& suffix = string::null
+        );
+
+        //- Add objects of Type to array selection
+        template<class Type>
+        static label addToSelection
+        (
+            vtkDataArraySelection* select,
+            const std::string& prefix,
+            const HashTable<wordHashSet>& objects
+        );
+
+        //- Add objects of Type to array selection
+        template<class Type>
+        static label addToSelection
+        (
+            vtkDataArraySelection* select,
+            const HashTable<wordHashSet>& objects,
+            const std::string& suffix = string::null
         );

@@ -28,29 +28,119 @@ License

 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

+template<class StringType>
+Foam::label Foam::foamPvCore::addToArray
+(
+    vtkDataArraySelection *select,
+    const std::string& prefix,
+    const UList<StringType>& names
+)
+{
+    if (prefix.empty())
+    {
+        for (const auto& name : names)
+        {
+            select->AddArray(name.c_str());
+        }
+    }
+    else
+    {
+        for (const auto& name : names)
+        {
+            select->AddArray((prefix + name).c_str());
+        }
+    }
+
+    return names.size();
+}
+
+
+template<class StringType>
+Foam::label Foam::foamPvCore::addToArray
+(
+    vtkDataArraySelection *select,
+    const UList<StringType>& names,
+    const std::string& suffix
+)
+{
+    if (suffix.empty())
+    {
+        for (const auto& name : names)
+        {
+            select->AddArray(name.c_str());
+        }
+    }
+    else
+    {
+        for (const auto& name : names)
+        {
+            select->AddArray((name + suffix).c_str());
+        }
+    }
+
+    return names.size();
+}
+
+
+template<class Type>
+Foam::label Foam::foamPvCore::addToSelection
+(
+    vtkDataArraySelection *select,
+    const std::string& prefix,
+    const IOobjectList& objects
+)
+{
+    return addToArray(select, prefix, objects.sortedNames(Type::typeName));
+}
+
+
 template<class Type>
 Foam::label Foam::foamPvCore::addToSelection
 (
     vtkDataArraySelection *select,
     const IOobjectList& objects,
-    const std::string& prefix
+    const std::string& suffix
 )
 {
-    const wordList names = objects.sortedNames(Type::typeName);
-
-    forAll(names, i)
-    {
-        if (prefix.empty())
-        {
-            select->AddArray(names[i].c_str());
-        }
-        else
-        {
-            select->AddArray((prefix + names[i]).c_str());
-        }
-    }
-
-    return names.size();
+    return addToArray(select, objects.sortedNames(Type::typeName), suffix);
+}
+
+
+template<class Type>
+Foam::label Foam::foamPvCore::addToSelection
+(
+    vtkDataArraySelection *select,
+    const std::string& prefix,
+    const HashTable<wordHashSet>& objects
+)
+{
+    auto iter = objects.cfind(Type::typeName);
+
+    if (iter.found())
+    {
+        return addToArray(select, prefix, iter.object().sortedToc());
+    }
+
+    return 0;
+}
+
+
+template<class Type>
+Foam::label Foam::foamPvCore::addToSelection
+(
+    vtkDataArraySelection *select,
+    const HashTable<wordHashSet>& objects,
+    const std::string& suffix
+)
+{
+    auto iter = objects.cfind(Type::typeName);
+
+    if (iter.found())
+    {
+        return addToArray(select, iter.object().sortedToc(), suffix);
+    }
+
+    return 0;
 }

@@ -155,7 +155,10 @@ class vtkPVFoam
         vtkSmartPointer<dataType> getCopy() const
         {
             auto copy = vtkSmartPointer<dataType>::New();
-            copy->ShallowCopy(vtkgeom);
+            if (vtkgeom)
+            {
+                copy->ShallowCopy(vtkgeom);
+            }
             return copy;
         }

@@ -163,7 +166,10 @@ class vtkPVFoam
         void reuse()
         {
             dataset = vtkSmartPointer<dataType>::New();
-            dataset->ShallowCopy(vtkgeom);
+            if (vtkgeom)
+            {
+                dataset->ShallowCopy(vtkgeom);
+            }
         }

         //- Set the geometry and make a shallow copy to dataset
@@ -85,11 +85,11 @@ Foam::wordList Foam::vtkPVFoam::getZoneNames
     wordList names(zmesh.size());
     label nZone = 0;

-    forAll(zmesh, zonei)
+    for (const auto& zn : zmesh)
     {
-        if (!zmesh[zonei].empty())
+        if (!zn.empty())
         {
-            names[nZone++] = zmesh[zonei].name();
+            names[nZone++] = zn.name();
         }
     }
     names.setSize(nZone);

@@ -100,8 +100,6 @@ Foam::wordList Foam::vtkPVFoam::getZoneNames

 Foam::wordList Foam::vtkPVFoam::getZoneNames(const word& zoneType) const
 {
-    wordList names;
-
     // mesh not loaded - read from file
     IOobject ioObj
     (

@@ -119,14 +117,17 @@ Foam::wordList Foam::vtkPVFoam::getZoneNames(const word& zoneType) const
         false
     );

+    wordList names;
     if (ioObj.typeHeaderOk<cellZoneMesh>(false, false))
     {
         zonesEntries zones(ioObj);

         names.setSize(zones.size());
-        forAll(zones, zonei)
+        label nZone = 0;
+
+        for (const auto& zn : zones)
         {
-            names[zonei] = zones[zonei].keyword();
+            names[nZone++] = zn.keyword();
         }
     }

@@ -167,7 +168,7 @@ void Foam::vtkPVFoam::updateInfoLagrangian
         << " " << dbPtr_->timePath()/cloud::prefix << endl;
     }

-    // use the db directly since this might be called without a mesh,
+    // Use the db directly since this might be called without a mesh,
     // but the region must get added back in
     fileName lagrangianPrefix(cloud::prefix);
     if (meshRegion_ != polyMesh::defaultRegion)

@@ -175,22 +176,23 @@ void Foam::vtkPVFoam::updateInfoLagrangian
         lagrangianPrefix = meshRegion_/cloud::prefix;
     }

-    // Search for list of lagrangian objects for this time
-    fileNameList cloudDirs
-    (
-        readDir(dbPtr_->timePath()/lagrangianPrefix, fileName::DIRECTORY)
-    );
+    // List of lagrangian objects across all times
+    HashSet<fileName> names;
+
+    for (const instant& t : dbPtr_().times())
+    {
+        names.insert
+        (
+            readDir
+            (
+                dbPtr_->path()/t.name()/lagrangianPrefix,
+                fileName::DIRECTORY
+            )
+        );
+    }

     rangeLagrangian_.reset(select->GetNumberOfArrays());
-    forAll(cloudDirs, cloudi)
-    {
-        // Add cloud to GUI list
-        select->AddArray
-        (
-            ("lagrangian/" + cloudDirs[cloudi]).c_str()
-        );
-        ++rangeLagrangian_;
-    }
+    rangeLagrangian_ += addToArray(select, "lagrangian/", names.sortedToc());

     if (debug)
     {
@@ -257,39 +259,26 @@ void Foam::vtkPVFoam::updateInfoPatches
                 const polyPatch& pp = patches[patchId];
                 if (pp.size())
                 {
-                    enabledEntries.insert
-                    (
-                        "patch/" + pp.name()
-                    );
+                    enabledEntries.insert("patch/" + pp.name());
                 }
             }
         }
     }

-    // Sort group names
-    Foam::sort(displayNames);
-    for (const auto& name : displayNames)
-    {
-        select->AddArray(name.c_str());
-        ++rangePatches_;
-    }
+    Foam::sort(displayNames);  // Sorted group names
+    rangePatches_ += addToArray(select, displayNames);

     // Add (non-zero) patches to the list of mesh parts
     // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

     if (!reader_->GetShowGroupsOnly())
     {
-        forAll(patches, patchi)
+        for (const polyPatch& pp : patches)
         {
-            const polyPatch& pp = patches[patchi];
-
             if (pp.size())
             {
                 // Add patch to GUI list
-                select->AddArray
-                (
-                    ("patch/" + pp.name()).c_str()
-                );
+                select->AddArray(("patch/" + pp.name()).c_str());
                 ++rangePatches_;
             }
         }

@@ -339,9 +328,9 @@ void Foam::vtkPVFoam::updateInfoPatches
             && patchDict.readIfPresent("inGroups", groupNames)
             )
             {
-                forAll(groupNames, groupI)
+                for (const auto& groupName : groupNames)
                 {
-                    groups(groupNames[groupI]).insert(patchi);
+                    groups(groupName).insert(patchi);
                 }
             }
         }

@@ -370,21 +359,13 @@ void Foam::vtkPVFoam::updateInfoPatches
         {
             for (auto patchId : patchIDs)
             {
-                enabledEntries.insert
-                (
-                    "patch/" + names[patchId]
-                );
+                enabledEntries.insert("patch/" + names[patchId]);
             }
         }
     }

-    // Sort group names
-    Foam::sort(displayNames);
-    for (const auto& name : displayNames)
-    {
-        select->AddArray(name.c_str());
-        ++rangePatches_;
-    }
+    Foam::sort(displayNames);  // Sorted group names
+    rangePatches_ += addToArray(select, displayNames);

     // Add (non-zero) patches to the list of mesh parts
     // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@@ -396,10 +377,7 @@ void Foam::vtkPVFoam::updateInfoPatches
             // Valid patch if nFace > 0 - add patch to GUI list
             if (sizes[patchi])
             {
-                select->AddArray
-                (
-                    ("patch/" + names[patchi]).c_str()
-                );
+                select->AddArray(("patch/" + names[patchi]).c_str());
                 ++rangePatches_;
             }
         }
@@ -430,74 +408,44 @@ void Foam::vtkPVFoam::updateInfoZones
             << " [meshPtr=" << (meshPtr_ ? "set" : "null") << "]" << endl;
     }

-    wordList namesLst;
-
-    //
-    // cellZones information
-    // ~~~~~~~~~~~~~~~~~~~~~
-    if (meshPtr_)
-    {
-        namesLst = getZoneNames(meshPtr_->cellZones());
-    }
-    else
-    {
-        namesLst = getZoneNames("cellZones");
-    }
-
-    rangeCellZones_.reset(select->GetNumberOfArrays());
-    forAll(namesLst, elemI)
-    {
-        select->AddArray
-        (
-            ("cellZone/" + namesLst[elemI]).c_str()
-        );
-        ++rangeCellZones_;
-    }
-
-    //
-    // faceZones information
-    // ~~~~~~~~~~~~~~~~~~~~~
-    if (meshPtr_)
-    {
-        namesLst = getZoneNames(meshPtr_->faceZones());
-    }
-    else
-    {
-        namesLst = getZoneNames("faceZones");
-    }
-
-    rangeFaceZones_.reset(select->GetNumberOfArrays());
-    forAll(namesLst, elemI)
-    {
-        select->AddArray
-        (
-            ("faceZone/" + namesLst[elemI]).c_str()
-        );
-        ++rangeFaceZones_;
-    }
-
-    //
-    // pointZones information
-    // ~~~~~~~~~~~~~~~~~~~~~~
-    if (meshPtr_)
-    {
-        namesLst = getZoneNames(meshPtr_->pointZones());
-    }
-    else
-    {
-        namesLst = getZoneNames("pointZones");
-    }
-
-    rangePointZones_.reset(select->GetNumberOfArrays());
-    forAll(namesLst, elemI)
-    {
-        select->AddArray
-        (
-            ("pointZone/" + namesLst[elemI]).c_str()
-        );
-        ++rangePointZones_;
-    }
+    // cellZones
+    {
+        const wordList names =
+        (
+            meshPtr_
+          ? getZoneNames(meshPtr_->cellZones())
+          : getZoneNames("cellZones")
+        );
+
+        rangeCellZones_.reset(select->GetNumberOfArrays());
+        rangeCellZones_ += addToArray(select, "cellZone/", names);
+    }
+
+    // faceZones
+    {
+        const wordList names =
+        (
+            meshPtr_
+          ? getZoneNames(meshPtr_->faceZones())
+          : getZoneNames("faceZones")
+        );
+
+        rangeFaceZones_.reset(select->GetNumberOfArrays());
+        rangeFaceZones_ += addToArray(select, "faceZone/", names);
+    }
+
+    // pointZones
+    {
+        const wordList names =
+        (
+            meshPtr_
+          ? getZoneNames(meshPtr_->pointZones())
+          : getZoneNames("pointZones")
+        );
+
+        rangePointZones_.reset(select->GetNumberOfArrays());
+        rangePointZones_ += addToArray(select, "pointZone/", names);
+    }

     if (debug)
@@ -525,14 +473,14 @@ void Foam::vtkPVFoam::updateInfoSets
     // Add names of sets. Search for last time directory with a sets
     // subdirectory. Take care not to search beyond the last mesh.

-    word facesInstance = dbPtr_().findInstance
+    const word facesInstance = dbPtr_().findInstance
     (
         meshDir_,
         "faces",
         IOobject::READ_IF_PRESENT
     );

-    word setsInstance = dbPtr_().findInstance
+    const word setsInstance = dbPtr_().findInstance
     (
         meshDir_/"sets",
         word::null,

@@ -540,7 +488,7 @@ void Foam::vtkPVFoam::updateInfoSets
         facesInstance
     );

-    IOobjectList objects(dbPtr_(), setsInstance, meshDir_/"sets");
+    const IOobjectList objects(dbPtr_(), setsInstance, meshDir_/"sets");

     if (debug)
     {

@@ -553,24 +501,24 @@ void Foam::vtkPVFoam::updateInfoSets
     rangeCellSets_ += addToSelection<cellSet>
     (
         select,
-        objects,
-        "cellSet/"
+        "cellSet/",
+        objects
     );

     rangeFaceSets_.reset(select->GetNumberOfArrays());
     rangeFaceSets_ += addToSelection<faceSet>
     (
         select,
-        objects,
-        "faceSet/"
+        "faceSet/",
+        objects
     );

     rangePointSets_.reset(select->GetNumberOfArrays());
     rangePointSets_ += addToSelection<pointSet>
     (
         select,
-        objects,
-        "pointSet/"
+        "pointSet/",
+        objects
     );

     if (debug)
@@ -594,21 +542,20 @@ void Foam::vtkPVFoam::updateInfoLagrangianFields
     HashSet<string> enabled = getSelectedArraySet(select);
     select->RemoveAllArrays();

-    // TODO - currently only get fields from ONE cloud
-    // have to decide if the second set of fields get mixed in
-    // or dealt with separately

     const arrayRange& range = rangeLagrangian_;
     if (range.empty())
     {
         return;
     }

-    // Add Lagrangian fields even if particles are not enabled?
-    const int partId = range.start();
-    const word cloudName = getReaderPartName(partId);
+    // Reuse the previously determined cloud information.
+    DynamicList<word> cloudNames(range.size());
+    for (auto partId : range)
+    {
+        cloudNames.append(getReaderPartName(partId));
+    }

-    // use the db directly since this might be called without a mesh,
+    // Use the db directly since this might be called without a mesh,
     // but the region must get added back in
     fileName lagrangianPrefix(cloud::prefix);
     if (meshRegion_ != polyMesh::defaultRegion)

@@ -616,19 +563,37 @@ void Foam::vtkPVFoam::updateInfoLagrangianFields
         lagrangianPrefix = meshRegion_/cloud::prefix;
     }

-    IOobjectList objects
-    (
-        dbPtr_(),
-        dbPtr_().timeName(),
-        lagrangianPrefix/cloudName
-    );
-
-    addToSelection<IOField<label>>(select, objects);
-    addToSelection<IOField<scalar>>(select, objects);
-    addToSelection<IOField<vector>>(select, objects);
-    addToSelection<IOField<sphericalTensor>>(select, objects);
-    addToSelection<IOField<symmTensor>>(select, objects);
-    addToSelection<IOField<tensor>>(select, objects);
+    // List of lagrangian fields across all clouds and all times.
+    // ParaView displays "(partial)" after field names that only apply
+    // to some of the clouds.
+    HashTable<wordHashSet> fields;
+
+    for (const instant& t : dbPtr_().times())
+    {
+        for (const auto& cloudName : cloudNames)
+        {
+            const HashTable<wordHashSet> localFields =
+                IOobjectList
+                (
+                    dbPtr_(),
+                    t.name(),
+                    lagrangianPrefix/cloudName
+                ).classes();
+
+            forAllConstIters(localFields, iter)
+            {
+                fields(iter.key()) |= iter.object();
+            }
+        }
+    }
+
+    // Known/supported field-types
+    addToSelection<IOField<label>>(select, fields);
+    addToSelection<IOField<scalar>>(select, fields);
+    addToSelection<IOField<vector>>(select, fields);
+    addToSelection<IOField<sphericalTensor>>(select, fields);
+    addToSelection<IOField<symmTensor>>(select, fields);
+    addToSelection<IOField<tensor>>(select, fields);

     // Restore the enabled selections
     setSelectedArrayEntries(select, enabled);
@@ -71,6 +71,12 @@ int main(int argc, char *argv[])
         "mergeRegions",
         "combine regions from both surfaces"
     );
+    argList::addOption
+    (
+        "scale",
+        "factor",
+        "geometry scaling factor on input surfaces"
+    );

     argList args(argc, argv);

@@ -81,6 +87,8 @@ int main(int argc, char *argv[])
     const bool addPoint = args.optionFound("points");
     const bool mergeRegions = args.optionFound("mergeRegions");

+    const scalar scaleFactor = args.optionLookupOrDefault<scalar>("scale", -1);
+
     if (addPoint)
     {
         Info<< "Reading a surface and adding points from a file"

@@ -117,8 +125,12 @@ int main(int argc, char *argv[])
             << "Writing : " << outFileName << nl << endl;
     }

-    const triSurface surface1(inFileName1);
+    if (scaleFactor > 0)
+    {
+        Info<< "Scaling : " << scaleFactor << nl;
+    }
+
+    const triSurface surface1(inFileName1, scaleFactor);
     Info<< "Surface1:" << endl;
     surface1.writeStats(Info);
     Info<< endl;

@@ -131,7 +143,7 @@ int main(int argc, char *argv[])
     if (addPoint)
     {
         IFstream pointsFile(args["points"]);
-        pointField extraPoints(pointsFile);
+        const pointField extraPoints(pointsFile);

         Info<< "Additional Points:" << extraPoints.size() << endl;

@@ -139,17 +151,16 @@ int main(int argc, char *argv[])
         label pointi = pointsAll.size();
         pointsAll.setSize(pointsAll.size() + extraPoints.size());

-        forAll(extraPoints, i)
+        for (const auto& pt : extraPoints)
         {
-            pointsAll[pointi++] = extraPoints[i];
+            pointsAll[pointi++] = pt;
         }

         combinedSurf = triSurface(surface1, surface1.patches(), pointsAll);
     }
     else
     {
-        const triSurface surface2(inFileName2);
+        const triSurface surface2(inFileName2, scaleFactor);

         Info<< "Surface2:" << endl;
         surface2.writeStats(Info);
         Info<< endl;

@@ -165,21 +176,19 @@ int main(int argc, char *argv[])

         label pointi = 0;
         // Copy points1 into pointsAll
-        forAll(points1, point1i)
+        for (const auto& pt : points1)
         {
-            pointsAll[pointi++] = points1[point1i];
+            pointsAll[pointi++] = pt;
         }
         // Add surface2 points
-        forAll(points2, point2i)
+        for (const auto& pt : points2)
         {
-            pointsAll[pointi++] = points2[point2i];
+            pointsAll[pointi++] = pt;
         }

         label trianglei = 0;

-
-
         // Determine map for both regions
         label nNewPatches = 0;
         labelList patch1Map(surface1.patches().size());

@@ -192,17 +201,17 @@ int main(int argc, char *argv[])
         forAll(surface1.patches(), i)
         {
             const word& name = surface1.patches()[i].name();
-            HashTable<label>::iterator iter = nameToPatch.find(name);
+            auto iter = nameToPatch.find(name);

             label combinedi;
-            if (iter == nameToPatch.end())
+            if (iter.found())
             {
-                combinedi = nameToPatch.size();
-                nameToPatch.insert(name, combinedi);
+                combinedi = iter.object();
             }
             else
             {
-                combinedi = iter();
+                combinedi = nameToPatch.size();
+                nameToPatch.insert(name, combinedi);
             }
             patch1Map[i] = combinedi;
         }

@@ -212,17 +221,17 @@ int main(int argc, char *argv[])
         forAll(surface2.patches(), i)
         {
             const word& name = surface2.patches()[i].name();
-            HashTable<label>::iterator iter = nameToPatch.find(name);
+            auto iter = nameToPatch.find(name);

             label combinedi;
-            if (iter == nameToPatch.end())
+            if (iter.found())
             {
-                combinedi = nameToPatch.size();
-                nameToPatch.insert(name, combinedi);
+                combinedi = iter.object();
             }
             else
             {
-                combinedi = iter();
+                combinedi = nameToPatch.size();
+                nameToPatch.insert(name, combinedi);
             }
             patch2Map[i] = combinedi;
         }

@@ -245,11 +254,9 @@ int main(int argc, char *argv[])
         }

-
-
         // Copy triangles1 into trianglesAll
-        forAll(surface1, facei)
+        for (const labelledTri& tri : surface1)
         {
-            const labelledTri& tri = surface1[facei];
             labelledTri& destTri = facesAll[trianglei++];

             destTri.triFace::operator=(tri);

@@ -257,10 +264,8 @@ int main(int argc, char *argv[])
         }

         // Add (renumbered) surface2 triangles
-        forAll(surface2, facei)
+        for (const labelledTri& tri : surface2)
         {
-            const labelledTri& tri = surface2[facei];
-
             labelledTri& destTri = facesAll[trianglei++];
             destTri[0] = tri[0] + points1.size();
             destTri[1] = tri[1] + points1.size();
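Note: the hunks above add an optional -scale input scaling to the surface-combining utility. A hypothetical invocation, with illustrative file names and the usual argument order (input1 input2 output):

surfaceAdd surface1.stl surface2.stl combined.obj -scale 0.001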
@@ -1517,6 +1517,12 @@ int main(int argc, char *argv[])
     argList::validArgs.append("surfaceFile1");
     argList::validArgs.append("surfaceFile2");

+    argList::addOption
+    (
+        "scale",
+        "factor",
+        "Geometry scaling factor (both surfaces)"
+    );
     argList::addBoolOption
     (
         "surf1Baffle",

@@ -1587,6 +1593,10 @@ int main(int argc, char *argv[])
     }


+    // Scale factor for both surfaces:
+    const scalar scaleFactor
+        = args.optionLookupOrDefault<scalar>("scale", -1);
+
     const word surf1Name(args[2]);
     Info<< "Reading surface " << surf1Name << endl;
     triSurfaceMesh surf1

@@ -1599,6 +1609,11 @@ int main(int argc, char *argv[])
             runTime
         )
     );
+    if (scaleFactor > 0)
+    {
+        Info<< "Scaling : " << scaleFactor << nl;
+        surf1.scalePoints(scaleFactor);
+    }

     Info<< surf1Name << " statistics:" << endl;
     surf1.writeStats(Info);

@@ -1616,6 +1631,11 @@ int main(int argc, char *argv[])
             runTime
         )
     );
+    if (scaleFactor > 0)
+    {
+        Info<< "Scaling : " << scaleFactor << nl;
+        surf2.scalePoints(scaleFactor);
+    }

     Info<< surf2Name << " statistics:" << endl;
     surf2.writeStats(Info);

@@ -1627,7 +1647,7 @@ int main(int argc, char *argv[])
     edgeIntersections edgeCuts1;
     edgeIntersections edgeCuts2;

-    bool invertedSpace = args.optionFound("invertedSpace");
+    const bool invertedSpace = args.optionFound("invertedSpace");

     if (invertedSpace && validActions[action] == booleanSurface::DIFFERENCE)
     {

@@ -1736,9 +1756,7 @@ int main(int argc, char *argv[])
         const extendedFeatureEdgeMesh& feMesh = feMeshPtr();

         feMesh.writeStats(Info);
-
         feMesh.write();
-
         feMesh.writeObj(feMesh.path()/sFeatFileName);

         {
@@ -60,7 +60,13 @@ int main(int argc, char *argv[])
     argList::addBoolOption
     (
         "noClean",
-        "suppress surface checking/cleanup on the input surface"
+        "Suppress surface checking/cleanup on the input surface"
+    );
+    argList::addOption
+    (
+        "scale",
+        "factor",
+        "Input geometry scaling factor"
     );
     argList args(argc, argv);

@@ -77,7 +83,12 @@ int main(int argc, char *argv[])


     Info<< "Reading surface from " << inFileName << " ..." << nl << endl;
-    triSurface surf(inFileName);
+
+    triSurface surf
+    (
+        inFileName,
+        args.optionLookupOrDefault<scalar>("scale", -1)
+    );
     surf.writeStats(Info);

     if (!args.optionFound("noClean"))

@@ -90,7 +101,7 @@ int main(int argc, char *argv[])

     while (true)
     {
-        label nEdgeCollapse = collapseEdge(surf, minLen);
+        const label nEdgeCollapse = collapseEdge(surf, minLen);

         if (nEdgeCollapse == 0)
         {

@@ -99,7 +110,7 @@ int main(int argc, char *argv[])
     }
     while (true)
     {
-        label nSplitEdge = collapseBase(surf, minLen, minQuality);
+        const label nSplitEdge = collapseBase(surf, minLen, minQuality);

         if (nSplitEdge == 0)
         {
@@ -28,7 +28,7 @@ Group
     grpSurfaceUtilities

 Description
-    Surface coarsening using `bunnylod'
+    Surface coarsening using 'bunnylod'

     Reference:
     \verbatim

@@ -75,6 +75,13 @@ int main(int argc, char *argv[])
     argList::validArgs.append("surfaceFile");
     argList::validArgs.append("reductionFactor");
     argList::validArgs.append("output surfaceFile");
+    argList::addOption
+    (
+        "scale",
+        "factor",
+        "input geometry scaling factor"
+    );

     argList args(argc, argv);

     const fileName inFileName = args[1];

@@ -90,40 +97,39 @@ int main(int argc, char *argv[])
             << exit(FatalError);
     }

-    Info<< "Input surface :" << inFileName << endl
-        << "Reduction factor:" << reduction << endl
-        << "Output surface :" << outFileName << endl << endl;
-
-    const triSurface surf(inFileName);
+    const scalar scaleFactor = args.optionLookupOrDefault<scalar>("scale", -1);
+
+    Info<< "Input surface :" << inFileName << nl
+        << "Scaling factor :" << scaleFactor << nl
+        << "Reduction factor:" << reduction << nl
+        << "Output surface :" << outFileName << nl
+        << endl;
+
+    const triSurface surf(inFileName, scaleFactor);

     Info<< "Surface:" << endl;
     surf.writeStats(Info);
     Info<< endl;

-    ::List< ::Vector> vert;    // global list of vertices
-    ::List< ::tridata> tri;    // global list of triangles
+    ::List<::Vector> vert;     // global list of vertices
+    ::List<::tridata> tri;     // global list of triangles


     // Convert triSurface to progmesh format. Note: can use global point
     // numbering since surface read in from file.
     const pointField& pts = surf.points();

-    forAll(pts, ptI)
+    for (const point& pt : pts)
     {
-        const point& pt = pts[ptI];
-
-        vert.Add( ::Vector(pt.x(), pt.y(), pt.z()));
+        vert.Add(::Vector(pt.x(), pt.y(), pt.z()));
     }

-    forAll(surf, facei)
+    for (const labelledTri& f : surf)
     {
-        const labelledTri& f = surf[facei];
-
         tridata td;
-        td.v[0]=f[0];
-        td.v[1]=f[1];
-        td.v[2]=f[2];
+        td.v[0] = f[0];
+        td.v[1] = f[1];
+        td.v[2] = f[2];
         tri.Add(td);
     }

@@ -133,20 +139,20 @@ int main(int argc, char *argv[])
     ::ProgressiveMesh(vert,tri,collapse_map,permutation);

     // rearrange the vertex list
-    ::List< ::Vector> temp_list;
-    for (int i=0;i<vert.num;i++)
+    ::List<::Vector> temp_list;
+    for (int i=0; i<vert.num; i++)
     {
         temp_list.Add(vert[i]);
     }
-    for (int i=0;i<vert.num;i++)
+    for (int i=0; i<vert.num; i++)
     {
-        vert[permutation[i]]=temp_list[i];
+        vert[permutation[i]] = temp_list[i];
     }

     // update the changes in the entries in the triangle list
-    for (int i=0;i<tri.num;i++)
+    for (int i=0; i<tri.num; i++)
     {
-        for (int j=0;j<3;j++)
+        for (int j=0; j<3; j++)
         {
             tri[i].v[j] = permutation[tri[i].v[j]];
         }
@@ -74,24 +74,24 @@ int main(int argc, char *argv[])
     argList::addBoolOption
     (
         "clean",
-        "perform some surface checking/cleanup on the input surface"
+        "Perform some surface checking/cleanup on the input surface"
     );
     argList::addBoolOption
     (
         "group",
-        "reorder faces into groups; one per region"
+        "Reorder faces into groups; one per region"
     );
     argList::addOption
     (
         "scale",
         "factor",
-        "geometry scaling factor - default is 1"
+        "Input geometry scaling factor"
     );
     argList::addOption
     (
         "writePrecision",
         "label",
-        "write to output with the specified precision"
+        "Write to output with the specified precision"
     );

     argList args(argc, argv);

@@ -116,8 +116,10 @@ int main(int argc, char *argv[])
             << exit(FatalError);
     }

+    const scalar scaleFactor = args.optionLookupOrDefault<scalar>("scale", -1);
+
     Info<< "Reading : " << importName << endl;
-    triSurface surf(importName);
+    triSurface surf(importName, scaleFactor);

     Info<< "Read surface:" << endl;
     surf.writeStats(Info);

@@ -144,13 +146,6 @@ int main(int argc, char *argv[])
     }

     Info<< "writing " << exportName;
-
-    scalar scaleFactor = 0;
-    if (args.optionReadIfPresent("scale", scaleFactor) && scaleFactor > 0)
-    {
-        Info<< " with scaling " << scaleFactor;
-        surf.scalePoints(scaleFactor);
-    }
     Info<< endl;

     surf.write(exportName, sortByRegion);
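Note: with the change above, -scale is applied when the surface is read rather than just before writing. A hypothetical invocation of the conversion utility (file names illustrative only):

surfaceConvert input.stl output.obj -clean -scale 0.001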
@@ -339,8 +339,15 @@ int main(int argc, char *argv[])
             << " writeObj=" << writeObj
             << " writeVTK=" << writeVTK << nl;

+        scalar scaleFactor = -1;
+        // Allow rescaling of the surface points (eg, mm -> m)
+        if (surfaceDict.readIfPresent("scale", scaleFactor) && scaleFactor > 0)
+        {
+            Info<<"Scaling : " << scaleFactor << nl;
+        }
+
         // Load a single file, or load and combine multiple selected files
-        autoPtr<triSurface> surfPtr = loader.load(loadingOption);
+        autoPtr<triSurface> surfPtr = loader.load(loadingOption, scaleFactor);
         if (!surfPtr.valid() || surfPtr().empty())
         {
             FatalErrorInFunction
|||||||
@ -50,6 +50,9 @@ outputName1
|
|||||||
|
|
||||||
surfaces (surface1.stl surface2.nas);
|
surfaces (surface1.stl surface2.nas);
|
||||||
|
|
||||||
|
// mm -> m scaling
|
||||||
|
// scale 0.001;
|
||||||
|
|
||||||
// Generate additional intersection features (none | self | region)
|
// Generate additional intersection features (none | self | region)
|
||||||
intersectionMethod self;
|
intersectionMethod self;
|
||||||
|
|
||||||
|
|||||||
@ -123,9 +123,8 @@ int main(int argc, char *argv[])
|
|||||||
Info<< " " << points[f[fp]] << "\n";
|
Info<< " " << points[f[fp]] << "\n";
|
||||||
}
|
}
|
||||||
|
|
||||||
Info<< endl;
|
Info<< nl
|
||||||
|
<< "End\n" << endl;
|
||||||
Info<< "End\n" << endl;
|
|
||||||
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -90,7 +90,7 @@ int main(int argc, char *argv[])
     vector refPt = Zero;
     bool calcAroundRefPt = args.optionReadIfPresent("referencePoint", refPt);

-    triSurface surf(surfFileName);
+    const triSurface surf(surfFileName);

     scalar m = 0.0;
     vector cM = Zero;
|||||||
@ -44,7 +44,7 @@ Usage
|
|||||||
Specify a feature angle
|
Specify a feature angle
|
||||||
|
|
||||||
|
|
||||||
E.g. inflate surface by 2cm with 1.5 safety factor:
|
E.g. inflate surface by 20mm with 1.5 safety factor:
|
||||||
surfaceInflate DTC-scaled.obj 0.02 1.5 -featureAngle 45 -nSmooth 2
|
surfaceInflate DTC-scaled.obj 0.02 1.5 -featureAngle 45 -nSmooth 2
|
||||||
|
|
||||||
\*---------------------------------------------------------------------------*/
|
\*---------------------------------------------------------------------------*/
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
-surfaceMeshConvertTesting.C
-
-EXE = $(FOAM_APPBIN)/surfaceMeshConvertTesting
@@ -87,7 +87,7 @@ int main(int argc, char *argv[])
     (
         "scale",
         "factor",
-        "geometry scaling factor - default is 1"
+        "input geometry scaling factor"
     );
     argList::addBoolOption
     (

@@ -119,10 +119,10 @@ int main(int argc, char *argv[])
     // use UnsortedMeshedSurface, not MeshedSurface to maintain ordering
     UnsortedMeshedSurface<face> surf(importName);

-    scalar scaling = 0;
-    if (args.optionReadIfPresent("scale", scaling) && scaling > 0)
+    const scalar scaling = args.optionLookupOrDefault<scalar>("scale", -1);
+    if (scaling > 0)
     {
-        Info<< " -scale " << scaling << endl;
+        Info<< " -scale " << scaling << nl;
         surf.scalePoints(scaling);
     }

@@ -63,6 +63,12 @@ int main(int argc, char *argv[])
         "usePierceTest",
         "determine orientation by counting number of intersections"
     );
+    argList::addOption
+    (
+        "scale",
+        "factor",
+        "input geometry scaling factor"
+    );

     argList args(argc, argv);

@@ -86,11 +92,14 @@ int main(int argc, char *argv[])
         Info<< "outside" << endl;
     }

+    const scalar scaling = args.optionLookupOrDefault<scalar>("scale", -1);
+    if (scaling > 0)
+    {
+        Info<< "Input scaling: " << scaling << nl;
+    }
+
     // Load surface
-    triSurface surf(surfFileName);
+    triSurface surf(surfFileName, scaling);


     bool anyFlipped = false;

@@ -118,11 +127,11 @@ int main(int argc, char *argv[])

     if (anyFlipped)
     {
-        Info<< "Flipped orientation of (part of) surface." << endl;
+        Info<< "Flipped orientation of (part of) surface." << nl;
     }
     else
     {
-        Info<< "Did not flip orientation of any triangle of surface." << endl;
+        Info<< "Did not flip orientation of any triangle of surface." << nl;
     }

     Info<< "Writing new surface to " << outFileName << endl;
|||||||
@ -46,31 +46,47 @@ using namespace Foam;
|
|||||||
|
|
||||||
int main(int argc, char *argv[])
|
int main(int argc, char *argv[])
|
||||||
{
|
{
|
||||||
|
argList::addNote
|
||||||
|
(
|
||||||
|
"Merge points on surface if they are within absolute distance [m]."
|
||||||
|
);
|
||||||
argList::noParallel();
|
argList::noParallel();
|
||||||
argList::validArgs.append("surfaceFile");
|
argList::validArgs.append("surfaceFile");
|
||||||
argList::validArgs.append("merge distance");
|
argList::validArgs.append("merge distance");
|
||||||
argList::validArgs.append("output surfaceFile");
|
argList::validArgs.append("output surfaceFile");
|
||||||
|
|
||||||
|
argList::addOption
|
||||||
|
(
|
||||||
|
"scale",
|
||||||
|
"factor",
|
||||||
|
"input geometry scaling factor"
|
||||||
|
);
|
||||||
|
|
||||||
argList args(argc, argv);
|
argList args(argc, argv);
|
||||||
|
|
||||||
const fileName surfFileName = args[1];
|
const fileName surfFileName = args[1];
|
||||||
const scalar mergeTol = args.argRead<scalar>(2);
|
const scalar mergeTol = args.argRead<scalar>(2);
|
||||||
const fileName outFileName = args[3];
|
const fileName outFileName = args[3];
|
||||||
|
|
||||||
Info<< "Reading surface from " << surfFileName << " ..." << endl;
|
const scalar scaling = args.optionLookupOrDefault<scalar>("scale", -1);
|
||||||
Info<< "Merging points within " << mergeTol << " metre." << endl;
|
|
||||||
|
|
||||||
triSurface surf1(surfFileName);
|
Info<< "Reading surface from " << surfFileName << " ..." << nl
|
||||||
|
<< "Merging points within " << mergeTol << " metre." << nl;
|
||||||
|
if (scaling > 0)
|
||||||
|
{
|
||||||
|
Info<< "input scaling " << scaling << nl;
|
||||||
|
}
|
||||||
|
|
||||||
Info<< "Original surface:" << endl;
|
const triSurface surf1(surfFileName, scaling);
|
||||||
|
|
||||||
|
Info<< "Original surface:" << nl;
|
||||||
surf1.writeStats(Info);
|
surf1.writeStats(Info);
|
||||||
|
|
||||||
|
|
||||||
triSurface cleanSurf(surf1);
|
triSurface cleanSurf(surf1);
|
||||||
|
|
||||||
while (true)
|
while (true)
|
||||||
{
|
{
|
||||||
label nOldVert = cleanSurf.nPoints();
|
const label nOldVert = cleanSurf.nPoints();
|
||||||
|
|
||||||
cleanSurf = triSurfaceTools::mergePoints(cleanSurf, mergeTol);
|
cleanSurf = triSurfaceTools::mergePoints(cleanSurf, mergeTol);
|
||||||
|
|
||||||
|
|||||||
@ -105,7 +105,8 @@ int main(int argc, char *argv[])
|
|||||||
argList::addNote
|
argList::addNote
|
||||||
(
|
(
|
||||||
"Redistribute a triSurface. "
|
"Redistribute a triSurface. "
|
||||||
"The specified surface must be located in the constant/triSurface directory"
|
"The specified surface must be located in the constant/triSurface "
|
||||||
|
"directory"
|
||||||
);
|
);
|
||||||
|
|
||||||
argList::validArgs.append("triSurfaceMesh");
|
argList::validArgs.append("triSurfaceMesh");
|
||||||
|
|||||||
@ -85,9 +85,9 @@ int main(int argc, char *argv[])
|
|||||||
argList::addOption
|
argList::addOption
|
||||||
(
|
(
|
||||||
"scale",
|
"scale",
|
||||||
"vector",
|
"scalar | vector",
|
||||||
"scale by the specified amount - eg, '(0.001 0.001 0.001)' for a "
|
"scale by the specified amount - eg, for a uniform [mm] to [m] scaling "
|
||||||
"uniform [mm] to [m] scaling"
|
"use either (0.001 0.001 0.001)' or simply '0.001'"
|
||||||
);
|
);
|
||||||
argList::addOption
|
argList::addOption
|
||||||
(
|
(
|
||||||
@ -138,7 +138,7 @@ int main(int argc, char *argv[])
|
|||||||
n1n2[0] /= mag(n1n2[0]);
|
n1n2[0] /= mag(n1n2[0]);
|
||||||
n1n2[1] /= mag(n1n2[1]);
|
n1n2[1] /= mag(n1n2[1]);
|
||||||
|
|
||||||
tensor T = rotationTensor(n1n2[0], n1n2[1]);
|
const tensor T = rotationTensor(n1n2[0], n1n2[1]);
|
||||||
|
|
||||||
Info<< "Rotating points by " << T << endl;
|
Info<< "Rotating points by " << T << endl;
|
||||||
|
|
||||||
@ -151,10 +151,10 @@ int main(int argc, char *argv[])
|
|||||||
<< " pitch " << v.y() << nl
|
<< " pitch " << v.y() << nl
|
||||||
<< " yaw " << v.z() << nl;
|
<< " yaw " << v.z() << nl;
|
||||||
|
|
||||||
// Convert to radians
|
// degToRad
|
||||||
v *= pi/180.0;
|
v *= pi/180.0;
|
||||||
|
|
||||||
quaternion R(quaternion::rotationSequence::XYZ, v);
|
const quaternion R(quaternion::rotationSequence::XYZ, v);
|
||||||
|
|
||||||
Info<< "Rotating points by quaternion " << R << endl;
|
Info<< "Rotating points by quaternion " << R << endl;
|
||||||
points = transform(R, points);
|
points = transform(R, points);
|
||||||
@ -166,29 +166,43 @@ int main(int argc, char *argv[])
|
|||||||
<< " pitch " << v.y() << nl
|
<< " pitch " << v.y() << nl
|
||||||
<< " roll " << v.z() << nl;
|
<< " roll " << v.z() << nl;
|
||||||
|
|
||||||
|
// degToRad
|
||||||
// Convert to radians
|
|
||||||
v *= pi/180.0;
|
v *= pi/180.0;
|
||||||
|
|
||||||
scalar yaw = v.x();
|
const quaternion R(quaternion::rotationSequence::ZYX, v);
|
||||||
scalar pitch = v.y();
|
|
||||||
scalar roll = v.z();
|
|
||||||
|
|
||||||
quaternion R = quaternion(vector(0, 0, 1), yaw);
|
|
||||||
R *= quaternion(vector(0, 1, 0), pitch);
|
|
||||||
R *= quaternion(vector(1, 0, 0), roll);
|
|
||||||
|
|
||||||
Info<< "Rotating points by quaternion " << R << endl;
|
Info<< "Rotating points by quaternion " << R << endl;
|
||||||
points = transform(R, points);
|
points = transform(R, points);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (args.optionReadIfPresent("scale", v))
|
if (args.optionFound("scale"))
|
||||||
{
|
{
|
||||||
Info<< "Scaling points by " << v << endl;
|
// Use readList to handle single or multiple values
|
||||||
|
const List<scalar> scaling = args.optionReadList<scalar>("scale");
|
||||||
|
|
||||||
points.replace(vector::X, v.x()*points.component(vector::X));
|
if (scaling.size() == 1)
|
||||||
points.replace(vector::Y, v.y()*points.component(vector::Y));
|
{
|
||||||
points.replace(vector::Z, v.z()*points.component(vector::Z));
|
Info<< "Scaling points uniformly by " << scaling[0] << nl;
|
||||||
|
points *= scaling[0];
|
||||||
|
}
|
||||||
|
else if (scaling.size() == 3)
|
||||||
|
{
|
||||||
|
Info<< "Scaling points by ("
|
||||||
|
<< scaling[0] << " "
|
||||||
|
<< scaling[1] << " "
|
||||||
|
<< scaling[2] << ")" << nl;
|
||||||
|
|
||||||
|
points.replace(vector::X, scaling[0]*points.component(vector::X));
|
||||||
|
points.replace(vector::Y, scaling[1]*points.component(vector::Y));
|
||||||
|
points.replace(vector::Z, scaling[2]*points.component(vector::Z));
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
FatalError
|
||||||
|
<< "-scale with 1 or 3 components only" << nl
|
||||||
|
<< "given: " << args["scale"] << endl
|
||||||
|
<< exit(FatalError);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
surf1.movePoints(points);
|
surf1.movePoints(points);
|
||||||
|
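The reworked `-scale` option of surfaceTransformPoints above accepts either a single value (uniform scaling) or three values (per-component scaling) and rejects anything else. A standalone sketch of that branch logic, with `std::array`/`std::vector` standing in for the OpenFOAM point and list types:

```cpp
#include <array>
#include <cstdio>
#include <vector>

using point = std::array<double, 3>;

// Illustrative stand-in for the -scale branch above: one value scales all
// components uniformly, three values scale X, Y, Z independently.
static bool scalePoints(std::vector<point>& pts, const std::vector<double>& s)
{
    if (s.size() == 1)
    {
        for (point& p : pts) { for (double& x : p) { x *= s[0]; } }
        return true;
    }
    if (s.size() == 3)
    {
        for (point& p : pts) { for (int d = 0; d < 3; ++d) { p[d] *= s[d]; } }
        return true;
    }
    std::fprintf(stderr, "-scale with 1 or 3 components only\n");
    return false;
}
```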
@@ -224,7 +224,6 @@ _of_complete_cache_[surfaceInertia]="-case -density -referencePoint | -noFunctionObjects -srcDoc -doc -help"
 _of_complete_cache_[surfaceInflate]="-case -featureAngle -nSmooth | -checkSelfIntersection -debug -noFunctionObjects -srcDoc -doc -help"
 _of_complete_cache_[surfaceLambdaMuSmooth]="-featureFile | -srcDoc -doc -help"
 _of_complete_cache_[surfaceMeshConvert]="-case -dict -from -scaleIn -scaleOut -to | -clean -noFunctionObjects -tri -srcDoc -doc -help"
-_of_complete_cache_[surfaceMeshConvertTesting]="-case -scale | -clean -noFunctionObjects -orient -stdout -surfMesh -testModify -triFace -triSurface -unsorted -srcDoc -doc -help"
 _of_complete_cache_[surfaceMeshExport]="-case -dict -from -name -scaleIn -scaleOut -to | -clean -noFunctionObjects -srcDoc -doc -help"
 _of_complete_cache_[surfaceMeshImport]="-case -dict -from -name -scaleIn -scaleOut -to | -clean -noFunctionObjects -srcDoc -doc -help"
 _of_complete_cache_[surfaceMeshInfo]="-case -scale | -areas -noFunctionObjects -xml -srcDoc -doc -help"
@@ -125,7 +125,8 @@ public:

    // Constructors

-        //- Create a writer object with given output scaling
+        //- Create a writer object with given output scaling.
+        //  Treats a zero or negative scale factor as unity scaling.
        meshWriter
        (
            const polyMesh&,
@@ -97,7 +97,8 @@ public:

    // Constructors

-        //- Prepare for writing, optionally with scaling
+        //- Prepare for writing, optionally with scaling.
+        //  Treats a zero or negative scale factor as unity scaling.
        FIREMeshWriter(const polyMesh&, const scalar scaleFactor = 1.0);


@@ -34,28 +34,33 @@ Foam::fileFormats::NASCore::NASCore()

// * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * * //

-Foam::scalar Foam::fileFormats::NASCore::parseNASCoord
-(
-    const string& s
-)
+Foam::scalar Foam::fileFormats::NASCore::parseNASCoord(const string& s)
 {
-    size_t expSign = s.find_last_of("+-");
+    scalar value = 0;

-    if (expSign != string::npos && expSign > 0 && !isspace(s[expSign-1]))
+    const size_t expSign = s.find_last_of("+-");
+
+    if (expSign != std::string::npos && expSign > 0 && !isspace(s[expSign-1]))
     {
-        scalar mantissa = readScalar(IStringStream(s.substr(0, expSign))());
-        scalar exponent = readScalar(IStringStream(s.substr(expSign+1))());
+        scalar exponent = 0;
+
+        // Parse as per strtod/strtof - allowing trailing space or [Ee]
+        readScalar(s.substr(0, expSign).c_str(), value); // mantissa
+        readScalar(s.substr(expSign+1).c_str(), exponent);

         if (s[expSign] == '-')
         {
             exponent = -exponent;
         }
-        return mantissa * pow(10, exponent);
+
+        value *= ::pow(10, exponent);
     }
     else
     {
-        return readScalar(IStringStream(s)());
+        readScalar(s.c_str(), value);
     }

+    return value;
 }


@@ -55,8 +55,8 @@ public:

    // Public Member Functions

-        //- Do weird things to extract number
-        static scalar parseNASCoord(const string&);
+        //- Extract numbers from things like "-2.358-8" (same as "-2.358e-8")
+        static scalar parseNASCoord(const string& s);


    // Constructors

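parseNASCoord above handles the NASTRAN "short" floating-point format, where the exponent marker is dropped and a string such as "-2.358-8" means -2.358e-8: the string is split at the last embedded sign that is not preceded by whitespace, the two halves are parsed separately and recombined. A standalone sketch of the same idea using only the C++ standard library (std::strtod replaces the OpenFOAM readScalar):

```cpp
#include <cctype>
#include <cmath>
#include <cstdlib>
#include <iostream>
#include <string>

// Illustrative parser for NASTRAN short floats, mirroring the logic above.
static double parseNasCoord(const std::string& s)
{
    const std::size_t expSign = s.find_last_of("+-");

    if
    (
        expSign != std::string::npos
     && expSign > 0
     && !std::isspace(static_cast<unsigned char>(s[expSign - 1]))
    )
    {
        double mantissa = std::strtod(s.substr(0, expSign).c_str(), nullptr);
        double exponent = std::strtod(s.substr(expSign + 1).c_str(), nullptr);

        if (s[expSign] == '-')
        {
            exponent = -exponent;
        }
        return mantissa*std::pow(10.0, exponent);
    }

    return std::strtod(s.c_str(), nullptr);  // plain number, e.g. "-1.5"
}

int main()
{
    std::cout << parseNasCoord("-2.358-8") << '\n';  // -2.358e-08
    std::cout << parseNasCoord("1.5e+3") << '\n';    // 1500
}
```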
@@ -361,6 +361,23 @@ Foam::label Foam::isoCutCell::calcSubCell
            // Cell cut at least at one face
            cellStatus_ = 0;
            calcIsoFaceCentreAndArea();

+            // In the rare but occuring cases where a cell is only touched at a
+            // point or a line the isoFaceArea_ will have zero length and here the
+            // cell should be treated as either completely empty or full.
+            if (mag(isoFaceArea_) < 10*SMALL)
+            {
+                if (fullySubFaces_.empty())
+                {
+                    // Cell fully above isosurface
+                    cellStatus_ = 1;
+                }
+                else
+                {
+                    // Cell fully below isosurface
+                    cellStatus_ = -1;
+                }
+            }
        }
        else if (fullySubFaces_.empty())
        {

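The guard added to isoCutCell::calcSubCell above re-classifies cells whose iso-face area vector is numerically zero (the cell is touched only at a point or an edge) as fully above or fully below the isosurface, depending on whether any fully submerged faces exist. A standalone sketch of that classification rule, with a plain struct standing in for the OpenFOAM vector type and the tolerance passed in explicitly:

```cpp
#include <cmath>

struct Vec3 { double x, y, z; };

static double mag(const Vec3& v)
{
    return std::sqrt(v.x*v.x + v.y*v.y + v.z*v.z);
}

// Illustrative re-classification: returns +1 (fully above), -1 (fully below)
// or 0 (genuinely cut). Names are stand-ins, not the isoCutCell members.
static int classifyCut
(
    const Vec3& isoFaceArea,
    bool hasFullySubmergedFaces,
    double tol
)
{
    if (mag(isoFaceArea) < tol)
    {
        return hasFullySubmergedFaces ? -1 : 1;
    }
    return 0;  // a real cut face exists
}
```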
@@ -69,23 +69,32 @@ const Foam::Enum
 >
 Foam::functionObjects::fieldValues::surfaceFieldValue::operationTypeNames_
 {
+    // Normal operations
     { operationType::opNone, "none" },
+    { operationType::opMin, "min" },
+    { operationType::opMax, "max" },
     { operationType::opSum, "sum" },
-    { operationType::opWeightedSum, "weightedSum" },
     { operationType::opSumMag, "sumMag" },
     { operationType::opSumDirection, "sumDirection" },
     { operationType::opSumDirectionBalance, "sumDirectionBalance" },
     { operationType::opAverage, "average" },
-    { operationType::opWeightedAverage, "weightedAverage" },
     { operationType::opAreaAverage, "areaAverage" },
-    { operationType::opWeightedAreaAverage, "weightedAreaAverage" },
     { operationType::opAreaIntegrate, "areaIntegrate" },
-    { operationType::opWeightedAreaIntegrate, "weightedAreaIntegrate" },
-    { operationType::opMin, "min" },
-    { operationType::opMax, "max" },
     { operationType::opCoV, "CoV" },
     { operationType::opAreaNormalAverage, "areaNormalAverage" },
     { operationType::opAreaNormalIntegrate, "areaNormalIntegrate" },
+
+    // Using weighting
+    { operationType::opWeightedSum, "weightedSum" },
+    { operationType::opWeightedAverage, "weightedAverage" },
+    { operationType::opWeightedAreaAverage, "weightedAreaAverage" },
+    { operationType::opWeightedAreaIntegrate, "weightedAreaIntegrate" },
+
+    // Using absolute weighting
+    { operationType::opAbsWeightedSum, "absWeightedSum" },
+    { operationType::opAbsWeightedAverage, "absWeightedAverage" },
+    { operationType::opAbsWeightedAreaAverage, "absWeightedAreaAverage" },
+    { operationType::opAbsWeightedAreaIntegrate, "absWeightedAreaIntegrate" },
 };

 const Foam::Enum
@@ -244,7 +253,7 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
    {
        if (facePatchId_[i] != -1)
        {
-            label patchi = facePatchId_[i];
+            const label patchi = facePatchId_[i];
            globalFacesIs[i] += mesh_.boundaryMesh()[patchi].start();
        }
    }
@@ -279,9 +288,8 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
    // My own data first
    {
        const faceList& fcs = allFaces[Pstream::myProcNo()];
-        forAll(fcs, i)
+        for (const face& f : fcs)
        {
-            const face& f = fcs[i];
            face& newF = faces[nFaces++];
            newF.setSize(f.size());
            forAll(f, fp)
@@ -291,9 +299,9 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
        }

        const pointField& pts = allPoints[Pstream::myProcNo()];
-        forAll(pts, i)
+        for (const point& pt : pts)
        {
-            points[nPoints++] = pts[i];
+            points[nPoints++] = pt;
        }
    }

@@ -303,9 +311,8 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
        if (proci != Pstream::myProcNo())
        {
            const faceList& fcs = allFaces[proci];
-            forAll(fcs, i)
+            for (const face& f : fcs)
            {
-                const face& f = fcs[i];
                face& newF = faces[nFaces++];
                newF.setSize(f.size());
                forAll(f, fp)
@@ -315,9 +322,9 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
            }

            const pointField& pts = allPoints[proci];
-            forAll(pts, i)
+            for (const point& pt : pts)
            {
-                points[nPoints++] = pts[i];
+                points[nPoints++] = pt;
            }
        }
    }
@@ -343,9 +350,9 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::combineMeshGeometry
        }

        points.transfer(newPoints);
-        forAll(faces, i)
+        for (face& f : faces)
        {
-            inplaceRenumber(oldToNew, faces[i]);
+            inplaceRenumber(oldToNew, f);
        }
    }
 }

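The loop rewrites above replace index-based `forAll(list, i)` traversal with range-based `for` over the elements, removing the index bookkeeping and the repeated `list[i]` lookups. A minimal standalone illustration of the two equivalent forms over a plain `std::vector`:

```cpp
#include <vector>

int main()
{
    std::vector<int> faceSizes{3, 4, 4, 3};

    // Index loop (the old forAll(list, i) style)
    int nVerts = 0;
    for (std::size_t i = 0; i < faceSizes.size(); ++i) { nVerts += faceSizes[i]; }

    // Range-based for (the new style) - same result, no explicit index
    int nVerts2 = 0;
    for (const int n : faceSizes) { nVerts2 += n; }

    return (nVerts == nVerts2) ? 0 : 1;
}
```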
@@ -447,17 +454,19 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::totalArea() const

// * * * * * * * * * * * * Protected Member Functions  * * * * * * * * * * * //

-bool Foam::functionObjects::fieldValues::surfaceFieldValue::needsSf() const
+bool Foam::functionObjects::fieldValues::surfaceFieldValue::usesSf() const
 {
-    // Many operations use the Sf field
+    // Only a few operations do not require the Sf field
    switch (operation_)
    {
        case opNone:
+        case opMin:
+        case opMax:
        case opSum:
        case opSumMag:
        case opAverage:
-        case opMin:
-        case opMax:
+        case opSumDirection:
+        case opSumDirectionBalance:
        {
            return false;
        }
@@ -469,26 +478,6 @@ bool Foam::functionObjects::fieldValues::surfaceFieldValue::needsSf() const
 }


-bool Foam::functionObjects::fieldValues::surfaceFieldValue::needsWeight() const
-{
-    // Only a few operations use weight field
-    switch (operation_)
-    {
-        case opWeightedSum:
-        case opWeightedAverage:
-        case opWeightedAreaAverage:
-        case opWeightedAreaIntegrate:
-        {
-            return true;
-        }
-        default:
-        {
-            return false;
-        }
-    }
-}
-
-
 void Foam::functionObjects::fieldValues::surfaceFieldValue::initialise
 (
     const dictionary& dict
@@ -582,19 +571,32 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::initialise


    weightFieldName_ = "none";
-    if (needsWeight())
+    if (usesWeight())
    {
+        if (regionType_ == stSampledSurface)
+        {
+            FatalIOErrorInFunction(dict)
+                << "Cannot use weighted operation '"
+                << operationTypeNames_[operation_]
+                << "' for sampledSurface"
+                << exit(FatalIOError);
+        }
+
        if (dict.readIfPresent("weightField", weightFieldName_))
        {
-            if (regionType_ == stSampledSurface)
-            {
-                FatalIOErrorInFunction(dict)
-                    << "Cannot use weightField for sampledSurface"
-                    << exit(FatalIOError);
-            }
-
            Info<< " weight field = " << weightFieldName_ << nl;
        }
+        else
+        {
+            // Suggest possible alternative unweighted operation?
+            FatalIOErrorInFunction(dict)
+                << "The '" << operationTypeNames_[operation_]
+                << "' operation is missing a weightField." << nl
+                << "Either provide the weightField, "
+                << "use weightField 'none' to suppress weighting," << nl
+                << "or use a different operation."
+                << exit(FatalIOError);
+        }
    }

    // Backwards compatibility for v1612+ and older

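With the stricter checks above, a weighted operation must either name a weightField or explicitly set it to none, and weighted operations are rejected outright for sampledSurface regions. A hedged sketch of the relevant functionObject dictionary entries; only the `operation` and `weightField` keywords and the value `none` are taken from the code above, the operation chosen and the field name `rhoU` are illustrative assumptions:

```
// Fragment of a surfaceFieldValue functionObject specification (sketch)
operation       weightedAreaAverage;
weightField     rhoU;    // or 'none' to suppress weighting
```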
@@ -660,10 +662,10 @@ void Foam::functionObjects::fieldValues::surfaceFieldValue::writeFileHeader
        os << tab << "Area";
    }

-    forAll(fields_, i)
+    for (const word& fieldName : fields_)
    {
        os << tab << operationTypeNames_[operation_]
-            << "(" << fields_[i] << ")";
+            << "(" << fieldName << ")";
    }

    os << endl;
@@ -684,12 +686,12 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::processValues
    {
        case opSumDirection:
        {
-            vector n(dict_.lookup("direction"));
+            const vector n(dict_.lookup("direction"));
            return gSum(pos(values*(Sf & n))*mag(values));
        }
        case opSumDirectionBalance:
        {
-            vector n(dict_.lookup("direction"));
+            const vector n(dict_.lookup("direction"));
            const scalarField nv(values*(Sf & n));

            return gSum(pos(nv)*mag(values) - neg(nv)*mag(values));
@@ -754,10 +756,17 @@ Foam::tmp<Foam::scalarField>
 Foam::functionObjects::fieldValues::surfaceFieldValue::weightingFactor
 (
     const Field<scalar>& weightField
-)
+) const
 {
-    // pass through
-    return weightField;
+    if (usesMag())
+    {
+        return mag(weightField);
+    }
+    else
+    {
+        // pass through
+        return weightField;
+    }
 }


@@ -767,16 +776,21 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::weightingFactor
 (
     const Field<scalar>& weightField,
     const vectorField& Sf
-)
+) const
 {
     // scalar * Area
     if (returnReduce(weightField.empty(), andOp<bool>()))
     {
+        // No weight field - revert to unweighted form
         return mag(Sf);
     }
+    else if (usesMag())
+    {
+        return mag(weightField * mag(Sf));
+    }
     else
     {
-        return weightField * mag(Sf);
+        return (weightField * mag(Sf));
     }
 }

@@ -787,16 +801,21 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::weightingFactor
 (
     const Field<vector>& weightField,
     const vectorField& Sf
-)
+) const
 {
     // vector (dot) Area
     if (returnReduce(weightField.empty(), andOp<bool>()))
     {
+        // No weight field - revert to unweighted form
         return mag(Sf);
     }
+    else if (usesMag())
+    {
+        return mag(weightField & Sf);
+    }
     else
     {
-        return weightField & Sf;
+        return (weightField & Sf);
     }
 }

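The weightingFactor overloads above reduce a weight field and the face area vectors Sf to a single scalar factor per face: with no weight field the factor is |Sf|, a scalar weight gives w*|Sf|, a vector weight gives w & Sf (a dot product), and the abs-weighted variants additionally take mag() of the result. A standalone per-face sketch of those combinations, with plain structs in place of the Foam field types:

```cpp
#include <cmath>

struct Vec3 { double x, y, z; };

static double mag(const Vec3& v) { return std::sqrt(v.x*v.x + v.y*v.y + v.z*v.z); }
static double dot(const Vec3& a, const Vec3& b) { return a.x*b.x + a.y*b.y + a.z*b.z; }

// Scalar weight for one face with area vector Sf: w*|Sf|, optionally |w*|Sf||.
static double scalarWeightFactor(double w, const Vec3& Sf, bool absolute)
{
    const double f = w*mag(Sf);
    return absolute ? std::fabs(f) : f;
}

// Vector weight for one face: w & Sf, optionally |w & Sf|.
static double vectorWeightFactor(const Vec3& w, const Vec3& Sf, bool absolute)
{
    const double f = dot(w, Sf);
    return absolute ? std::fabs(f) : f;
}
```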
@@ -915,7 +934,7 @@ bool Foam::functionObjects::fieldValues::surfaceFieldValue::write()

    // Many operations use the Sf field
    vectorField Sf;
-    if (needsSf())
+    if (usesSf())
    {
        if (regionType_ == stSurface)
        {
@@ -103,22 +103,26 @@ Usage
    The \c operation is one of:
    \plaintable
       none | no operation
-       sum | sum
-       weightedSum | weighted sum
-       sumMag | sum of component magnitudes
-       sumDirection | sum values which are positive in given direction
-       sumDirectionBalance | sum of balance of values in given direction
-       average | ensemble average
-       weightedAverage | weighted average
-       areaAverage | area-weighted average
-       weightedAreaAverage | weighted area average
-       areaIntegrate | area integral
-       weightedAreaIntegrate | weighted area integral
       min | minimum
       max | maximum
+       sum | sum
+       sumMag | sum of component magnitudes
+       sumDirection | sum values that are positive in given direction
+       sumDirectionBalance | sum of balance of values in given direction
+       average | ensemble average
+       areaAverage | area-weighted average
+       areaIntegrate | area integral
       CoV | coefficient of variation: standard deviation/mean
       areaNormalAverage| area-weighted average in face normal direction
       areaNormalIntegrate | area-weighted integral in face normal directon
+       weightedSum | weighted sum
+       weightedAverage | weighted average
+       weightedAreaAverage | weighted area average
+       weightedAreaIntegrate | weighted area integral
+       absWeightedSum | sum using absolute weighting
+       absWeightedAverage | average using absolute weighting
+       absWeightedAreaAverage | area average using absolute weighting
+       absWeightedAreaIntegrate | area integral using absolute weighting
    \endplaintable

    Note
@@ -190,36 +194,73 @@ public:
    //- Region type enumeration
    enum regionTypes
    {
-        stFaceZone,
-        stPatch,
-        stSurface,
-        stSampledSurface
+        stFaceZone,        //!< Calculate on a faceZone
+        stPatch,           //!< Calculate on a patch
+        stSurface,         //!< Calculate with fields on a surfMesh
+        stSampledSurface   //!< Sample onto surface and calculate
    };

    //- Region type names
    static const Enum<regionTypes> regionTypeNames_;

+    //- Bitmask values for operation variants
+    enum operationVariant
+    {
+        typeBase = 0,         //!< Base operation
+        typeWeighted = 0x100, //!< Operation using weighting
+        typeAbsolute = 0x200, //!< Operation using mag (eg, for weighting)
+    };
+
    //- Operation type enumeration
    enum operationType
    {
-        opNone,                 //!< None
-        opSum,                  //!< Sum
-        opWeightedSum,          //!< Weighted sum
-        opSumMag,               //!< Magnitude of sum
+        // Normal operations
+
+        opNone = 0,             //!< No operation
+        opMin,                  //!< Minimum value
+        opMax,                  //!< Maximum value
+        opSum,                  //!< Sum of values
+        opSumMag,               //!< Sum of component magnitudes
        opSumDirection,         //!< Sum in a given direction
-        opSumDirectionBalance,  //!< Sum in a given direction for multiple
-        opAverage,              //!< Average
-        opWeightedAverage,      //!< Weighted average
+
+        //! Sum of balance of values in given direction
+        opSumDirectionBalance,
+
+        opAverage,              //!< Ensemble average
        opAreaAverage,          //!< Area average
-        opWeightedAreaAverage,  //!< Weighted area average
        opAreaIntegrate,        //!< Area integral
-        opWeightedAreaIntegrate, //!< Weighted area integral
-        opMin,                  //!< Minimum
-        opMax,                  //!< Maximum
        opCoV,                  //!< Coefficient of variation
        opAreaNormalAverage,    //!< Area average in normal direction
-        opAreaNormalIntegrate   //!< Area integral in normal direction
+        opAreaNormalIntegrate,  //!< Area integral in normal direction
+
+        // Weighted variants
+
+        //! Weighted sum
+        opWeightedSum = (opSum | typeWeighted),
+
+        //! Weighted average
+        opWeightedAverage = (opAverage | typeWeighted),
+
+        //! Weighted area average
+        opWeightedAreaAverage = (opAreaAverage | typeWeighted),
+
+        //! Weighted area integral
+        opWeightedAreaIntegrate = (opAreaIntegrate | typeWeighted),
+
+        // Variants using absolute weighting
+
+        //! Sum using abs weighting
+        opAbsWeightedSum = (opWeightedSum | typeAbsolute),
+
+        //! Average using abs weighting
+        opAbsWeightedAverage = (opWeightedAverage | typeAbsolute),
+
+        //! Area average using abs weighting
+        opAbsWeightedAreaAverage = (opWeightedAreaAverage | typeAbsolute),
+
+        //! Area integral using abs weighting
+        opAbsWeightedAreaIntegrate =
+            (opWeightedAreaIntegrate | typeAbsolute),
    };

    //- Operation type names

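The enum layout above encodes each weighted or absolute-weighted operation as its base operation combined with high bit flags, so "does this variant use weighting" and "does it use mag" become single bit tests rather than switch statements. A standalone sketch of the scheme (the base enumerator values 1 and 2 below are illustrative; only the 0x100/0x200 flag values follow the header above):

```cpp
#include <iostream>

enum OperationVariant : unsigned
{
    typeBase     = 0,      // plain operation
    typeWeighted = 0x100,  // operation using weighting
    typeAbsolute = 0x200   // operation using mag (eg, for weighting)
};

enum OperationType : unsigned
{
    opSum            = 1,                              // illustrative value
    opAreaAverage    = 2,                              // illustrative value
    opWeightedSum    = (opSum | typeWeighted),
    opAbsWeightedSum = (opWeightedSum | typeAbsolute)
};

static bool usesWeight(unsigned op) { return (op & typeWeighted) != 0; }
static bool usesMag(unsigned op)    { return (op & typeAbsolute) != 0; }

int main()
{
    std::cout << usesWeight(opWeightedSum) << usesMag(opWeightedSum) << '\n';       // 10
    std::cout << usesWeight(opAbsWeightedSum) << usesMag(opAbsWeightedSum) << '\n'; // 11
}
```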
@@ -229,8 +270,8 @@ public:
    //- Post-operation type enumeration
    enum postOperationType
    {
-        postOpNone,
-        postOpSqrt
+        postOpNone,   //!< No additional operation after calculation
+        postOpSqrt    //!< Perform sqrt after normal operation
    };

    //- Operation type names
@@ -325,11 +366,19 @@ protected:
        //- Return the local true/false list representing the face flip map
        inline const boolList& faceFlip() const;

-        //- True if the specified operation needs a surface Sf
-        bool needsSf() const;
+        //- True if the operation needs a surface Sf
+        bool usesSf() const;

-        //- True if the specified operation needs a weight-field
-        bool needsWeight() const;
+        //- True if the operation variant uses mag
+        inline bool usesMag() const;
+
+        //- True if the operation variant uses a weight-field
+        inline bool usesWeight() const;
+
+        //- True if operation variant uses a weight-field that is available.
+        //  Checks for availability on any processor.
+        template<class WeightType>
+        inline bool canWeight(const Field<WeightType>& weightField) const;

        //- Initialise, e.g. face addressing
        void initialise(const dictionary& dict);
@@ -365,6 +414,7 @@ protected:
            const Field<WeightType>& weightField
        ) const;

+
        //- Filter a surface field according to faceIds
        template<class Type>
        tmp<Field<Type>> filterField
@@ -379,20 +429,24 @@ protected:
            const GeometricField<Type, fvPatchField, volMesh>& field
        ) const;

-        //- Weighting factor
+        //- Weighting factor.
+        //  Possibly applies mag() depending on the operation type.
        template<class WeightType>
-        static tmp<scalarField> weightingFactor
+        tmp<scalarField> weightingFactor
        (
            const Field<WeightType>& weightField
-        );
+        ) const;

-        //- Weighting factor, weight field with the area
+        //- Weighting factor, weight field with the area.
+        //  Possibly applies mag() depending on the operation type.
+        //  Reverts to mag(Sf) if the weight field is not available.
        template<class WeightType>
-        static tmp<scalarField> weightingFactor
+        tmp<scalarField> weightingFactor
        (
            const Field<WeightType>& weightField,
            const vectorField& Sf
-        );
+        ) const;


        //- Templated helper function to output field values
@@ -489,7 +543,7 @@ template<>
 tmp<scalarField> surfaceFieldValue::weightingFactor
 (
     const Field<scalar>& weightField
-);
+) const;


//- Specialisation for scalar - scalar * Area
@@ -498,7 +552,7 @@ tmp<scalarField> surfaceFieldValue::weightingFactor
 (
     const Field<scalar>& weightField,
     const vectorField& Sf
-);
+) const;


//- Specialisation for vector - vector (dot) Area
@@ -507,7 +561,7 @@ tmp<scalarField> surfaceFieldValue::weightingFactor
 (
     const Field<vector>& weightField,
     const vectorField& Sf
-);
+) const;


// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
@@ -23,7 +23,6 @@ License

\*---------------------------------------------------------------------------*/

-#include "surfaceFieldValue.H"
 #include "Time.H"

// * * * * * * * * * * * * Protected Member Functions  * * * * * * * * * * * //
@@ -51,6 +50,22 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::faceFlip() const

// * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * //

+inline bool
+Foam::functionObjects::fieldValues::surfaceFieldValue::usesMag() const
+{
+    // Operation specifically tagged to use mag
+    return (operation_ & typeAbsolute);
+}
+
+
+inline bool
+Foam::functionObjects::fieldValues::surfaceFieldValue::usesWeight() const
+{
+    // Operation specifically tagged to require a weight field
+    return (operation_ & typeWeighted);
+}
+
+
 inline const Foam::functionObjects::fieldValues::surfaceFieldValue::regionTypes&
 Foam::functionObjects::fieldValues::surfaceFieldValue::regionType() const
 {
@@ -33,6 +33,20 @@ License

// * * * * * * * * * * * * Protected Member Functions  * * * * * * * * * * * //

+template<class WeightType>
+inline bool Foam::functionObjects::fieldValues::surfaceFieldValue::canWeight
+(
+    const Field<WeightType>& weightField
+) const
+{
+    return
+    (
+        usesWeight()
+     && returnReduce(!weightField.empty(), orOp<bool>()) // On some processor
+    );
+}
+
+
 template<class Type>
 bool Foam::functionObjects::fieldValues::surfaceFieldValue::validField
 (

|
|||||||
{
|
{
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case opSum:
|
case opMin:
|
||||||
{
|
{
|
||||||
result = gSum(values);
|
result = gMin(values);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case opWeightedSum:
|
case opMax:
|
||||||
{
|
{
|
||||||
if (returnReduce(weightField.empty(), andOp<bool>()))
|
result = gMax(values);
|
||||||
{
|
|
||||||
result = gSum(values);
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
tmp<scalarField> weight(weightingFactor(weightField));
|
|
||||||
|
|
||||||
result = gSum(weight*values);
|
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case opSumMag:
|
case opSumMag:
|
||||||
@ -166,6 +171,23 @@ processSameTypeValues
|
|||||||
result = gSum(cmptMag(values));
|
result = gSum(cmptMag(values));
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
case opSum:
|
||||||
|
case opWeightedSum:
|
||||||
|
case opAbsWeightedSum:
|
||||||
|
{
|
||||||
|
if (canWeight(weightField))
|
||||||
|
{
|
||||||
|
tmp<scalarField> weight(weightingFactor(weightField));
|
||||||
|
|
||||||
|
result = gSum(weight*values);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// Unweighted form
|
||||||
|
result = gSum(values);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
case opSumDirection:
|
case opSumDirection:
|
||||||
case opSumDirectionBalance:
|
case opSumDirectionBalance:
|
||||||
{
|
{
|
||||||
@ -178,62 +200,56 @@ processSameTypeValues
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case opAverage:
|
case opAverage:
|
||||||
{
|
|
||||||
const label n = returnReduce(values.size(), sumOp<label>());
|
|
||||||
result = gSum(values)/(scalar(n) + ROOTVSMALL);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case opWeightedAverage:
|
case opWeightedAverage:
|
||||||
|
case opAbsWeightedAverage:
|
||||||
{
|
{
|
||||||
if (returnReduce(weightField.empty(), andOp<bool>()))
|
if (canWeight(weightField))
|
||||||
{
|
|
||||||
const label n = returnReduce(values.size(), sumOp<label>());
|
|
||||||
result = gSum(values)/(scalar(n) + ROOTVSMALL);
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
{
|
||||||
const scalarField factor(weightingFactor(weightField));
|
const scalarField factor(weightingFactor(weightField));
|
||||||
|
|
||||||
result = gSum(factor*values)/(gSum(factor) + ROOTVSMALL);
|
result = gSum(factor*values)/(gSum(factor) + ROOTVSMALL);
|
||||||
}
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// Unweighted form
|
||||||
|
const label n = returnReduce(values.size(), sumOp<label>());
|
||||||
|
result = gSum(values)/(scalar(n) + ROOTVSMALL);
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case opAreaAverage:
|
case opAreaAverage:
|
||||||
{
|
|
||||||
const scalarField factor(mag(Sf));
|
|
||||||
|
|
||||||
result = gSum(factor*values)/gSum(factor);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case opWeightedAreaAverage:
|
case opWeightedAreaAverage:
|
||||||
|
case opAbsWeightedAreaAverage:
|
||||||
{
|
{
|
||||||
const scalarField factor(weightingFactor(weightField, Sf));
|
if (canWeight(weightField))
|
||||||
|
{
|
||||||
|
const scalarField factor(weightingFactor(weightField, Sf));
|
||||||
|
|
||||||
result = gSum(factor*values)/gSum(factor + ROOTVSMALL);
|
result = gSum(factor*values)/gSum(factor + ROOTVSMALL);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// Unweighted form
|
||||||
|
const scalarField factor(mag(Sf));
|
||||||
|
result = gSum(factor*values)/gSum(factor);
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case opAreaIntegrate:
|
case opAreaIntegrate:
|
||||||
{
|
|
||||||
const scalarField factor(mag(Sf));
|
|
||||||
|
|
||||||
result = gSum(factor*values);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case opWeightedAreaIntegrate:
|
case opWeightedAreaIntegrate:
|
||||||
|
case opAbsWeightedAreaIntegrate:
|
||||||
{
|
{
|
||||||
const scalarField factor(weightingFactor(weightField, Sf));
|
if (canWeight(weightField))
|
||||||
|
{
|
||||||
result = gSum(factor*values);
|
tmp<scalarField> factor(weightingFactor(weightField, Sf));
|
||||||
break;
|
result = gSum(factor*values);
|
||||||
}
|
}
|
||||||
case opMin:
|
else
|
||||||
{
|
{
|
||||||
result = gMin(values);
|
// Unweighted form
|
||||||
break;
|
tmp<scalarField> factor(mag(Sf));
|
||||||
}
|
result = gSum(factor*values);
|
||||||
case opMax:
|
}
|
||||||
{
|
|
||||||
result = gMax(values);
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case opCoV:
|
case opCoV:
|
||||||
@ -245,8 +261,8 @@ processSameTypeValues
|
|||||||
|
|
||||||
for (direction d=0; d < pTraits<Type>::nComponents; ++d)
|
for (direction d=0; d < pTraits<Type>::nComponents; ++d)
|
||||||
{
|
{
|
||||||
tmp<scalarField> vals = values.component(d);
|
tmp<scalarField> vals(values.component(d));
|
||||||
scalar mean = component(meanValue, d);
|
const scalar mean = component(meanValue, d);
|
||||||
scalar& res = setComponent(result, d);
|
scalar& res = setComponent(result, d);
|
||||||
|
|
||||||
res =
|
res =
|
||||||
@ -286,8 +302,10 @@ Foam::tmp<Foam::scalarField>
|
|||||||
Foam::functionObjects::fieldValues::surfaceFieldValue::weightingFactor
|
Foam::functionObjects::fieldValues::surfaceFieldValue::weightingFactor
|
||||||
(
|
(
|
||||||
const Field<WeightType>& weightField
|
const Field<WeightType>& weightField
|
||||||
)
|
) const
|
||||||
{
|
{
|
||||||
|
// The scalar form is specialized.
|
||||||
|
// For other types always need mag() to generate a scalar field.
|
||||||
return mag(weightField);
|
return mag(weightField);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -302,10 +320,8 @@ Foam::label Foam::functionObjects::fieldValues::surfaceFieldValue::writeAll
|
|||||||
{
|
{
|
||||||
label nProcessed = 0;
|
label nProcessed = 0;
|
||||||
|
|
||||||
forAll(fields_, i)
|
for (const word& fieldName : fields_)
|
||||||
{
|
{
|
||||||
const word& fieldName = fields_[i];
|
|
||||||
|
|
||||||
if
|
if
|
||||||
(
|
(
|
||||||
writeValues<scalar>(fieldName, Sf, weightField, surfToWrite)
|
writeValues<scalar>(fieldName, Sf, weightField, surfToWrite)
|
||||||
@ -437,8 +453,8 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::filterField
|
|||||||
|
|
||||||
forAll(values, i)
|
forAll(values, i)
|
||||||
{
|
{
|
||||||
label facei = faceId_[i];
|
const label facei = faceId_[i];
|
||||||
label patchi = facePatchId_[i];
|
const label patchi = facePatchId_[i];
|
||||||
if (patchi >= 0)
|
if (patchi >= 0)
|
||||||
{
|
{
|
||||||
values[i] = field.boundaryField()[patchi][facei];
|
values[i] = field.boundaryField()[patchi][facei];
|
||||||
@ -472,8 +488,8 @@ Foam::functionObjects::fieldValues::surfaceFieldValue::filterField
|
|||||||
|
|
||||||
forAll(values, i)
|
forAll(values, i)
|
||||||
{
|
{
|
||||||
label facei = faceId_[i];
|
const label facei = faceId_[i];
|
||||||
label patchi = facePatchId_[i];
|
const label patchi = facePatchId_[i];
|
||||||
if (patchi >= 0)
|
if (patchi >= 0)
|
||||||
{
|
{
|
||||||
values[i] = field.boundaryField()[patchi][facei];
|
values[i] = field.boundaryField()[patchi][facei];
|
||||||
|
|||||||
@ -49,32 +49,35 @@ const Foam::Enum
|
|||||||
>
|
>
|
||||||
Foam::functionObjects::fieldValues::volFieldValue::operationTypeNames_
|
Foam::functionObjects::fieldValues::volFieldValue::operationTypeNames_
|
||||||
{
|
{
|
||||||
|
// Normal operations
|
||||||
{ operationType::opNone, "none" },
|
{ operationType::opNone, "none" },
|
||||||
{ operationType::opSum, "sum" },
|
|
||||||
{ operationType::opWeightedSum, "weightedSum" },
|
|
||||||
{ operationType::opSumMag, "sumMag" },
|
|
||||||
{ operationType::opAverage, "average" },
|
|
||||||
{ operationType::opWeightedAverage, "weightedAverage" },
|
|
||||||
{ operationType::opVolAverage, "volAverage" },
|
|
||||||
{ operationType::opWeightedVolAverage, "weightedVolAverage" },
|
|
||||||
{ operationType::opVolIntegrate, "volIntegrate" },
|
|
||||||
{ operationType::opWeightedVolIntegrate, "weightedVolIntegrate" },
|
|
||||||
{ operationType::opMin, "min" },
|
{ operationType::opMin, "min" },
|
||||||
{ operationType::opMax, "max" },
|
{ operationType::opMax, "max" },
|
||||||
|
{ operationType::opSum, "sum" },
|
||||||
|
{ operationType::opSumMag, "sumMag" },
|
||||||
|
{ operationType::opAverage, "average" },
|
||||||
|
{ operationType::opVolAverage, "volAverage" },
|
||||||
|
{ operationType::opVolIntegrate, "volIntegrate" },
|
||||||
{ operationType::opCoV, "CoV" },
|
{ operationType::opCoV, "CoV" },
|
||||||
|
|
||||||
|
// Using weighting
|
||||||
|
{ operationType::opWeightedSum, "weightedSum" },
|
||||||
|
{ operationType::opWeightedAverage, "weightedAverage" },
|
||||||
|
{ operationType::opWeightedVolAverage, "weightedVolAverage" },
|
||||||
|
{ operationType::opWeightedVolIntegrate, "weightedVolIntegrate" },
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
|
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
|
||||||
|
|
||||||
bool Foam::functionObjects::fieldValues::volFieldValue::needsVol() const
|
bool Foam::functionObjects::fieldValues::volFieldValue::usesVol() const
|
||||||
{
|
{
|
||||||
// Only some operations need the cell volume
|
// Only a few operations require the cell volume
|
||||||
switch (operation_)
|
switch (operation_)
|
||||||
{
|
{
|
||||||
case opVolAverage:
|
case opVolAverage:
|
||||||
case opWeightedVolAverage:
|
|
||||||
case opVolIntegrate:
|
case opVolIntegrate:
|
||||||
|
case opWeightedVolAverage:
|
||||||
case opWeightedVolIntegrate:
|
case opWeightedVolIntegrate:
|
||||||
case opCoV:
|
case opCoV:
|
||||||
return true;
|
return true;
|
||||||
@ -85,20 +88,23 @@ bool Foam::functionObjects::fieldValues::volFieldValue::needsVol() const
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
bool Foam::functionObjects::fieldValues::volFieldValue::needsWeight() const
|
bool Foam::functionObjects::fieldValues::volFieldValue::usesWeight() const
|
||||||
{
|
{
|
||||||
// Only a few operations use weight field
|
// Operation specifically tagged to require a weight field
|
||||||
switch (operation_)
|
return (operation_ & typeWeighted);
|
||||||
{
|
}
|
||||||
case opWeightedSum:
|
|
||||||
case opWeightedAverage:
|
|
||||||
case opWeightedVolAverage:
|
|
||||||
case opWeightedVolIntegrate:
|
|
||||||
return true;
|
|
||||||
|
|
||||||
default:
|
|
||||||
return false;
|
bool Foam::functionObjects::fieldValues::volFieldValue::canWeight
|
||||||
}
|
(
|
||||||
|
const scalarField& weightField
|
||||||
|
) const
|
||||||
|
{
|
||||||
|
return
|
||||||
|
(
|
||||||
|
usesWeight()
|
||||||
|
&& returnReduce(!weightField.empty(), orOp<bool>()) // On some processor
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -108,12 +114,23 @@ void Foam::functionObjects::fieldValues::volFieldValue::initialise
|
|||||||
)
|
)
|
||||||
{
|
{
|
||||||
weightFieldName_ = "none";
|
weightFieldName_ = "none";
|
||||||
if (needsWeight())
|
if (usesWeight())
|
||||||
{
|
{
|
||||||
if (dict.readIfPresent("weightField", weightFieldName_))
|
if (dict.readIfPresent("weightField", weightFieldName_))
|
||||||
{
|
{
|
||||||
Info<< " weight field = " << weightFieldName_;
|
Info<< " weight field = " << weightFieldName_;
|
||||||
}
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// Suggest possible alternative unweighted operation?
|
||||||
|
FatalIOErrorInFunction(dict)
|
||||||
|
<< "The '" << operationTypeNames_[operation_]
|
||||||
|
<< "' operation is missing a weightField." << nl
|
||||||
|
<< "Either provide the weightField, "
|
||||||
|
<< "use weightField 'none' to suppress weighting," << nl
|
||||||
|
<< "or use a different operation."
|
||||||
|
<< exit(FatalIOError);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Info<< nl << endl;
|
Info<< nl << endl;
|
||||||
@ -133,10 +150,10 @@ void Foam::functionObjects::fieldValues::volFieldValue::writeFileHeader
|
|||||||
|
|
||||||
writeCommented(os, "Time");
|
writeCommented(os, "Time");
|
||||||
|
|
||||||
forAll(fields_, fieldi)
|
for (const word& fieldName : fields_)
|
||||||
{
|
{
|
||||||
os << tab << operationTypeNames_[operation_]
|
os << tab << operationTypeNames_[operation_]
|
||||||
<< "(" << fields_[fieldi] << ")";
|
<< "(" << fieldName << ")";
|
||||||
}
|
}
|
||||||
|
|
||||||
os << endl;
|
os << endl;
|
||||||
@ -151,10 +168,8 @@ Foam::label Foam::functionObjects::fieldValues::volFieldValue::writeAll
|
|||||||
{
|
{
|
||||||
label nProcessed = 0;
|
label nProcessed = 0;
|
||||||
|
|
||||||
forAll(fields_, i)
|
for (const word& fieldName : fields_)
|
||||||
{
|
{
|
||||||
const word& fieldName = fields_[i];
|
|
||||||
|
|
||||||
if
|
if
|
||||||
(
|
(
|
||||||
writeValues<scalar>(fieldName, V, weightField)
|
writeValues<scalar>(fieldName, V, weightField)
|
||||||
@ -245,7 +260,7 @@ bool Foam::functionObjects::fieldValues::volFieldValue::write()
|
|||||||
|
|
||||||
// Only some operations need the cell volume
|
// Only some operations need the cell volume
|
||||||
scalarField V;
|
scalarField V;
|
||||||
if (needsVol())
|
if (usesVol())
|
||||||
{
|
{
|
||||||
V = filterField(fieldValue::mesh_.V());
|
V = filterField(fieldValue::mesh_.V());
|
||||||
}
|
}
|
||||||
|
|||||||
@ -81,18 +81,18 @@ Usage
|
|||||||
The \c operation is one of:
|
The \c operation is one of:
|
||||||
\plaintable
|
\plaintable
|
||||||
none | No operation
|
none | No operation
|
||||||
sum | Sum
|
|
||||||
weightedSum | Weighted sum
|
|
||||||
sumMag | Sum of component magnitudes
|
|
||||||
average | Ensemble average
|
|
||||||
weightedAverage | Weighted average
|
|
||||||
volAverage | Volume weighted average
|
|
||||||
weightedVolAverage | Weighted volume average
|
|
||||||
volIntegrate | Volume integral
|
|
||||||
weightedVolIntegrate | Weighted volume integral
|
|
||||||
min | Minimum
|
min | Minimum
|
||||||
max | Maximum
|
max | Maximum
|
||||||
|
sum | Sum
|
||||||
|
sumMag | Sum of component magnitudes
|
||||||
|
average | Ensemble average
|
||||||
|
volAverage | Volume weighted average
|
||||||
|
volIntegrate | Volume integral
|
||||||
CoV | Coefficient of variation: standard deviation/mean
|
CoV | Coefficient of variation: standard deviation/mean
|
||||||
|
weightedSum | Weighted sum
|
||||||
|
weightedAverage | Weighted average
|
||||||
|
weightedVolAverage | Weighted volume average
|
||||||
|
weightedVolIntegrate | Weighted volume integral
|
||||||
\endplaintable
|
\endplaintable
|
||||||
|
|
||||||
See also
|
See also
|
||||||
@ -134,22 +134,41 @@ public:
|
|||||||
|
|
||||||
// Public data types
|
// Public data types
|
||||||
|
|
||||||
|
//- Bitmask values for operation variants
|
||||||
|
enum operationVariant
|
||||||
|
{
|
||||||
|
typeBase = 0, //!< Base operation
|
||||||
|
typeWeighted = 0x100, //!< Operation using weighting
|
||||||
|
};
|
||||||
|
|
||||||
//- Operation type enumeration
|
//- Operation type enumeration
|
||||||
enum operationType
|
enum operationType
|
||||||
{
|
{
|
||||||
opNone, //!< None
|
// Normal operations
|
||||||
opSum, //!< Sum
|
|
||||||
opWeightedSum, //!< Weighted sum
|
opNone = 0, //!< No operation
|
||||||
opSumMag, //!< Magnitude of sum
|
|
||||||
opAverage, //!< Average
|
|
||||||
opWeightedAverage, //!< Weighted average
|
|
||||||
opVolAverage, //!< Volume average
|
|
||||||
opWeightedVolAverage, //!< Weighted volume average
|
|
||||||
opVolIntegrate, //!< Volume integral
|
|
||||||
opWeightedVolIntegrate, //!< Weighted volume integral
|
|
||||||
opMin, //!< Minimum
|
opMin, //!< Minimum
|
||||||
opMax, //!< Maximum
|
opMax, //!< Maximum
|
||||||
opCoV //!< Coefficient of variation
|
opSum, //!< Sum
|
||||||
|
opSumMag, //!< Magnitude of sum
|
||||||
|
opAverage, //!< Average
|
||||||
|
opVolAverage, //!< Volume average
|
||||||
|
opVolIntegrate, //!< Volume integral
|
||||||
|
opCoV, //!< Coefficient of variation
|
||||||
|
|
||||||
|
// Weighted variants
|
||||||
|
|
||||||
|
//! Weighted sum
|
||||||
|
opWeightedSum = (opSum | typeWeighted),
|
||||||
|
|
||||||
|
//! Weighted average
|
||||||
|
opWeightedAverage = (opAverage | typeWeighted),
|
||||||
|
|
||||||
|
//! Weighted volume average
|
||||||
|
opWeightedVolAverage = (opVolAverage | typeWeighted),
|
||||||
|
|
||||||
|
//! Weighted volume integral
|
||||||
|
opWeightedVolIntegrate = (opVolIntegrate | typeWeighted),
|
||||||
};
|
};
|
||||||
|
|
||||||
//- Operation type names
|
//- Operation type names
|
||||||
@ -169,11 +188,15 @@ protected:
|
|||||||
|
|
||||||
// Protected Member Functions
|
// Protected Member Functions
|
||||||
|
|
||||||
//- True if the specified operation needs the cell-volume
|
//- True if the operation needs the cell-volume
|
||||||
bool needsVol() const;
|
bool usesVol() const;
|
||||||
|
|
||||||
//- True if the specified operation needs a weight-field
|
//- True if the operation variant uses a weight-field
|
||||||
bool needsWeight() const;
|
bool usesWeight() const;
|
||||||
|
|
||||||
|
//- True if operation variant uses a weight-field that is available.
|
||||||
|
// Checks for availability on any processor.
|
||||||
|
inline bool canWeight(const scalarField& weightField) const;
|
||||||
|
|
||||||
//- Initialise, e.g. cell addressing
|
//- Initialise, e.g. cell addressing
|
||||||
void initialise(const dictionary& dict);
|
void initialise(const dictionary& dict);
|
||||||
|
|||||||
@@ -82,81 +82,8 @@ Type Foam::functionObjects::fieldValues::volFieldValue::processValues
     Type result = Zero;
     switch (operation_)
     {
-        case opSum:
+        case opNone:
         {
-            result = gSum(values);
-            break;
-        }
-        case opWeightedSum:
-        {
-            if (returnReduce(weightField.empty(), andOp<bool>()))
-            {
-                result = gSum(values);
-            }
-            else
-            {
-                result = gSum(weightField*values);
-            }
-            break;
-        }
-        case opSumMag:
-        {
-            result = gSum(cmptMag(values));
-            break;
-        }
-        case opAverage:
-        {
-            const label n = returnReduce(values.size(), sumOp<label>());
-            result = gSum(values)/(scalar(n) + ROOTVSMALL);
-            break;
-        }
-        case opWeightedAverage:
-        {
-            if (returnReduce(weightField.empty(), andOp<bool>()))
-            {
-                const label n = returnReduce(values.size(), sumOp<label>());
-                result = gSum(values)/(scalar(n) + ROOTVSMALL);
-            }
-            else
-            {
-                result =
-                    gSum(weightField*values)/(gSum(weightField) + ROOTVSMALL);
-            }
-            break;
-        }
-        case opVolAverage:
-        {
-            result = gSum(values*V)/gSum(V);
-            break;
-        }
-        case opWeightedVolAverage:
-        {
-            if (returnReduce(weightField.empty(), andOp<bool>()))
-            {
-                result = gSum(V*values)/(gSum(V) + ROOTVSMALL);
-            }
-            else
-            {
-                result = gSum(weightField*V*values)
-                    /(gSum(weightField*V) + ROOTVSMALL);
-            }
-            break;
-        }
-        case opVolIntegrate:
-        {
-            result = gSum(V*values);
-            break;
-        }
-        case opWeightedVolIntegrate:
-        {
-            if (returnReduce(weightField.empty(), andOp<bool>()))
-            {
-                result = gSum(V*values);
-            }
-            else
-            {
-                result = gSum(weightField*V*values);
-            }
             break;
         }
         case opMin:
@@ -169,6 +96,70 @@ Type Foam::functionObjects::fieldValues::volFieldValue::processValues
             result = gMax(values);
             break;
         }
+        case opSumMag:
+        {
+            result = gSum(cmptMag(values));
+            break;
+        }
+        case opSum:
+        case opWeightedSum:
+        {
+            if (canWeight(weightField))
+            {
+                result = gSum(weightField*values);
+            }
+            else
+            {
+                // Unweighted form
+                result = gSum(values);
+            }
+            break;
+        }
+        case opAverage:
+        case opWeightedAverage:
+        {
+            if (canWeight(weightField))
+            {
+                result =
+                    gSum(weightField*values)/(gSum(weightField) + ROOTVSMALL);
+            }
+            else
+            {
+                // Unweighted form
+                const label n = returnReduce(values.size(), sumOp<label>());
+                result = gSum(values)/(scalar(n) + ROOTVSMALL);
+            }
+            break;
+        }
+        case opVolAverage:
+        case opWeightedVolAverage:
+        {
+            if (canWeight(weightField))
+            {
+                result = gSum(weightField*V*values)
+                    /(gSum(weightField*V) + ROOTVSMALL);
+            }
+            else
+            {
+                // Unweighted form
+                result = gSum(V*values)/(gSum(V) + ROOTVSMALL);
+            }
+            break;
+        }
+        case opVolIntegrate:
+        case opWeightedVolIntegrate:
+        {
+            if (canWeight(weightField))
+            {
+                result = gSum(weightField*V*values);
+            }
+            else
+            {
+                // Unweighted form
+                result = gSum(V*values);
+            }
+            break;
+        }
         case opCoV:
         {
             const scalar sumV = gSum(V);
@@ -177,8 +168,8 @@ Type Foam::functionObjects::fieldValues::volFieldValue::processValues

             for (direction d=0; d < pTraits<Type>::nComponents; ++d)
             {
-                scalarField vals(values.component(d));
-                scalar mean = component(meanValue, d);
+                tmp<scalarField> vals(values.component(d));
+                const scalar mean = component(meanValue, d);
                 scalar& res = setComponent(result, d);

                 res = sqrt(gSum(V*sqr(vals - mean))/sumV)/(mean + ROOTVSMALL);
@@ -186,8 +177,6 @@ Type Foam::functionObjects::fieldValues::volFieldValue::processValues

             break;
         }
-        case opNone:
-        {}
     }

     return result;
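The rewritten switch above folds each weighted/unweighted pair into one case and falls back to the plain form whenever canWeight() reports that no usable weight field is present. A framework-free sketch of that fallback pattern follows, with a std::vector standing in for an OpenFOAM field; it is an illustration of the pattern, not the library code.

    // Sketch only: a weighted sum that silently degrades to the unweighted sum
    // when the weight field is absent, mirroring the canWeight() fallback above.
    #include <cstdio>
    #include <vector>

    static double weightedSum(const std::vector<double>& values,
                              const std::vector<double>& weights)
    {
        double result = 0.0;

        if (!weights.empty() && weights.size() == values.size())
        {
            // Weighted form
            for (std::size_t i = 0; i < values.size(); ++i)
            {
                result += weights[i]*values[i];
            }
        }
        else
        {
            // Unweighted form
            for (double v : values)
            {
                result += v;
            }
        }

        return result;
    }

    int main()
    {
        const std::vector<double> vals{1.0, 2.0, 3.0};
        std::printf("unweighted: %g\n", weightedSum(vals, {}));
        std::printf("weighted  : %g\n", weightedSum(vals, {0.5, 0.5, 0.5}));
        return 0;
    }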
@@ -3,7 +3,7 @@
   \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
    \\    /   O peration     |
     \\  /    A nd           | Copyright (C) 2011-2016 OpenFOAM Foundation
-     \\/     M anipulation  |
+     \\/     M anipulation  | Copyright (C) 2017 OpenCFD Ltd.
 -------------------------------------------------------------------------------
 License
     This file is part of OpenFOAM.
@@ -435,7 +435,14 @@ Foam::surfaceToCell::surfaceToCell
     ),
     nearDist_(readScalar(dict.lookup("nearDistance"))),
     curvature_(readScalar(dict.lookup("curvature"))),
-    surfPtr_(new triSurface(surfName_)),
+    surfPtr_
+    (
+        new triSurface
+        (
+            surfName_,
+            dict.lookupOrDefault<scalar>("scale", -1)
+        )
+    ),
     querySurfPtr_(new triSurfaceSearch(*surfPtr_)),
     IOwnPtrs_(true)
 {
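The constructor change above reads the surface scale with dict.lookupOrDefault<scalar>("scale", -1), so a missing keyword yields a negative sentinel and the surface is left unscaled. A hedged sketch of that lookup-or-default idea, with a plain std::map standing in for the OpenFOAM dictionary:

    // Sketch only: an optional keyword with a sentinel default. A non-positive
    // result is later interpreted as "leave the geometry unscaled".
    #include <cstdio>
    #include <map>
    #include <string>

    static double lookupOrDefault(const std::map<std::string, double>& dict,
                                  const std::string& key,
                                  double deflt)
    {
        const auto iter = dict.find(key);
        return (iter != dict.end() ? iter->second : deflt);
    }

    int main()
    {
        const std::map<std::string, double> withScale{{"scale", 0.001}};
        const std::map<std::string, double> withoutScale;

        std::printf("scale given  : %g\n", lookupOrDefault(withScale, "scale", -1));
        std::printf("scale missing: %g\n", lookupOrDefault(withoutScale, "scale", -1));
        return 0;
    }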
@@ -3,7 +3,7 @@
   \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
    \\    /   O peration     |
     \\  /    A nd           | Copyright (C) 2011-2016 OpenFOAM Foundation
-     \\/     M anipulation  |
+     \\/     M anipulation  | Copyright (C) 2017 OpenCFD Ltd.
 -------------------------------------------------------------------------------
 License
     This file is part of OpenFOAM.
@@ -59,7 +59,7 @@ void Foam::surfaceToPoint::combine(topoSet& set, const bool add) const
 {
     cpuTime timer;

-    triSurface surf(surfName_);
+    triSurface surf(surfName_, scale_);

     Info<< " Read surface from " << surfName_
         << " in = "<< timer.cpuTimeIncrement() << " s" << endl << endl;
@@ -131,6 +131,7 @@ Foam::surfaceToPoint::surfaceToPoint
 :
     topoSetSource(mesh),
     surfName_(surfName),
+    scale_(1.0),
     nearDist_(nearDist),
     includeInside_(includeInside),
     includeOutside_(includeOutside)
@@ -147,6 +148,7 @@ Foam::surfaceToPoint::surfaceToPoint
 :
     topoSetSource(mesh),
     surfName_(fileName(dict.lookup("file")).expand()),
+    scale_(dict.lookupOrDefault<scalar>("scale", 1.0)),
     nearDist_(readScalar(dict.lookup("nearDistance"))),
     includeInside_(readBool(dict.lookup("includeInside"))),
     includeOutside_(readBool(dict.lookup("includeOutside")))
@@ -163,6 +165,7 @@ Foam::surfaceToPoint::surfaceToPoint
 :
     topoSetSource(mesh),
     surfName_(checkIs(is)),
+    scale_(1.0),
     nearDist_(readScalar(checkIs(is))),
     includeInside_(readBool(checkIs(is))),
     includeOutside_(readBool(checkIs(is)))
@@ -67,6 +67,9 @@ class surfaceToPoint
         //- Name of surface file
         const fileName surfName_;

+        //- Optional scaling for surface
+        const scalar scale_;
+
         //- If > 0 : include points with distance to surface less than nearDist.
         const scalar nearDist_;

@@ -218,7 +218,8 @@ Foam::label Foam::triSurfaceLoader::select(const wordReList& matcher)

 Foam::autoPtr<Foam::triSurface> Foam::triSurfaceLoader::load
 (
-    const enum loadingOption opt
+    const enum loadingOption opt,
+    const scalar scaleFactor
 ) const
 {
     autoPtr<triSurface> output;
@@ -229,7 +230,8 @@ Foam::autoPtr<Foam::triSurface> Foam::triSurfaceLoader::load
     }
     else if (selected_.size() == 1)
     {
-        output.set(new triSurface(directory_/selected_[0]));
+        // Use scaling (if any)
+        output.set(new triSurface(directory_/selected_[0], scaleFactor));

         triSurface& surf = output();

@@ -392,6 +394,12 @@ Foam::autoPtr<Foam::triSurface> Foam::triSurfaceLoader::load
         }
     }

+    // Apply scaling (if any)
+    if (scaleFactor > VSMALL)
+    {
+        points *= scaleFactor;
+    }
+
     output.set(new triSurface(faces, patches, points, true));

     return output;
@@ -154,9 +154,12 @@ public:
         label select(const wordReList& matcher);

         //- Load a single file, or load and combine multiple selected files
+        //  Optionally scale the surface(s) on input, with a zero or negative
+        //  scale factor treated as no scaling.
         autoPtr<triSurface> load
         (
-            const enum loadingOption opt = loadingOption::OFFSET_REGION
+            const enum loadingOption opt = loadingOption::OFFSET_REGION,
+            const scalar scaleFactor = -1
         ) const;

 };
@@ -2,7 +2,7 @@
   =========                 |
   \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
    \\    /   O peration     |
-    \\  /    A nd           | Copyright (C) 2016 OpenCFD Ltd.
+    \\  /    A nd           | Copyright (C) 2016-2017 OpenCFD Ltd.
     \\/     M anipulation  |
 -------------------------------------------------------------------------------
 License
@@ -58,10 +58,10 @@ void Foam::surfMeshSamplers::checkOutNames
 {
     objectRegistry& reg = const_cast<objectRegistry&>(registry);

-    forAll(names, namei)
+    for (const word& fldName : names)
     {
-        objectRegistry::iterator iter = reg.find(names[namei]);
-        if (iter != reg.end())
+        objectRegistry::iterator iter = reg.find(fldName);
+        if (iter.found())
         {
             registry.checkOut(*iter());
         }
@@ -156,10 +156,8 @@ bool Foam::surfMeshSamplers::execute()
     DynamicList<word> added(derivedNames_.size());
     DynamicList<word> cleanup(derivedNames_.size());

-    forAll(derivedNames_, namei)
+    for (const word& derivedName : derivedNames_)
     {
-        const word& derivedName = derivedNames_[namei];
-
         if (derivedName == "rhoU")
         {
             added.append(derivedName);
@@ -190,20 +188,48 @@ bool Foam::surfMeshSamplers::execute()
             {
                 cleanup.append(derivedName);

-                db.store
-                (
-                    new volScalarField
-                    (
-                        derivedName,
-                        // pTotal = p + U^2 / 2
-                        (
-                            mesh_.lookupObject<volScalarField>("p")
-                          + 0.5
-                          * mesh_.lookupObject<volScalarField>("rho")
-                          * magSqr(mesh_.lookupObject<volVectorField>("U"))
-                        )
-                    )
-                );
+                const volScalarField& p =
+                    mesh_.lookupObject<volScalarField>("p");
+
+                if (p.dimensions() == dimPressure)
+                {
+                    db.store
+                    (
+                        new volScalarField
+                        (
+                            derivedName,
+                            // pTotal = p + rho U^2 / 2
+                            (
+                                p
+                              + 0.5
+                              * mesh_.lookupObject<volScalarField>("rho")
+                              * magSqr
+                                (
+                                    mesh_.lookupObject<volVectorField>("U")
+                                )
+                            )
+                        )
+                    );
+                }
+                else
+                {
+                    db.store
+                    (
+                        new volScalarField
+                        (
+                            derivedName,
+                            // pTotal = p + U^2 / 2
+                            (
+                                p
+                              + 0.5
+                              * magSqr
+                                (
+                                    mesh_.lookupObject<volVectorField>("U")
+                                )
+                            )
+                        )
+                    );
+                }
             }
         }
         else
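The pTotal branch above now inspects the dimensions of p: with absolute pressure the dynamic head needs the rho factor, with kinematic pressure (p already divided by rho) it does not. A small numeric sketch of the two forms, using illustrative values only:

    // Sketch only: the two total-pressure forms selected above.
    // Absolute pressure [Pa]:        pTot = p + 0.5*rho*|U|^2
    // Kinematic pressure [m^2/s^2]:  pTot = p + 0.5*|U|^2
    #include <cstdio>

    int main()
    {
        const double rho  = 1.2;        // kg/m^3 (illustrative)
        const double magU = 10.0;       // m/s
        const double pAbs = 101325.0;   // Pa
        const double pKin = pAbs/rho;   // m^2/s^2

        const double pTotAbs = pAbs + 0.5*rho*magU*magU;
        const double pTotKin = pKin + 0.5*magU*magU;

        std::printf("absolute : %g Pa\n", pTotAbs);
        std::printf("kinematic: %g m^2/s^2 (times rho = %g Pa)\n", pTotKin, pTotKin*rho);
        return 0;
    }

Multiplying the kinematic result by rho recovers the absolute result, which is why the two branches are consistent.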
@@ -226,10 +252,8 @@ bool Foam::surfMeshSamplers::execute()
     const wordList fields = acceptable.sortedToc();
     if (!fields.empty())
     {
-        forAll(*this, surfI)
+        for (surfMeshSampler& s : surfaces())
         {
-            surfMeshSampler& s = operator[](surfI);
-
             // Potentially monitor the update for writing geometry?
             if (s.needsUpdate())
             {
@@ -258,21 +282,20 @@ bool Foam::surfMeshSamplers::write()
     wordReList select(fieldSelection_.size() + derivedNames_.size());

     label nElem = 0;
-    forAll(fieldSelection_, i)
+    for (const auto& item : fieldSelection_)
     {
-        select[nElem++] = fieldSelection_[i];
+        select[nElem++] = item;
     }
-    forAll(derivedNames_, i)
+    for (const auto& derivedName : derivedNames_)
     {
-        select[nElem++] = derivedNames_[i];
+        select[nElem++] = derivedName;
     }

     // avoid duplicate entries
     select = wordRes::uniq(select);

-    forAll(*this, surfI)
+    for (const surfMeshSampler& s : surfaces())
     {
-        const surfMeshSampler& s = operator[](surfI);
         s.write(select);
     }

@@ -317,10 +340,8 @@ bool Foam::surfMeshSamplers::read(const dictionary& dict)
     if (this->size())
     {
         Info<< "Reading surface description:" << nl;
-        forAll(*this, surfI)
+        for (surfMeshSampler& s : surfaces())
         {
-            surfMeshSampler& s = operator[](surfI);
-
             Info<< " " << s.name() << nl;
             if (createOnRead)
             {
@@ -370,9 +391,9 @@ void Foam::surfMeshSamplers::readUpdate(const polyMesh::readUpdateState state)

 bool Foam::surfMeshSamplers::needsUpdate() const
 {
-    forAll(*this, surfI)
+    for (const surfMeshSampler& s : surfaces())
     {
-        if (operator[](surfI).needsUpdate())
+        if (s.needsUpdate())
         {
             return true;
         }
@@ -386,9 +407,9 @@ bool Foam::surfMeshSamplers::expire()
 {
     bool justExpired = false;

-    forAll(*this, surfI)
+    for (surfMeshSampler& s : surfaces())
     {
-        if (operator[](surfI).expire())
+        if (s.expire())
         {
             justExpired = true;
         }
@@ -407,9 +428,9 @@ bool Foam::surfMeshSamplers::update()
     }

     bool updated = false;
-    forAll(*this, surfI)
+    for (surfMeshSampler& s : surfaces())
     {
-        if (operator[](surfI).update())
+        if (s.update())
         {
             updated = true;
         }
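The loops above replace index-based forAll iteration and the explicit operator[] lookup with range-based for over the same container. A framework-free sketch of the same rewrite, on a std::vector instead of an OpenFOAM PtrList:

    // Sketch only: the loop modernisation pattern used above.
    #include <cstdio>
    #include <vector>

    int main()
    {
        std::vector<int> samplers{1, 2, 3};

        // Index-based style (roughly what forAll(*this, surfI) expands to)
        for (std::size_t surfI = 0; surfI < samplers.size(); ++surfI)
        {
            std::printf("by index: %d\n", samplers[surfI]);
        }

        // Range-based style, as in: for (surfMeshSampler& s : surfaces())
        for (int& s : samplers)
        {
            std::printf("by range: %d\n", s);
        }

        return 0;
    }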
@@ -134,6 +134,18 @@ class surfMeshSamplers
                 const UList<word>& names
             );

+            //- Access the sampling surfaces
+            inline const PtrList<surfMeshSampler>& surfaces() const
+            {
+                return static_cast<const PtrList<surfMeshSampler>&>(*this);
+            }
+
+            //- Access the sampling surfaces
+            inline PtrList<surfMeshSampler>& surfaces()
+            {
+                return static_cast<PtrList<surfMeshSampler>&>(*this);
+            }
+

            //- Filter acceptable fields types
            template<class Type>
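The new surfaces() accessors expose the sampler list that the functionObject itself derives from, so callers can use range-based for instead of indexing. A hedged sketch of the same idea, with std::vector standing in for the PtrList base class; the type names here are illustrative, not the OpenFOAM classes.

    // Sketch only: a class that IS-A container of samplers exposing itself
    // through a surfaces() accessor so callers can write range-based loops.
    // Public inheritance from std::vector is used purely to mirror the
    // PtrList inheritance in the header above.
    #include <cstdio>
    #include <string>
    #include <vector>

    struct sampler
    {
        std::string name;
    };

    class samplerSet : public std::vector<sampler>
    {
    public:
        const std::vector<sampler>& surfaces() const
        {
            return static_cast<const std::vector<sampler>&>(*this);
        }

        std::vector<sampler>& surfaces()
        {
            return static_cast<std::vector<sampler>&>(*this);
        }
    };

    int main()
    {
        samplerSet set;
        set.push_back({"plane1"});
        set.push_back({"plane2"});

        for (const sampler& s : set.surfaces())
        {
            std::printf("%s\n", s.name.c_str());
        }
        return 0;
    }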
@@ -35,6 +35,7 @@ Description
 \*---------------------------------------------------------------------------*/

 #include "triSurface.H"
+#include "NASCore.H"
 #include "IFstream.H"
 #include "StringStream.H"

@@ -46,25 +47,9 @@ namespace Foam
 // * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //

 // Do weird things to extract number
-static scalar parseNASCoord(const string& s)
+static inline scalar parseNASCoord(const string& s)
 {
-    size_t expSign = s.find_last_of("+-");
-
-    if (expSign != string::npos && expSign > 0 && !isspace(s[expSign-1]))
-    {
-        scalar mantissa = readScalar(IStringStream(s.substr(0, expSign))());
-        scalar exponent = readScalar(IStringStream(s.substr(expSign+1))());
-
-        if (s[expSign] == '-')
-        {
-            exponent = -exponent;
-        }
-        return mantissa*pow(10, exponent);
-    }
-    else
-    {
-        return readScalar(IStringStream(s)());
-    }
+    return fileFormats::NASCore::parseNASCoord(s);
 }


@@ -300,8 +285,8 @@ bool triSurface::readNAS(const fileName& fName)
             // GRID* 126 0 -5.55999875E+02 -5.68730474E+02
             // * 2.14897901E+02
             label index =
-                readLabel(IStringStream(readNASToken(line, 8, linei))());
-            readNASToken(line, 8, linei);
+                readLabel(IStringStream(readNASToken(line, 16, linei))());
+            readNASToken(line, 16, linei);
             scalar x = parseNASCoord(readNASToken(line, 16, linei));
             scalar y = parseNASCoord(readNASToken(line, 16, linei));
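The removed helper above, now delegated to fileFormats::NASCore::parseNASCoord, deals with the Nastran convention of writing reals with an implicit exponent, e.g. 2.148979+2 for 2.148979e+02. The following standalone sketch is not the NASCore implementation; it follows the same idea and additionally accepts ordinary E-notation by checking for a preceding 'E'.

    // Sketch only: parse a Nastran short-field real such as "2.148979+2"
    // (meaning 2.148979e+02) as well as a normal literal like "-5.56E+02".
    #include <cctype>
    #include <cmath>
    #include <cstdio>
    #include <cstdlib>
    #include <string>

    static double parseNasCoord(const std::string& s)
    {
        const auto expSign = s.find_last_of("+-");

        // An embedded sign that is not the leading sign and does not belong
        // to an explicit 'E' exponent marks an implicit exponent.
        if
        (
            expSign != std::string::npos
         && expSign > 0
         && std::toupper(static_cast<unsigned char>(s[expSign - 1])) != 'E'
         && !std::isspace(static_cast<unsigned char>(s[expSign - 1]))
        )
        {
            const double mantissa = std::atof(s.substr(0, expSign).c_str());
            double exponent = std::atof(s.substr(expSign + 1).c_str());

            if (s[expSign] == '-')
            {
                exponent = -exponent;
            }
            return mantissa*std::pow(10.0, exponent);
        }

        return std::atof(s.c_str());
    }

    int main()
    {
        std::printf("%g\n", parseNasCoord("2.148979+2"));      // 214.8979
        std::printf("%g\n", parseNasCoord("-5.55999875E+02")); // -555.999875
        return 0;
    }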
@@ -760,17 +760,19 @@ Foam::triSurface::triSurface
 }


-Foam::triSurface::triSurface(const fileName& name)
+Foam::triSurface::triSurface(const fileName& name, const scalar scaleFactor)
 :
     ParentType(List<Face>(), pointField()),
     patches_(),
     sortedEdgeFacesPtr_(nullptr),
     edgeOwnerPtr_(nullptr)
 {
-    word ext = name.ext();
+    const word ext = name.ext();

     read(name, ext);

+    scalePoints(scaleFactor);
+
     setDefaultPatches();
 }

@@ -886,8 +888,8 @@ void Foam::triSurface::movePoints(const pointField& newPoints)

 void Foam::triSurface::scalePoints(const scalar scaleFactor)
 {
-    // avoid bad scaling
-    if (scaleFactor > 0 && scaleFactor != 1.0)
+    // Avoid bad scaling
+    if (scaleFactor > VSMALL && scaleFactor != 1.0)
     {
         // Remove all geometry dependent data
         clearTopology();
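scalePoints() above now skips factors at or below VSMALL as well as the no-op factor 1.0. A framework-free sketch of that guard applied to a point list; SMALL_FACTOR is an assumed stand-in for VSMALL.

    // Sketch only: scale a point list in place, ignoring non-positive
    // (effectively zero) factors and the identity factor 1.0.
    #include <array>
    #include <cstdio>
    #include <vector>

    using point = std::array<double, 3>;

    static const double SMALL_FACTOR = 1.0e-300;   // stand-in for VSMALL

    static void scalePoints(std::vector<point>& points, double scaleFactor)
    {
        if (scaleFactor > SMALL_FACTOR && scaleFactor != 1.0)
        {
            for (point& p : points)
            {
                p[0] *= scaleFactor;
                p[1] *= scaleFactor;
                p[2] *= scaleFactor;
            }
        }
    }

    int main()
    {
        std::vector<point> pts;
        pts.push_back(point{{1000.0, 0.0, 0.0}});

        scalePoints(pts, -1.0);     // ignored: sentinel meaning "no scaling"
        scalePoints(pts, 1.0);      // ignored: identity
        scalePoints(pts, 0.001);    // applied: e.g. mm to m

        std::printf("%g %g %g\n", pts[0][0], pts[0][1], pts[0][2]);
        return 0;
    }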
@@ -264,46 +264,55 @@ public:
         //- Construct from triangles, patches, points.
         triSurface
         (
-            const List<labelledTri>&,
-            const geometricSurfacePatchList&,
-            const pointField&
+            const List<labelledTri>& triangles,
+            const geometricSurfacePatchList& patches,
+            const pointField& points
         );

         //- Construct from triangles, patches, points. Reuse storage.
         triSurface
         (
-            List<labelledTri>&,
-            const geometricSurfacePatchList&,
-            pointField&,
+            List<labelledTri>& triangles,
+            const geometricSurfacePatchList& patches,
+            pointField& points,
             const bool reuse
         );

         //- Construct by transferring (triangles, points) components.
         triSurface
         (
-            const Xfer<List<labelledTri>>&,
-            const geometricSurfacePatchList&,
-            const Xfer<List<point>>&
+            const Xfer<List<labelledTri>>& triangles,
+            const geometricSurfacePatchList& patches,
+            const Xfer<List<point>>& points
         );

         //- Construct from triangles, points. Set patch names to default.
-        triSurface(const List<labelledTri>&, const pointField&);
+        triSurface
+        (
+            const List<labelledTri>& triangles,
+            const pointField& points
+        );

         //- Construct from triangles, points. Set region to 0 and default
         //  patchName.
-        triSurface(const triFaceList&, const pointField&);
+        triSurface
+        (
+            const triFaceList& triangles,
+            const pointField& points
+        );

-        //- Construct from file name (uses extension to determine type)
-        triSurface(const fileName&);
+        //- Construct from file name (uses extension to determine type).
+        //  Optional (positive, non-zero) point scaling is possible.
+        triSurface(const fileName& name, const scalar scaleFactor = -1);

         //- Construct from Istream
-        triSurface(Istream&);
+        triSurface(Istream& is);

         //- Construct from objectRegistry
         triSurface(const Time& d);

         //- Construct as copy
-        triSurface(const triSurface&);
+        triSurface(const triSurface& ts);


         //- Destructor
@@ -382,10 +391,10 @@ public:
     // Edit

         //- Move points
-        virtual void movePoints(const pointField&);
+        virtual void movePoints(const pointField& newPoints);

-        //- Scale points. A non-positive factor is ignored
-        virtual void scalePoints(const scalar);
+        //- Scale points. A non-positive factor is ignored.
+        virtual void scalePoints(const scalar scaleFactor);

         //- Check/remove duplicate/degenerate triangles
         void checkTriangles(const bool verbose);
@@ -447,13 +456,13 @@ public:
     // Write

         //- Write to Ostream in simple FOAM format
-        void write(Ostream&) const;
+        void write(Ostream& os) const;

         //- Generic write routine. Chooses writer based on extension.
         void write(const fileName&, const bool sortByRegion = false) const;

         //- Write to database
-        void write(const Time&) const;
+        void write(const Time& d) const;

         //- Write some statistics
         void writeStats(Ostream& os) const;
@@ -57,6 +57,11 @@ oversetInterpolation
     method          inverseDistance;
 }

+oversetInterpolationRequired
+{
+    alpha.water;
+}
+
 fluxRequired
 {
     default         no;
@@ -85,6 +85,8 @@ PIMPLE
     ddtCorr             yes;
     correctPhi          no;

+    oversetAdjustPhi    no;
+
     moveMeshOuterCorrectors no;
     turbOnFinalIterOnly no;
 }