Merge commit 'OpenCFD/master' into olesenm

Mark Olesen
2010-01-22 13:23:43 +01:00
79 changed files with 1873 additions and 1753 deletions


@@ -124,11 +124,6 @@ public:
     // Member Functions

-        tmp<volScalarField> mut() const
-        {
-            return mut_;
-        }
-
         //- Return the effective diffusivity for k
         tmp<volScalarField> DkEff() const
         {
@@ -147,41 +142,44 @@ public:
             );
         }

-        //- Return the effective turbulent thermal diffusivity
-        tmp<volScalarField> alphaEff() const
+        //- Return the turbulence viscosity
+        virtual tmp<volScalarField> mut() const
         {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphat_ + alpha())
-            );
+            return mut_;
+        }
+
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const
+        {
+            return alphat_;
         }

         //- Return the turbulence kinetic energy
-        tmp<volScalarField> k() const
+        virtual tmp<volScalarField> k() const
         {
             return k_;
         }

         //- Return the turbulence kinetic energy dissipation rate
-        tmp<volScalarField> epsilon() const
+        virtual tmp<volScalarField> epsilon() const
         {
             return epsilon_;
         }

         //- Return the Reynolds stress tensor
-        tmp<volSymmTensorField> R() const;
+        virtual tmp<volSymmTensorField> R() const;

         //- Return the effective stress tensor including the laminar stress
-        tmp<volSymmTensorField> devRhoReff() const;
+        virtual tmp<volSymmTensorField> devRhoReff() const;

         //- Return the source term for the momentum equation
-        tmp<fvVectorMatrix> divDevRhoReff(volVectorField& U) const;
+        virtual tmp<fvVectorMatrix> divDevRhoReff(volVectorField& U) const;

         //- Solve the turbulence equations and correct the turbulence viscosity
-        void correct();
+        virtual void correct();

         //- Read turbulenceProperties dictionary
-        bool read();
+        virtual bool read();
 };


@@ -40,4 +40,3 @@
 // Momentum corrector
 U -= fvc::grad(p)/AU;
 U.correctBoundaryConditions();
-


@@ -40,6 +40,10 @@ geometry
 {
     type triSurfaceMesh;

+    //tolerance 1E-6;     // optional:non-default tolerance on intersections
+    //maxTreeDepth 10;    // optional:depth of octree. Decrease only in case
+                          // of memory limitations.
+
     // Per region the patchname. If not provided will be <name>_<region>.
     regions
     {


@@ -94,7 +94,7 @@ void backup
 {
     if (fromSet.size())
     {
-        Pout<< " Backing up " << fromName << " into " << toName << endl;
+        Info<< " Backing up " << fromName << " into " << toName << endl;

         topoSet::New(setType, mesh, toName, fromSet)().write();
     }
@@ -525,7 +525,7 @@ bool doCommand
     {
         topoSet& currentSet = currentSetPtr();

-        Pout<< " Set:" << currentSet.name()
+        Info<< " Set:" << currentSet.name()
            << " Size:" << currentSet.size()
            << " Action:" << actionName
            << endl;
@@ -622,7 +622,7 @@ bool doCommand
                   + ".vtk"
                );

-                Pout<< " Writing " << currentSet.name()
+                Info<< " Writing " << currentSet.name()
                    << " (size " << currentSet.size() << ") to "
                    << currentSet.instance()/currentSet.local()
                      /currentSet.name()
@@ -634,7 +634,7 @@ bool doCommand
            }
            else
            {
-                Pout<< " Writing " << currentSet.name()
+                Info<< " Writing " << currentSet.name()
                    << " (size " << currentSet.size() << ") to "
                    << currentSet.instance()/currentSet.local()
                      /currentSet.name() << endl << endl;
@@ -683,7 +683,7 @@ enum commandStatus

 void printMesh(const Time& runTime, const polyMesh& mesh)
 {
-    Pout<< "Time:" << runTime.timeName()
+    Info<< "Time:" << runTime.timeName()
        << " cells:" << mesh.nCells()
        << " faces:" << mesh.nFaces()
        << " points:" << mesh.nPoints()
@@ -703,19 +703,19 @@ commandStatus parseType
 {
     if (setType.empty())
     {
-        Pout<< "Type 'help' for usage information" << endl;
+        Info<< "Type 'help' for usage information" << endl;
         return INVALID;
     }
     else if (setType == "help")
     {
-        printHelp(Pout);
+        printHelp(Info);
         return INVALID;
     }
     else if (setType == "list")
     {
-        printAllSets(mesh, Pout);
+        printAllSets(mesh, Info);
         return INVALID;
     }
@@ -726,7 +726,7 @@ commandStatus parseType
        label nearestIndex = Time::findClosestTimeIndex(Times, requestedTime);

-        Pout<< "Changing time from " << runTime.timeName()
+        Info<< "Changing time from " << runTime.timeName()
            << " to " << Times[nearestIndex].name()
            << endl;
@@ -737,24 +737,24 @@ commandStatus parseType
        {
            case polyMesh::UNCHANGED:
            {
-                Pout<< " mesh not changed." << endl;
+                Info<< " mesh not changed." << endl;
                break;
            }
            case polyMesh::POINTS_MOVED:
            {
-                Pout<< " points moved; topology unchanged." << endl;
+                Info<< " points moved; topology unchanged." << endl;
                break;
            }
            case polyMesh::TOPO_CHANGE:
            {
-                Pout<< " topology changed; patches unchanged." << nl
+                Info<< " topology changed; patches unchanged." << nl
                    << " ";
                printMesh(runTime, mesh);
                break;
            }
            case polyMesh::TOPO_PATCH_CHANGE:
            {
-                Pout<< " topology changed and patches changed." << nl
+                Info<< " topology changed and patches changed." << nl
                    << " ";
                printMesh(runTime, mesh);
@@ -773,7 +773,7 @@ commandStatus parseType
    }
    else if (setType == "quit")
    {
-        Pout<< "Quitting ..." << endl;
+        Info<< "Quitting ..." << endl;
        return QUIT;
    }
@@ -864,7 +864,7 @@ int main(int argc, char *argv[])
    printMesh(runTime, mesh);

    // Print current sets
-    printAllSets(mesh, Pout);
+    printAllSets(mesh, Info);
@@ -874,7 +874,7 @@ int main(int argc, char *argv[])
    {
        fileName batchFile(args.option("batch"));

-        Pout<< "Reading commands from file " << batchFile << endl;
+        Info<< "Reading commands from file " << batchFile << endl;

        // we cannot handle .gz files
        if (!isFile(batchFile, false))
@@ -888,11 +888,11 @@ int main(int argc, char *argv[])
 #if READLINE != 0
    else if (!read_history(historyFile))
    {
-        Pout<< "Successfully read history from " << historyFile << endl;
+        Info<< "Successfully read history from " << historyFile << endl;
    }
 #endif

-    Pout<< "Please type 'help', 'quit' or a set command after prompt." << endl;
+    Info<< "Please type 'help', 'quit' or a set command after prompt." << endl;

    bool ok = true;
@@ -916,7 +916,7 @@ int main(int argc, char *argv[])
        {
            if (!fileStreamPtr->good())
            {
-                Pout<< "End of batch file" << endl;
+                Info<< "End of batch file" << endl;
                break;
            }
@@ -924,7 +924,7 @@ int main(int argc, char *argv[])
            if (rawLine.size())
            {
-                Pout<< "Doing:" << rawLine << endl;
+                Info<< "Doing:" << rawLine << endl;
            }
        }
        else
@@ -945,7 +945,7 @@ int main(int argc, char *argv[])
        }
 #      else
        {
-            Pout<< "Command>" << flush;
+            Info<< "Command>" << flush;
            std::getline(std::cin, rawLine);
        }
 #      endif
@@ -992,7 +992,7 @@ int main(int argc, char *argv[])
        delete fileStreamPtr;
    }

-    Pout<< "\nEnd\n" << endl;
+    Info<< "\nEnd\n" << endl;

    return 0;
 }


@@ -96,6 +96,7 @@ void checkPatch(const polyBoundaryMesh& bMesh, const word& name)
 int main(int argc, char *argv[])
 {
     argList::noParallel();
+#   include "addRegionOption.H"
     argList::validArgs.append("masterPatch");
     argList::validArgs.append("slavePatch");
@@ -109,7 +110,7 @@ int main(int argc, char *argv[])
 #   include "setRootCase.H"
 #   include "createTime.H"
     runTime.functionObjects().off();
-#   include "createMesh.H"
+#   include "createNamedMesh.H"
     const word oldInstance = mesh.pointsInstance();


@@ -31,6 +31,14 @@ Description
     Mainly used to convert binary mesh/field files to ASCII.

+    Problem: any zero-size List written binary gets written as '0'. When
+    reading the file as a dictionary this is interpreted as a label. This
+    is (usually) not a problem when doing patch fields since these get the
+    'uniform', 'nonuniform' prefix. However zone contents are labelLists
+    not labelFields and these go wrong. For now hacked a solution where
+    we detect the keywords in zones and redo the dictionary entries
+    to be labelLists.
+
 \*---------------------------------------------------------------------------*/

 #include "argList.H"
@@ -56,6 +64,82 @@ namespace Foam
 }

+// Hack to do zones which have Lists in them. See above.
+bool writeZones(const word& name, Time& runTime)
+{
+    IOobject io
+    (
+        name,
+        runTime.timeName(),
+        polyMesh::meshSubDir,
+        runTime,
+        IOobject::MUST_READ,
+        IOobject::NO_WRITE,
+        false
+    );
+
+    bool writeOk = false;
+
+    if (io.headerOk())
+    {
+        Info<< " Reading " << io.headerClassName()
+            << " : " << name << endl;
+
+        // Switch off type checking (for reading e.g. faceZones as
+        // generic list of dictionaries).
+        const word oldTypeName = IOPtrList<entry>::typeName;
+        const_cast<word&>(IOPtrList<entry>::typeName) = word::null;
+
+        IOPtrList<entry> meshObject(io);
+
+        forAll(meshObject, i)
+        {
+            if (meshObject[i].isDict())
+            {
+                dictionary& d = meshObject[i].dict();
+
+                if (d.found("faceLabels"))
+                {
+                    d.set("faceLabels", labelList(d.lookup("faceLabels")));
+                }
+
+                if (d.found("flipMap"))
+                {
+                    d.set("flipMap", boolList(d.lookup("flipMap")));
+                }
+
+                if (d.found("cellLabels"))
+                {
+                    d.set("cellLabels", labelList(d.lookup("cellLabels")));
+                }
+
+                if (d.found("pointLabels"))
+                {
+                    d.set("pointLabels", labelList(d.lookup("pointLabels")));
+                }
+            }
+        }
+
+        const_cast<word&>(IOPtrList<entry>::typeName) = oldTypeName;
+        // Fake type back to what was in field
+        const_cast<word&>(meshObject.type()) = io.headerClassName();
+
+        Info<< " Writing " << name << endl;
+
+        // Force writing as ascii
+        writeOk = meshObject.regIOobject::writeObject
+        (
+            IOstream::ASCII,
+            IOstream::currentVersion,
+            runTime.writeCompression()
+        );
+    }
+
+    return writeOk;
+}
+
+
 // Main program:

 int main(int argc, char *argv[])
@@ -76,9 +160,19 @@ int main(int argc, char *argv[])
         writeMeshObject<labelIOList>("neighbour", runTime);
         writeMeshObject<faceIOList>("faces", runTime);
         writeMeshObject<pointIOField>("points", runTime);
-        writeMeshObject<IOPtrList<entry> >("cellZones", runTime);
-        writeMeshObject<IOPtrList<entry> >("faceZones", runTime);
-        writeMeshObject<IOPtrList<entry> >("pointZones", runTime);
+        writeMeshObject<labelIOList>("pointProcAddressing", runTime);
+        writeMeshObject<labelIOList>("faceProcAddressing", runTime);
+        writeMeshObject<labelIOList>("cellProcAddressing", runTime);
+        writeMeshObject<labelIOList>("boundaryProcAddressing", runTime);
+
+        if (runTime.writeFormat() == IOstream::ASCII)
+        {
+            // Only do zones when converting from binary to ascii
+            // The other way gives problems since working on dictionary level.
+            writeZones("cellZones", runTime);
+            writeZones("faceZones", runTime);
+            writeZones("pointZones", runTime);
+        }

         // Get list of objects from the database
         IOobjectList objects(runTime, runTime.timeName());


@@ -1077,83 +1077,92 @@ int main(int argc, char *argv[])
         {
             const faceZone& pp = zones[zoneI];

-            const indirectPrimitivePatch ipp
-            (
-                IndirectList<face>(mesh.faces(), pp),
-                mesh.points()
-            );
+            if (pp.size() > 0)
+            {
+                const indirectPrimitivePatch ipp
+                (
+                    IndirectList<face>(mesh.faces(), pp),
+                    mesh.points()
+                );

-            writer.writePolygonalZone
-            (
-                pp.name(),
-                strandID++, //1+patchIDs.size()+zoneI, //strandID,
-                ipp,
-                allVarLocation
-            );
+                writer.writePolygonalZone
+                (
+                    pp.name(),
+                    strandID++, //1+patchIDs.size()+zoneI, //strandID,
+                    ipp,
+                    allVarLocation
+                );

-            // Write coordinates
-            writer.writeField(ipp.localPoints().component(0)());
-            writer.writeField(ipp.localPoints().component(1)());
-            writer.writeField(ipp.localPoints().component(2)());
+                // Write coordinates
+                writer.writeField(ipp.localPoints().component(0)());
+                writer.writeField(ipp.localPoints().component(1)());
+                writer.writeField(ipp.localPoints().component(2)());

-            // Write all volfields
-            forAll(vsf, i)
-            {
-                writer.writeField
-                (
-                    writer.getFaceField
-                    (
-                        linearInterpolate(vsf[i])(),
-                        pp
-                    )()
-                );
-            }
-            forAll(vvf, i)
-            {
-                writer.writeField
-                (
-                    writer.getFaceField
-                    (
-                        linearInterpolate(vvf[i])(),
-                        pp
-                    )()
-                );
-            }
-            forAll(vSpheretf, i)
-            {
-                writer.writeField
-                (
-                    writer.getFaceField
-                    (
-                        linearInterpolate(vSpheretf[i])(),
-                        pp
-                    )()
-                );
-            }
-            forAll(vSymmtf, i)
-            {
-                writer.writeField
-                (
-                    writer.getFaceField
-                    (
-                        linearInterpolate(vSymmtf[i])(),
-                        pp
-                    )()
-                );
-            }
-            forAll(vtf, i)
-            {
-                writer.writeField
-                (
-                    writer.getFaceField
-                    (
-                        linearInterpolate(vtf[i])(),
-                        pp
-                    )()
-                );
-            }
+                // Write all volfields
+                forAll(vsf, i)
+                {
+                    writer.writeField
+                    (
+                        writer.getFaceField
+                        (
+                            linearInterpolate(vsf[i])(),
+                            pp
+                        )()
+                    );
+                }
+                forAll(vvf, i)
+                {
+                    writer.writeField
+                    (
+                        writer.getFaceField
+                        (
+                            linearInterpolate(vvf[i])(),
+                            pp
+                        )()
+                    );
+                }
+                forAll(vSpheretf, i)
+                {
+                    writer.writeField
+                    (
+                        writer.getFaceField
+                        (
+                            linearInterpolate(vSpheretf[i])(),
+                            pp
+                        )()
+                    );
+                }
+                forAll(vSymmtf, i)
+                {
+                    writer.writeField
+                    (
+                        writer.getFaceField
+                        (
+                            linearInterpolate(vSymmtf[i])(),
+                            pp
+                        )()
+                    );
+                }
+                forAll(vtf, i)
+                {
+                    writer.writeField
+                    (
+                        writer.getFaceField
+                        (
+                            linearInterpolate(vtf[i])(),
+                            pp
+                        )()
+                    );
+                }

-            writer.writeConnectivity(ipp);
+                writer.writeConnectivity(ipp);
+            }
+            else
+            {
+                Info<< " Skipping zero sized faceZone " << zoneI
+                    << "\t" << pp.name()
+                    << nl << endl;
+            }
         }
     }


@@ -68,7 +68,7 @@ fi
 if [ -r $ParaView_DIR ]
 then
     export PATH=$ParaView_DIR/bin:$PATH
-    export PV_PLUGIN_PATH=$FOAM_LIBBIN
+    export PV_PLUGIN_PATH=$FOAM_LIBBIN/paraview
 fi

 unset cmake ParaView_PYTHON_DIR


@@ -62,7 +62,7 @@ endif
 if ( -r $ParaView_INST_DIR ) then
     set path=($ParaView_DIR/bin $path)
-    setenv PV_PLUGIN_PATH $FOAM_LIBBIN
+    setenv PV_PLUGIN_PATH $FOAM_LIBBIN/paraview
 endif

 unset cmake paraviewMajor paraviewPython


@@ -91,7 +91,7 @@ bool Foam::IOobject::readHeader(Istream& is)
             << "First token could not be read or is not the keyword 'FoamFile'"
             << nl << nl << "Check header is of the form:" << nl << endl;

-        writeHeader(SeriousError);
+        writeHeader(Info);

         return false;
     }


@@ -77,6 +77,9 @@ public:
         //- Apply and accumulate the effect of the given constraint direction
         inline void applyConstraint(const vector& cd);

+        //- Combine constraints
+        inline void combine(const pointConstraint&);
+
         //- Return the accumulated constraint transformation tensor
         inline tensor constraintTransformation() const;
 };
}; };

View File

@@ -69,6 +69,47 @@ void Foam::pointConstraint::applyConstraint(const vector& cd)
 }


+void Foam::pointConstraint::combine(const pointConstraint& pc)
+{
+    if (first() == 0)
+    {
+        operator=(pc);
+    }
+    else if (first() == 1)
+    {
+        // Save single normal
+        vector n = second();
+        // Apply to supplied point constaint
+        operator=(pc);
+        applyConstraint(n);
+    }
+    else if (first() == 2)
+    {
+        if (pc.first() == 0)
+        {}
+        else if (pc.first() == 1)
+        {
+            applyConstraint(pc.second());
+        }
+        else if (pc.first() == 2)
+        {
+            // Both constrained to line. Same (+-)direction?
+            if (mag(second() & pc.second()) <= (1.0-1e-3))
+            {
+                // Different directions
+                first() = 3;
+                second() = vector::zero;
+            }
+        }
+        else
+        {
+            first() = 3;
+            second() = vector::zero;
+        }
+    }
+}
+
+
 Foam::tensor Foam::pointConstraint::constraintTransformation() const
 {
     if (first() == 0)
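A minimal usage sketch of the new combine() (not from the commit; it assumes the Tuple2-style first()/second() accessors used above, where first() counts accumulated constraints and second() holds the constraint direction):

    // Two plane constraints with independent normals reduce to a line:
    pointConstraint a;
    a.applyConstraint(vector(1, 0, 0));     // first() == 1: fixed to a plane
    pointConstraint b;
    b.applyConstraint(vector(0, 1, 0));     // a second, non-parallel plane
    a.combine(b);                           // first() == 2: fixed to a line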


@@ -132,7 +132,7 @@ public:
             //  associated with any faces
             virtual const labelList& loneMeshPoints() const;

-            //- Return point unit normals. Not impelemented.
+            //- Return point unit normals. Not implemented.
             virtual const vectorField& pointNormals() const;
 };


@@ -199,13 +199,20 @@ Foam::scalarField Foam::coupledPolyPatch::calcFaceTol
         const face& f = faces[faceI];

+        // 1. calculate a typical size of the face. Use maximum distance
+        //    to face centre
         scalar maxLenSqr = -GREAT;
+        // 2. as measure of truncation error when comparing two coordinates
+        //    use SMALL * maximum component
+        scalar maxCmpt = -GREAT;

         forAll(f, fp)
         {
-            maxLenSqr = max(maxLenSqr, magSqr(points[f[fp]] - cc));
+            const point& pt = points[f[fp]];
+            maxLenSqr = max(maxLenSqr, magSqr(pt - cc));
+            maxCmpt = max(maxCmpt, cmptMax(cmptMag(pt)));
         }
-        tols[faceI] = matchTol * Foam::sqrt(maxLenSqr);
+        tols[faceI] = max(SMALL*maxCmpt, matchTol*Foam::sqrt(maxLenSqr));
     }

     return tols;
 }
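A rough numeric sketch of why the max() floor was added (not from the commit; assumes a double-precision SMALL of about 1e-15 and a typical matchTol of 1e-4):

    // Face of ~1e-3 m extent located ~1e9 m from the origin:
    //   geometric tolerance : matchTol*sqrt(maxLenSqr) ~ 1e-4 * 1e-3 = 1e-7
    //   truncation floor    : SMALL*maxCmpt            ~ 1e-15 * 1e9 = 1e-6
    // Far from the origin the floor wins, keeping the comparison tolerance
    // above the coordinate precision actually representable there.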


@@ -446,19 +446,4 @@ Foam::directions::directions
 }

-// * * * * * * * * * * * * * * * * Destructor * * * * * * * * * * * * * * * //
-
-// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
-
-// * * * * * * * * * * * * * * * Member Operators * * * * * * * * * * * * * //
-
-// * * * * * * * * * * * * * * * Friend Functions * * * * * * * * * * * * * //
-
-// * * * * * * * * * * * * * * * Friend Operators * * * * * * * * * * * * * //
-
 // ************************************************************************* //


@@ -824,7 +824,7 @@ bool Foam::polyMeshGeometry::checkFaceSkewness
             // (i.e. treat as if mirror cell on other side)

             vector faceNormal = faceAreas[faceI];
-            faceNormal /= mag(faceNormal) + VSMALL;
+            faceNormal /= mag(faceNormal) + ROOTVSMALL;

             vector dOwn = faceCentres[faceI] - cellCentres[own[faceI]];
@@ -834,7 +834,7 @@ bool Foam::polyMeshGeometry::checkFaceSkewness
             scalar skewness =
                 mag(faceCentres[faceI] - faceIntersection)
-               /(2*mag(dWall) + VSMALL);
+               /(2*mag(dWall) + ROOTVSMALL);

             // Check if the skewness vector is greater than the PN vector.
             // This does not cause trouble but is a good indication of a poor


@@ -142,7 +142,7 @@ public:
         // Access

-            //- Retirn local reference cast into the cyclic patch
+            //- Return local reference cast into the cyclic patch
             const cyclicFvPatch& cyclicPatch() const
             {
                 return cyclicPatch_;


@@ -95,7 +95,7 @@ protected:
         class unionEqOp
         {
             public:
-            void operator()( labelList& x, const labelList& y ) const;
+            void operator()(labelList& x, const labelList& y) const;
         };

         //- Collect cell neighbours of faces in global numbering


@@ -42,13 +42,31 @@ Description
 namespace Foam
 {
-    typedef ReactingMultiphaseCloud<BasicReactingParcel<constGasThermoPhysics> >
+    typedef ReactingMultiphaseCloud
+    <
+        BasicReactingMultiphaseParcel
+        <
+            constGasThermoPhysics
+        >
+    >
         constThermoReactingMultiphaseCloud;

-    typedef ReactingMultiphaseCloud<BasicReactingParcel<gasThermoPhysics> >
+    typedef ReactingMultiphaseCloud
+    <
+        BasicReactingMultiphaseParcel
+        <
+            gasThermoPhysics
+        >
+    >
         thermoReactingMultiphaseCloud;

-    typedef ReactingMultiphaseCloud<BasicReactingParcel<icoPoly8ThermoPhysics> >
+    typedef ReactingMultiphaseCloud
+    <
+        BasicReactingMultiphaseParcel
+        <
+            icoPoly8ThermoPhysics
+        >
+    >
         icoPoly8ThermoReactingMultiphaseCloud;
 }


@@ -401,7 +401,7 @@ void Foam::ReactingMultiphaseParcel<ParcelType>::calc
             td.cloud().hcTrans()[cellI] +=
                 np0
                *dMassGas[i]
-               *td.cloud().mcCarrierThermo().speciesData()[gid].H(T0);
+               *td.cloud().mcCarrierThermo().speciesData()[gid].Hc();
         }
         forAll(YLiquid_, i)
         {
@@ -410,7 +410,7 @@ void Foam::ReactingMultiphaseParcel<ParcelType>::calc
             td.cloud().hcTrans()[cellI] +=
                 np0
                *dMassLiquid[i]
-               *td.cloud().mcCarrierThermo().speciesData()[gid].H(T0);
+               *td.cloud().mcCarrierThermo().speciesData()[gid].Hc();
         }
 /*
         // No mapping between solid components and carrier phase
@@ -421,7 +421,7 @@ void Foam::ReactingMultiphaseParcel<ParcelType>::calc
             td.cloud().hcTrans()[cellI] +=
                 np0
                *dMassSolid[i]
-               *td.cloud().mcCarrierThermo().speciesData()[gid].H(T0);
+               *td.cloud().mcCarrierThermo().speciesData()[gid].Hc();
         }
 */
         forAll(dMassSRCarrier, i)
@@ -430,7 +430,7 @@ void Foam::ReactingMultiphaseParcel<ParcelType>::calc
             td.cloud().hcTrans()[cellI] +=
                 np0
                *dMassSRCarrier[i]
-               *td.cloud().mcCarrierThermo().speciesData()[i].H(T0);
+               *td.cloud().mcCarrierThermo().speciesData()[i].Hc();
         }

         // Update momentum transfer


@@ -122,7 +122,7 @@ void Foam::ReactingParcel<ParcelType>::correctSurfaceValues
     }

     // Far field carrier molar fractions
-    scalarField Xinf(Y_.size());
+    scalarField Xinf(td.cloud().mcCarrierThermo().speciesData().size());

     forAll(Xinf, i)
     {


@@ -2490,13 +2490,18 @@ void Foam::autoLayerDriver::mergePatchFacesUndo
         << "---------------------------" << nl
-        << " - which are on the same patch" << nl
-        << " - which make an angle < " << layerParams.featureAngle()
+        << "- which are on the same patch" << nl
+        << "- which make an angle < " << layerParams.featureAngle()
         << " degrees"
         << nl
-        << " (cos:" << minCos << ')' << nl
-        << " - as long as the resulting face doesn't become concave"
+        << " (cos:" << minCos << ')' << nl
+        << "- as long as the resulting face doesn't become concave"
         << " by more than "
         << layerParams.concaveAngle() << " degrees" << nl
-        << " (0=straight, 180=fully concave)" << nl
+        << " (0=straight, 180=fully concave)" << nl
         << endl;

     label nChanged = mergePatchFacesUndo(minCos, concaveCos, motionDict);
@@ -2546,11 +2551,6 @@ void Foam::autoLayerDriver::addLayers
     );

-    // Undistorted edge length
-    const scalar edge0Len = meshRefiner_.meshCutter().level0EdgeLength();
-    const labelList& cellLevel = meshRefiner_.meshCutter().cellLevel();
-
     // Point-wise extrusion data
     // ~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -2612,6 +2612,10 @@ void Foam::autoLayerDriver::addLayers
     // Disable extrusion on warped faces
     // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

+    // Undistorted edge length
+    const scalar edge0Len = meshRefiner_.meshCutter().level0EdgeLength();
+    const labelList& cellLevel = meshRefiner_.meshCutter().cellLevel();
+
     handleWarpedFaces
     (
         pp,
@@ -2651,6 +2655,10 @@ void Foam::autoLayerDriver::addLayers
     }

+    // Undistorted edge length
+    const scalar edge0Len = meshRefiner_.meshCutter().level0EdgeLength();
+    const labelList& cellLevel = meshRefiner_.meshCutter().cellLevel();
+
     // Determine (wanted) point-wise layer thickness and expansion ratio
     scalarField thickness(pp().nPoints());
     scalarField minThickness(pp().nPoints());


@@ -54,6 +54,7 @@ License
 #include "Random.H"
 #include "searchableSurfaces.H"
 #include "treeBoundBox.H"
+#include "zeroGradientFvPatchFields.H"

 // * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
@@ -2128,7 +2129,8 @@ void Foam::meshRefinement::dumpRefinementLevel() const
             false
         ),
         mesh_,
-        dimensionedScalar("zero", dimless, 0)
+        dimensionedScalar("zero", dimless, 0),
+        zeroGradientFvPatchScalarField::typeName
     );

     const labelList& cellLevel = meshCutter_.cellLevel();


@@ -496,23 +496,63 @@ Foam::labelList Foam::refinementSurfaces::getClosedNamedSurfaces() const
 }


-// Count number of triangles per surface region
-Foam::labelList Foam::refinementSurfaces::countRegions(const triSurface& s)
-{
-    const geometricSurfacePatchList& regions = s.patches();
-
-    labelList nTris(regions.size(), 0);
-
-    forAll(s, triI)
-    {
-        nTris[s[triI].region()]++;
-    }
-    return nTris;
-}
+// // Count number of triangles per surface region
+// Foam::labelList Foam::refinementSurfaces::countRegions(const triSurface& s)
+// {
+//     const geometricSurfacePatchList& regions = s.patches();
+//
+//     labelList nTris(regions.size(), 0);
+//
+//     forAll(s, triI)
+//     {
+//         nTris[s[triI].region()]++;
+//     }
+//     return nTris;
+// }


+// // Pre-calculate the refinement level for every element
+// void Foam::refinementSurfaces::wantedRefinementLevel
+// (
+//     const shellSurfaces& shells,
+//     const label surfI,
+//     const List<pointIndexHit>& info,    // Indices
+//     const pointField& ctrs,             // Representative coordinate
+//     labelList& minLevelField
+// ) const
+// {
+//     const searchableSurface& geom = allGeometry_[surfaces_[surfI]];
+//
+//     // Get per element the region
+//     labelList region;
+//     geom.getRegion(info, region);
+//
+//     // Initialise fields to region wise minLevel
+//     minLevelField.setSize(ctrs.size());
+//     minLevelField = -1;
+//
+//     forAll(minLevelField, i)
+//     {
+//         if (info[i].hit())
+//         {
+//             minLevelField[i] = minLevel(surfI, region[i]);
+//         }
+//     }
+//
+//     // Find out if triangle inside shell with higher level
+//     // What level does shell want to refine fc to?
+//     labelList shellLevel;
+//     shells.findHigherLevel(ctrs, minLevelField, shellLevel);
+//
+//     forAll(minLevelField, i)
+//     {
+//         minLevelField[i] = max(minLevelField[i], shellLevel[i]);
+//     }
+// }


 // Precalculate the refinement level for every element of the searchable
-// surface. This is currently hardcoded for triSurfaceMesh only.
+// surface.
 void Foam::refinementSurfaces::setMinLevelFields
 (
     const shellSurfaces& shells
@@ -522,58 +562,55 @@ void Foam::refinementSurfaces::setMinLevelFields
     {
         const searchableSurface& geom = allGeometry_[surfaces_[surfI]];

-        if (isA<triSurfaceMesh>(geom))
+        // Precalculation only makes sense if there are different regions
+        // (so different refinement levels possible) and there are some
+        // elements. Possibly should have 'enough' elements to have fine
+        // enough resolution but for now just make sure we don't catch e.g.
+        // searchableBox (size=6)
+        if (geom.regions().size() > 1 && geom.globalSize() > 10)
         {
-            const triSurfaceMesh& triMesh = refCast<const triSurfaceMesh>(geom);
-
-            autoPtr<triSurfaceLabelField> minLevelFieldPtr
-            (
-                new triSurfaceLabelField
-                (
-                    IOobject
-                    (
-                        "minLevel",
-                        triMesh.objectRegistry::time().timeName(), // instance
-                        "triSurface",                              // local
-                        triMesh,
-                        IOobject::NO_READ,
-                        IOobject::AUTO_WRITE
-                    ),
-                    triMesh,
-                    dimless
-                )
-            );
-            triSurfaceLabelField& minLevelField = minLevelFieldPtr();
-
-            const triSurface& s = static_cast<const triSurface&>(triMesh);
-
-            // Initialise fields to region wise minLevel
-            forAll(s, triI)
+            // Representative local coordinates
+            const pointField ctrs = geom.coordinates();
+
+            labelList minLevelField(ctrs.size(), -1);
             {
-                minLevelField[triI] = minLevel(surfI, s[triI].region());
+                // Get the element index in a roundabout way. Problem is e.g.
+                // distributed surface where local indices differ from global
+                // ones (needed for getRegion call)
+                List<pointIndexHit> info;
+                geom.findNearest
+                (
+                    ctrs,
+                    scalarField(ctrs.size(), sqr(GREAT)),
+                    info
+                );
+
+                // Get per element the region
+                labelList region;
+                geom.getRegion(info, region);
+
+                // From the region get the surface-wise refinement level
+                forAll(minLevelField, i)
+                {
+                    if (info[i].hit())
+                    {
+                        minLevelField[i] = minLevel(surfI, region[i]);
+                    }
+                }
             }

             // Find out if triangle inside shell with higher level
-            pointField fc(s.size());
-            forAll(s, triI)
-            {
-                fc[triI] = s[triI].centre(s.points());
-            }
-
             // What level does shell want to refine fc to?
             labelList shellLevel;
-            shells.findHigherLevel(fc, minLevelField, shellLevel);
+            shells.findHigherLevel(ctrs, minLevelField, shellLevel);

-            forAll(minLevelField, triI)
+            forAll(minLevelField, i)
             {
-                minLevelField[triI] = max
-                (
-                    minLevelField[triI],
-                    shellLevel[triI]
-                );
+                minLevelField[i] = max(minLevelField[i], shellLevel[i]);
             }

-            // Store field on triMesh
-            minLevelFieldPtr.ptr()->store();
+            // Store minLevelField on surface
+            const_cast<searchableSurface&>(geom).setField(minLevelField);
         }
     }
 }
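The cached-level machinery above and the intersection test below form a round trip; a sketch (not from the commit) of the protocol:

    // setMinLevelFields(): one refinement level per element, cached on the
    // surface (a no-op for surfaces that do not implement setField()).
    const_cast<searchableSurface&>(geom).setField(minLevelField);

    // findHigherIntersection(): retrieve the cache at the hit points; an
    // empty result signals "no cache" and the per-region minLevel() lookup
    // is used as the fallback.
    labelList levels;
    geom.getField(intersectionInfo, levels);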
@@ -601,44 +638,89 @@ void Foam::refinementSurfaces::findHigherIntersection
         return;
     }

+    if (surfaces_.size() == 1)
+    {
+        // Optimisation: single segmented surface. No need to duplicate
+        // point storage.
+
+        label surfI = 0;
+
+        const searchableSurface& geom = allGeometry_[surfaces_[surfI]];
+
+        // Do intersection test
+        List<pointIndexHit> intersectionInfo(start.size());
+        geom.findLineAny(start, end, intersectionInfo);
+
+        // See if a cached level field available
+        labelList minLevelField;
+        geom.getField(intersectionInfo, minLevelField);
+        bool haveLevelField =
+        (
+            returnReduce(minLevelField.size(), sumOp<label>())
+          > 0
+        );
+
+        if (!haveLevelField && geom.regions().size() == 1)
+        {
+            minLevelField = labelList
+            (
+                intersectionInfo.size(),
+                minLevel(surfI, 0)
+            );
+            haveLevelField = true;
+        }
+
+        if (haveLevelField)
+        {
+            forAll(intersectionInfo, i)
+            {
+                if
+                (
+                    intersectionInfo[i].hit()
+                 && minLevelField[i] > currentLevel[i]
+                )
+                {
+                    surfaces[i] = surfI;    // index of surface
+                    surfaceLevel[i] = minLevelField[i];
+                }
+            }
+            return;
+        }
+    }
+
     // Work arrays
-    labelList hitMap(identity(start.size()));
     pointField p0(start);
     pointField p1(end);
-    List<pointIndexHit> hitInfo(start.size());
+    labelList intersectionToPoint(identity(start.size()));
+    List<pointIndexHit> intersectionInfo(start.size());

     forAll(surfaces_, surfI)
     {
         const searchableSurface& geom = allGeometry_[surfaces_[surfI]];

-        geom.findLineAny(p0, p1, hitInfo);
+        // Do intersection test
+        geom.findLineAny(p0, p1, intersectionInfo);

+        // See if a cached level field available
         labelList minLevelField;
-        if (isA<triSurfaceMesh>(geom))
-        {
-            const triSurfaceMesh& triMesh = refCast<const triSurfaceMesh>(geom);
-            triMesh.getField
-            (
-                "minLevel",
-                hitInfo,
-                minLevelField
-            );
-        }
+        geom.getField(intersectionInfo, minLevelField);

-        // Copy all hits into arguments, continue with misses
-        label newI = 0;
-        forAll(hitInfo, hitI)
+        // Copy all hits into arguments, In-place compact misses.
+        label missI = 0;
+        forAll(intersectionInfo, i)
         {
             // Get the minLevel for the point
             label minLocalLevel = -1;

-            if (hitInfo[hitI].hit())
+            if (intersectionInfo[i].hit())
             {
                 // Check if minLevelField for this surface.
                 if (minLevelField.size())
                 {
-                    minLocalLevel = minLevelField[hitI];
+                    minLocalLevel = minLevelField[i];
                 }
                 else
                 {
@@ -648,36 +730,35 @@ void Foam::refinementSurfaces::findHigherIntersection
                 }
             }

-            label pointI = hitMap[hitI];
+            label pointI = intersectionToPoint[i];

             if (minLocalLevel > currentLevel[pointI])
             {
+                // Mark point for refinement
                 surfaces[pointI] = surfI;
                 surfaceLevel[pointI] = minLocalLevel;
             }
             else
             {
-                if (hitI != newI)
-                {
-                    hitMap[newI] = hitMap[hitI];
-                    p0[newI] = p0[hitI];
-                    p1[newI] = p1[hitI];
-                }
-                newI++;
+                p0[missI] = p0[pointI];
+                p1[missI] = p1[pointI];
+                intersectionToPoint[missI] = pointI;
+                missI++;
             }
         }

         // All done? Note that this decision should be synchronised
-        if (returnReduce(newI, sumOp<label>()) == 0)
+        if (returnReduce(missI, sumOp<label>()) == 0)
         {
             break;
         }

-        // Trim and continue
-        hitMap.setSize(newI);
-        p0.setSize(newI);
-        p1.setSize(newI);
-        hitInfo.setSize(newI);
+        // Trim misses
+        p0.setSize(missI);
+        p1.setSize(missI);
+        intersectionToPoint.setSize(missI);
+        intersectionInfo.setSize(missI);
     }
 }


@@ -218,8 +218,8 @@ public:
                 const shellSurfaces& shells
             );

-            //- Helper: count number of triangles per region
-            static labelList countRegions(const triSurface&);
+            ////- Helper: count number of triangles per region
+            //static labelList countRegions(const triSurface&);

         // Searching


@@ -957,6 +957,7 @@ Foam::point Foam::indexedOctree<Type>::pushPointIntoFace
     }
     else if (nFaces == 1)
     {
+        // Point is on a single face
         keepFaceID = faceIndices[0];
     }
     else
@@ -1782,16 +1783,6 @@ Foam::pointIndexHit Foam::indexedOctree<Type>::findLine
     label i = 0;
     for (; i < 100000; i++)
     {
-        if (verbose)
-        {
-            Pout<< "iter:" << i
-                << " at startPoint:" << hitInfo.rawPoint() << endl
-                << " node:" << nodeI
-                << " octant:" << octant
-                << " bb:" << subBbox(nodeI, octant) << endl;
-        }
-
         // Ray-trace to end of current node. Updates point (either on triangle
         // in case of hit or on node bounding box in case of miss)
@@ -1808,6 +1799,19 @@ Foam::pointIndexHit Foam::indexedOctree<Type>::findLine
             )
         );

+        if (verbose)
+        {
+            Pout<< "iter:" << i
+                << " at current:" << hitInfo.rawPoint()
+                << " (perturbed:" << startPoint << ")" << endl
+                << " node:" << nodeI
+                << " octant:" << octant
+                << " bb:" << subBbox(nodeI, octant) << endl;
+        }
+
         // Faces of current bounding box current point is on
         direction hitFaceID = 0;
@@ -1833,12 +1837,23 @@ Foam::pointIndexHit Foam::indexedOctree<Type>::findLine
             break;
         }

-        if (hitFaceID == 0)
+        if (hitFaceID == 0 || hitInfo.rawPoint() == treeEnd)
         {
             // endpoint inside the tree. Return miss.
             break;
         }

+        // Create a point on other side of face.
+        point perturbedPoint
+        (
+            pushPoint
+            (
+                octantBb,
+                hitFaceID,
+                hitInfo.rawPoint(),
+                false                   // push outside of octantBb
+            )
+        );
+
         if (verbose)
         {
@@ -1848,14 +1863,7 @@ Foam::pointIndexHit Foam::indexedOctree<Type>::findLine
                 << " node:" << nodeI
                 << " octant:" << octant
                 << " bb:" << subBbox(nodeI, octant) << nl
-                << " walking to neighbour containing:"
-                << pushPoint
-                   (
-                       octantBb,
-                       hitFaceID,
-                       hitInfo.rawPoint(),
-                       false            // push outside of octantBb
-                   )
+                << " walking to neighbour containing:" << perturbedPoint
                 << endl;
         }
@@ -1866,13 +1874,7 @@ Foam::pointIndexHit Foam::indexedOctree<Type>::findLine
         bool ok = walkToNeighbour
         (
-            pushPoint
-            (
-                octantBb,
-                hitFaceID,
-                hitInfo.rawPoint(),
-                false                   // push outside of octantBb
-            ),
+            perturbedPoint,
             hitFaceID,                  // face(s) that hitInfo is on
             nodeI,


@@ -463,7 +463,7 @@ const Foam::indexedOctree<Foam::treeDataFace>& Foam::meshSearch::boundaryTree()
         treeBoundBox overallBb(mesh_.points());
         Random rndGen(123456);
-        overallBb.extend(rndGen, 1E-4);
+        overallBb = overallBb.extend(rndGen, 1E-4);
         overallBb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
         overallBb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
@@ -497,7 +497,7 @@ const Foam::indexedOctree<Foam::treeDataCell>& Foam::meshSearch::cellTree()
         treeBoundBox overallBb(mesh_.points());
         Random rndGen(123456);
-        overallBb.extend(rndGen, 1E-4);
+        overallBb = overallBb.extend(rndGen, 1E-4);
         overallBb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
         overallBb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
@@ -531,7 +531,7 @@ const Foam::indexedOctree<Foam::treeDataPoint>&
         treeBoundBox overallBb(mesh_.cellCentres());
         Random rndGen(123456);
-        overallBb.extend(rndGen, 1E-4);
+        overallBb = overallBb.extend(rndGen, 1E-4);
         overallBb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
         overallBb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);


@@ -912,41 +912,6 @@ Foam::distributedTriSurfaceMesh::independentlyDistributedBbs
 }


-void Foam::distributedTriSurfaceMesh::calcBounds
-(
-    boundBox& bb,
-    label& nPoints
-) const
-{
-    // Unfortunately nPoints constructs meshPoints() so do compact version
-    // ourselves
-
-    PackedBoolList pointIsUsed(points().size());
-
-    nPoints = 0;
-    bb.min() = point(VGREAT, VGREAT, VGREAT);
-    bb.max() = point(-VGREAT, -VGREAT, -VGREAT);
-
-    const triSurface& s = static_cast<const triSurface&>(*this);
-
-    forAll(s, triI)
-    {
-        const labelledTri& f = s[triI];
-
-        forAll(f, fp)
-        {
-            label pointI = f[fp];
-
-            if (pointIsUsed.set(pointI, 1u))
-            {
-                bb.min() = ::Foam::min(bb.min(), points()[pointI]);
-                bb.max() = ::Foam::max(bb.max(), points()[pointI]);
-                nPoints++;
-            }
-        }
-    }
-}
-
-
 // Does any part of triangle overlap bb.
 bool Foam::distributedTriSurfaceMesh::overlaps
 (
@@ -1935,20 +1900,7 @@ void Foam::distributedTriSurfaceMesh::getNormal
 {
     if (!Pstream::parRun())
     {
-        normal.setSize(info.size());
-        forAll(info, i)
-        {
-            if (info[i].hit())
-            {
-                normal[i] = faceNormals()[info[i].index()];
-            }
-            else
-            {
-                // Set to what?
-                normal[i] = vector::zero;
-            }
-        }
+        triSurfaceMesh::getNormal(info, normal);
         return;
     }
@@ -2006,70 +1958,64 @@ void Foam::distributedTriSurfaceMesh::getNormal
 void Foam::distributedTriSurfaceMesh::getField
 (
-    const word& fieldName,
     const List<pointIndexHit>& info,
     labelList& values
 ) const
 {
-    const triSurfaceLabelField& fld = lookupObject<triSurfaceLabelField>
-    (
-        fieldName
-    );
-
     if (!Pstream::parRun())
     {
-        values.setSize(info.size());
-        forAll(info, i)
-        {
-            if (info[i].hit())
-            {
-                values[i] = fld[info[i].index()];
-            }
-        }
+        triSurfaceMesh::getField(info, values);
         return;
     }

-    // Get query data (= local index of triangle)
-    // ~~~~~~~~~~~~~~
-
-    labelList triangleIndex(info.size());
-    autoPtr<mapDistribute> mapPtr
-    (
-        calcLocalQueries
-        (
-            info,
-            triangleIndex
-        )
-    );
-    const mapDistribute& map = mapPtr();
-
-    // Do my tests
-    // ~~~~~~~~~~~
-
-    values.setSize(triangleIndex.size());
-
-    forAll(triangleIndex, i)
+    if (foundObject<triSurfaceLabelField>("values"))
     {
-        label triI = triangleIndex[i];
-        values[i] = fld[triI];
-    }
-
-    // Send back results
-    // ~~~~~~~~~~~~~~~~~
-
-    map.distribute
-    (
-        Pstream::nonBlocking,
-        List<labelPair>(0),
-        info.size(),
-        map.constructMap(),     // what to send
-        map.subMap(),           // what to receive
-        values
-    );
+        const triSurfaceLabelField& fld = lookupObject<triSurfaceLabelField>
+        (
+            "values"
+        );
+
+        // Get query data (= local index of triangle)
+        // ~~~~~~~~~~~~~~
+
+        labelList triangleIndex(info.size());
+        autoPtr<mapDistribute> mapPtr
+        (
+            calcLocalQueries
+            (
+                info,
+                triangleIndex
+            )
+        );
+        const mapDistribute& map = mapPtr();
+
+        // Do my tests
+        // ~~~~~~~~~~~
+
+        values.setSize(triangleIndex.size());
+
+        forAll(triangleIndex, i)
+        {
+            label triI = triangleIndex[i];
+            values[i] = fld[triI];
+        }
+
+        // Send back results
+        // ~~~~~~~~~~~~~~~~~
+
+        map.distribute
+        (
+            Pstream::nonBlocking,
+            List<labelPair>(0),
+            info.size(),
+            map.constructMap(),     // what to send
+            map.subMap(),           // what to receive
+            values
+        );
+    }
 }


@@ -217,9 +217,6 @@ private:
             const triSurface&
         );

-        //- Calculate surface bounding box
-        void calcBounds(boundBox& bb, label& nPoints) const;
-
         //- Does any part of triangle overlap bb.
         static bool overlaps
         (
@@ -418,7 +415,7 @@ public:
            //  Should really be split into a routine to determine decomposition
            //  and one that does actual distribution but determining
            //  decomposition with duplicate triangle merging requires
-           //  same amoun as work as actual distribution.
+           //  same amount as work as actual distribution.
            virtual void distribute
            (
                const List<treeBoundBox>&,
@@ -430,14 +427,9 @@ public:
        // Other

-           //- Specific to triSurfaceMesh: from a set of hits (points and
+           //- WIP. From a set of hits (points and
            //  indices) get the specified field. Misses do not get set.
-           virtual void getField
-           (
-               const word& fieldName,
-               const List<pointIndexHit>&,
-               labelList& values
-           ) const;
+           virtual void getField(const List<pointIndexHit>&, labelList&) const;

            //- Subset the part of surface that is overlapping bounds.
            static triSurface overlappingSurface


@@ -229,6 +229,21 @@ const Foam::wordList& Foam::searchableBox::regions() const
 }


+Foam::pointField Foam::searchableBox::coordinates() const
+{
+    pointField ctrs(6);
+
+    const pointField pts = treeBoundBox::points();
+    const faceList& fcs = treeBoundBox::faces;
+
+    forAll(fcs, i)
+    {
+        ctrs[i] = fcs[i].centre(pts);
+    }
+
+    return ctrs;
+}
+
+
 Foam::pointIndexHit Foam::searchableBox::findNearest
 (
     const point& sample,


@@ -127,6 +127,9 @@ public:
                 return 6;
             }

+            //- Get representative set of element coordinates
+            //  Usually the element centres (should be of length size()).
+            virtual pointField coordinates() const;

         // Single point queries.


@@ -40,6 +40,14 @@ addToRunTimeSelectionTable(searchableSurface, searchableCylinder, dict);

 // * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //

+Foam::pointField Foam::searchableCylinder::coordinates() const
+{
+    pointField ctrs(1, 0.5*(point1_ + point2_));
+
+    return ctrs;
+}
+
+
 Foam::pointIndexHit Foam::searchableCylinder::findNearest
 (
     const point& sample,


@@ -150,6 +150,10 @@ public:
                 return 1;
             }

+            //- Get representative set of element coordinates
+            //  Usually the element centres (should be of length size()).
+            virtual pointField coordinates() const;
+
         // Multiple point queries.


@@ -26,7 +26,7 @@ Class
     Foam::searchablePlane

 Description
-    Searching on plane. See plane.H
+    Searching on (infinite) plane. See plane.H

 SourceFiles
     searchablePlane.C
@@ -122,6 +122,14 @@ public:
                 return 1;
             }

+            //- Get representative set of element coordinates
+            //  Usually the element centres (should be of length size()).
+            virtual pointField coordinates() const
+            {
+                //notImplemented("searchablePlane::coordinates()")
+                return pointField(1, refPoint());
+            }
+
         // Multiple point queries.


@@ -145,6 +145,13 @@ public:
                 return 1;
             }

+            //- Get representative set of element coordinates
+            //  Usually the element centres (should be of length size()).
+            virtual pointField coordinates() const
+            {
+                return pointField(1, origin_);
+            }
+
         // Multiple point queries.


@@ -133,6 +133,13 @@ public:
                 return 1;
             }

+            //- Get representative set of element coordinates
+            //  Usually the element centres (should be of length size()).
+            virtual pointField coordinates() const
+            {
+                return pointField(1, centre_);
+            }
+
         // Multiple point queries.


@@ -190,6 +190,10 @@ public:
                 return size();
             }

+            //- Get representative set of element coordinates
+            //  Usually the element centres (should be of length size()).
+            virtual pointField coordinates() const = 0;
+
         // Single point queries.
@@ -319,6 +323,18 @@ public:
             )
             {}

+            //- WIP. Store element-wise field.
+            virtual void setField(const labelList& values)
+            {}
+
+            //- WIP. From a set of hits (points and
+            //  indices) get the specified field. Misses do not get set. Return
+            //  empty field if not supported.
+            virtual void getField(const List<pointIndexHit>&, labelList& values)
+            const
+            {
+                values.clear();
+            }
+
 };


@@ -101,7 +101,11 @@ void Foam::searchableSurfaceCollection::findNearest
                     minDistSqr[pointI] = distSqr;
                     nearestInfo[pointI].setPoint(globalPt);
                     nearestInfo[pointI].setHit();
-                    nearestInfo[pointI].setIndex(hitInfo[pointI].index());
+                    nearestInfo[pointI].setIndex
+                    (
+                        hitInfo[pointI].index()
+                      + indexOffset_[surfI]
+                    );
                     nearestSurf[pointI] = surfI;
                 }
             }
@@ -110,6 +114,62 @@ void Foam::searchableSurfaceCollection::findNearest
 }


+// Sort hits into per-surface bins. Misses are rejected. Maintains map back
+// to position
+void Foam::searchableSurfaceCollection::sortHits
+(
+    const List<pointIndexHit>& info,
+    List<List<pointIndexHit> >& surfInfo,
+    labelListList& infoMap
+) const
+{
+    // Count hits per surface.
+    labelList nHits(subGeom_.size(), 0);
+
+    forAll(info, pointI)
+    {
+        if (info[pointI].hit())
+        {
+            label index = info[pointI].index();
+            label surfI = findLower(indexOffset_, index+1);
+            nHits[surfI]++;
+        }
+    }
+
+    // Per surface the hit
+    surfInfo.setSize(subGeom_.size());
+    // Per surface the original position
+    infoMap.setSize(subGeom_.size());
+
+    forAll(surfInfo, surfI)
+    {
+        surfInfo[surfI].setSize(nHits[surfI]);
+        infoMap[surfI].setSize(nHits[surfI]);
+    }
+    nHits = 0;
+
+    forAll(info, pointI)
+    {
+        if (info[pointI].hit())
+        {
+            label index = info[pointI].index();
+            label surfI = findLower(indexOffset_, index+1);
+
+            // Store for correct surface and adapt indices back to local
+            // ones
+            label localI = nHits[surfI]++;
+
+            surfInfo[surfI][localI] = pointIndexHit
+            (
+                info[pointI].hit(),
+                info[pointI].rawPoint(),
+                index-indexOffset_[surfI]
+            );
+            infoMap[surfI][localI] = pointI;
+        }
+    }
+}
+
+
 // * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //

 Foam::searchableSurfaceCollection::searchableSurfaceCollection
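How indexOffset_ maps a global element index back to a surface and a local index; a sketch (not from the commit) for three sub-surfaces of sizes 4, 2 and 5:

    // indexOffset_ == (0 4 6 11): cumulative sizes plus one closing entry.
    label index  = 5;                                   // global index
    label surfI  = findLower(indexOffset_, index+1);    // == 1: 2nd surface
    label localI = index - indexOffset_[surfI];         // == 1 within it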
@ -123,11 +183,13 @@ Foam::searchableSurfaceCollection::searchableSurfaceCollection
scale_(dict.size()), scale_(dict.size()),
transform_(dict.size()), transform_(dict.size()),
subGeom_(dict.size()), subGeom_(dict.size()),
mergeSubRegions_(dict.lookup("mergeSubRegions")) mergeSubRegions_(dict.lookup("mergeSubRegions")),
indexOffset_(dict.size()+1)
{ {
Info<< "SearchableCollection : " << name() << endl; Info<< "SearchableCollection : " << name() << endl;
label surfI = 0; label surfI = 0;
label startIndex = 0;
forAllConstIter(dictionary, dict, iter) forAllConstIter(dictionary, dict, iter)
{ {
if (dict.isDict(iter().keyword())) if (dict.isDict(iter().keyword()))
@ -153,8 +215,24 @@ Foam::searchableSurfaceCollection::searchableSurfaceCollection
const searchableSurface& s = const searchableSurface& s =
io.db().lookupObject<searchableSurface>(subGeomName); io.db().lookupObject<searchableSurface>(subGeomName);
// I don't know yet how to handle the globalSize combined with
// regionOffset. Would cause non-consecutive indices locally
// if all indices offset by globalSize() of the local region...
if (s.size() != s.globalSize())
{
FatalErrorIn
(
"searchableSurfaceCollection::searchableSurfaceCollection"
"(const IOobject&, const dictionary&)"
) << "Cannot use a distributed surface in a collection."
<< exit(FatalError);
}
subGeom_.set(surfI, &const_cast<searchableSurface&>(s)); subGeom_.set(surfI, &const_cast<searchableSurface&>(s));
indexOffset_[surfI] = startIndex;
startIndex += subGeom_[surfI].size();
Info<< " instance : " << instance_[surfI] << endl; Info<< " instance : " << instance_[surfI] << endl;
Info<< " surface : " << s.name() << endl; Info<< " surface : " << s.name() << endl;
Info<< " scale : " << scale_[surfI] << endl; Info<< " scale : " << scale_[surfI] << endl;
@ -163,10 +241,13 @@ Foam::searchableSurfaceCollection::searchableSurfaceCollection
surfI++; surfI++;
} }
} }
indexOffset_[surfI] = startIndex;
instance_.setSize(surfI); instance_.setSize(surfI);
scale_.setSize(surfI); scale_.setSize(surfI);
transform_.setSize(surfI); transform_.setSize(surfI);
subGeom_.setSize(surfI); subGeom_.setSize(surfI);
indexOffset_.setSize(surfI+1);
} }
@ -212,12 +293,36 @@ const Foam::wordList& Foam::searchableSurfaceCollection::regions() const
Foam::label Foam::searchableSurfaceCollection::size() const Foam::label Foam::searchableSurfaceCollection::size() const
{ {
label n = 0; return indexOffset_[indexOffset_.size()-1];
}
Foam::pointField Foam::searchableSurfaceCollection::coordinates() const
{
// Get overall size
pointField coords(size());
// Append individual coordinates
label coordI = 0;
forAll(subGeom_, surfI) forAll(subGeom_, surfI)
{ {
n += subGeom_[surfI].size(); const pointField subCoords = subGeom_[surfI].coordinates();
forAll(subCoords, i)
{
coords[coordI++] = transform_[surfI].globalPosition
(
cmptMultiply
(
subCoords[i],
scale_[surfI]
)
);
}
} }
return n;
return coords;
} }
@ -296,6 +401,11 @@ void Foam::searchableSurfaceCollection::findLine
); );
info[pointI] = hitInfo[pointI]; info[pointI] = hitInfo[pointI];
info[pointI].rawPoint() = nearest[pointI]; info[pointI].rawPoint() = nearest[pointI];
info[pointI].setIndex
(
hitInfo[pointI].index()
+ indexOffset_[surfI]
);
} }
} }
} }
@ -397,82 +507,42 @@ void Foam::searchableSurfaceCollection::getRegion
} }
else else
{ {
// Multiple surfaces. Sort by surface.
// Per surface the hit
List<List<pointIndexHit> > surfInfo;
// Per surface the original position
List<List<label> > infoMap;
sortHits(info, surfInfo, infoMap);
region.setSize(info.size()); region.setSize(info.size());
region = -1; region = -1;
// Which region did point come from. Retest for now to see which // Do region tests
// surface it originates from - crap solution! Should use global indices
// in index inside pointIndexHit to do this better.
pointField samples(info.size()); if (mergeSubRegions_)
forAll(info, pointI)
{ {
if (info[pointI].hit()) // Actually no need for surfInfo. Just take region for surface.
forAll(infoMap, surfI)
{ {
samples[pointI] = info[pointI].hitPoint(); const labelList& map = infoMap[surfI];
} forAll(map, i)
else
{
samples[pointI] = vector::zero;
}
}
//scalarField minDistSqr(info.size(), SMALL);
scalarField minDistSqr(info.size(), GREAT);
labelList nearestSurf;
List<pointIndexHit> nearestInfo;
findNearest
(
samples,
minDistSqr,
nearestInfo,
nearestSurf
);
// Check
{
forAll(info, pointI)
{
if (info[pointI].hit() && nearestSurf[pointI] == -1)
{ {
FatalErrorIn region[map[i]] = regionOffset_[surfI];
(
"searchableSurfaceCollection::getRegion(..)"
) << "pointI:" << pointI
<< " sample:" << samples[pointI]
<< " nearest:" << nearestInfo[pointI]
<< " nearestsurf:" << nearestSurf[pointI]
<< abort(FatalError);
} }
} }
} }
else
forAll(subGeom_, surfI)
{ {
// Collect points from my surface forAll(infoMap, surfI)
labelList indices(findIndices(nearestSurf, surfI));
if (mergeSubRegions_)
{
forAll(indices, i)
{
region[indices[i]] = regionOffset_[surfI];
}
}
else
{ {
labelList surfRegion; labelList surfRegion;
subGeom_[surfI].getRegion subGeom_[surfI].getRegion(surfInfo[surfI], surfRegion);
(
List<pointIndexHit> const labelList& map = infoMap[surfI];
( forAll(map, i)
UIndirectList<pointIndexHit>(info, indices)
),
surfRegion
);
forAll(indices, i)
{ {
region[indices[i]] = regionOffset_[surfI] + surfRegion[i]; region[map[i]] = regionOffset_[surfI] + surfRegion[i];
} }
} }
} }
@ -494,52 +564,26 @@ void Foam::searchableSurfaceCollection::getNormal
} }
else else
{ {
// Multiple surfaces. Sort by surface.
// Per surface the hit
List<List<pointIndexHit> > surfInfo;
// Per surface the original position
List<List<label> > infoMap;
sortHits(info, surfInfo, infoMap);
normal.setSize(info.size()); normal.setSize(info.size());
// See above - crap retest to find surface point originates from. // Do region tests
pointField samples(info.size()); forAll(surfInfo, surfI)
forAll(info, pointI)
{ {
if (info[pointI].hit())
{
samples[pointI] = info[pointI].hitPoint();
}
else
{
samples[pointI] = vector::zero;
}
}
//scalarField minDistSqr(info.size(), SMALL);
scalarField minDistSqr(info.size(), GREAT);
labelList nearestSurf;
List<pointIndexHit> nearestInfo;
findNearest
(
samples,
minDistSqr,
nearestInfo,
nearestSurf
);
forAll(subGeom_, surfI)
{
// Collect points from my surface
labelList indices(findIndices(nearestSurf, surfI));
vectorField surfNormal; vectorField surfNormal;
subGeom_[surfI].getNormal subGeom_[surfI].getNormal(surfInfo[surfI], surfNormal);
(
List<pointIndexHit> const labelList& map = infoMap[surfI];
( forAll(map, i)
UIndirectList<pointIndexHit>(info, indices)
),
surfNormal
);
forAll(indices, i)
{ {
normal[indices[i]] = surfNormal[i]; normal[map[i]] = surfNormal[i];
} }
} }
} }
@ -561,4 +605,99 @@ void Foam::searchableSurfaceCollection::getVolumeType
} }
void Foam::searchableSurfaceCollection::distribute
(
const List<treeBoundBox>& bbs,
const bool keepNonLocal,
autoPtr<mapDistribute>& faceMap,
autoPtr<mapDistribute>& pointMap
)
{
forAll(subGeom_, surfI)
{
// Note:Tranform the bounding boxes? Something like
// pointField bbPoints =
// cmptDivide
// (
// transform_[surfI].localPosition
// (
// bbs[i].points()
// ),
// scale_[surfI]
// );
// treeBoundBox newBb(bbPoints);
// Note: what to do with faceMap, pointMap from multiple surfaces?
subGeom_[surfI].distribute
(
bbs,
keepNonLocal,
faceMap,
pointMap
);
}
}
void Foam::searchableSurfaceCollection::setField(const labelList& values)
{
forAll(subGeom_, surfI)
{
subGeom_[surfI].setField
(
static_cast<const labelList&>
(
SubList<label>
(
values,
subGeom_[surfI].size(),
indexOffset_[surfI]
)
)
);
}
}
void Foam::searchableSurfaceCollection::getField
(
const List<pointIndexHit>& info,
labelList& values
) const
{
if (subGeom_.size() == 0)
{}
else if (subGeom_.size() == 1)
{
subGeom_[0].getField(info, values);
}
else
{
// Multiple surfaces. Sort by surface.
// Per surface the hit
List<List<pointIndexHit> > surfInfo;
// Per surface the original position
List<List<label> > infoMap;
sortHits(info, surfInfo, infoMap);
values.setSize(info.size());
//?Misses do not get set? values = 0;
// Do surface tests
forAll(surfInfo, surfI)
{
labelList surfValues;
subGeom_[surfI].getField(surfInfo[surfI], surfValues);
const labelList& map = infoMap[surfI];
forAll(map, i)
{
values[map[i]] = surfValues[i];
}
}
}
}
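
Both methods are flagged WIP above; as a minimal usage sketch (not part of the commit; the helper function and its arguments are invented for illustration), the intended round-trip looks like:

    // Store one label per element of the whole collection; setField()
    // slices the list per sub-surface using indexOffset_.
    void tagAndQuery
    (
        searchableSurfaceCollection& surf,
        const pointField& start,
        const pointField& end
    )
    {
        surf.setField(labelList(surf.size(), 0));

        List<pointIndexHit> hits;
        surf.findLine(start, end, hits);

        // getField() leaves misses untouched, so pre-fill with a sentinel
        labelList values(hits.size(), -1);
        surf.getField(hits, values);
    }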
// ************************************************************************* // // ************************************************************************* //

View File

@ -77,6 +77,10 @@ private:
Switch mergeSubRegions_; Switch mergeSubRegions_;
//- offsets for indices coming from different surfaces
// (sized with size() of each surface)
labelList indexOffset_;
//- Region names //- Region names
mutable wordList regions_; mutable wordList regions_;
//- From individual regions to collection regions //- From individual regions to collection regions
@ -95,6 +99,15 @@ private:
labelList& nearestSurf labelList& nearestSurf
) const; ) const;
//- Sort hits into per-surface bins. Misses are rejected.
// Maintains map back to position
void sortHits
(
const List<pointIndexHit>& info,
List<List<pointIndexHit> >& surfInfo,
labelListList& infoMap
) const;
//- Disallow default bitwise copy construct //- Disallow default bitwise copy construct
searchableSurfaceCollection(const searchableSurfaceCollection&); searchableSurfaceCollection(const searchableSurfaceCollection&);
@ -161,6 +174,10 @@ public:
//- Range of local indices that can be returned. //- Range of local indices that can be returned.
virtual label size() const; virtual label size() const;
//- Get representative set of element coordinates
// Usually the element centres (should be of length size()).
virtual pointField coordinates() const;
// Multiple point queries. // Multiple point queries.
@ -215,6 +232,27 @@ public:
List<volumeType>& List<volumeType>&
) const; ) const;
// Other
//- Set bounds of surface. Bounds currently set as list of
// bounding boxes. The bounds are hints to the surface as to
// the range of queries it can expect. faceMap/pointMap can be
// set if the surface has done any redistribution.
virtual void distribute
(
const List<treeBoundBox>&,
const bool keepNonLocal,
autoPtr<mapDistribute>& faceMap,
autoPtr<mapDistribute>& pointMap
);
//- WIP. Store element-wise field.
virtual void setField(const labelList& values);
//- WIP. From a set of hits (points and
// indices) get the specified field. Misses do not get set. Return
// empty field if not supported.
virtual void getField(const List<pointIndexHit>&, labelList&) const;
// regIOobject implementation // regIOobject implementation

View File

@ -151,6 +151,13 @@ public:
return surface().size(); return surface().size();
} }
//- Get representative set of element coordinates
// Usually the element centres (should be of length size()).
virtual pointField coordinates() const
{
return surface().coordinates();
}
// Multiple point queries. // Multiple point queries.
@ -225,6 +232,41 @@ public:
} }
// Other
//- Set bounds of surface. Bounds currently set as list of
// bounding boxes. The bounds are hints to the surface as for
// the range of queries it can expect. faceMap/pointMap can be
// set if the surface has done any redistribution.
virtual void distribute
(
const List<treeBoundBox>& bbs,
const bool keepNonLocal,
autoPtr<mapDistribute>& faceMap,
autoPtr<mapDistribute>& pointMap
)
{
subGeom_[0].distribute(bbs, keepNonLocal, faceMap, pointMap);
}
//- WIP. Store element-wise field.
virtual void setField(const labelList& values)
{
subGeom_[0].setField(values);
}
//- WIP. From a set of hits (points and
// indices) get the specified field. Misses do not get set. Return
// empty field if not supported.
virtual void getField
(
const List<pointIndexHit>& info,
labelList& values
) const
{
surface().getField(info, values);
}
// regIOobject implementation // regIOobject implementation
bool writeData(Ostream& os) const bool writeData(Ostream& os) const

View File

@ -30,6 +30,7 @@ License
#include "EdgeMap.H" #include "EdgeMap.H"
#include "triSurfaceFields.H" #include "triSurfaceFields.H"
#include "Time.H" #include "Time.H"
#include "PackedBoolList.H"
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * // // * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
@ -281,6 +282,36 @@ void Foam::triSurfaceMesh::getNextIntersections
} }
void Foam::triSurfaceMesh::calcBounds(boundBox& bb, label& nPoints) const
{
// Unfortunately nPoints constructs meshPoints() so do compact version
// ourselves
const triSurface& s = static_cast<const triSurface&>(*this);
PackedBoolList pointIsUsed(points().size());
nPoints = 0;
bb = boundBox::invertedBox;
forAll(s, triI)
{
const labelledTri& f = s[triI];
forAll(f, fp)
{
label pointI = f[fp];
if (pointIsUsed.set(pointI, 1u))
{
bb.min() = ::Foam::min(bb.min(), points()[pointI]);
bb.max() = ::Foam::max(bb.max(), points()[pointI]);
nPoints++;
}
}
}
}
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::triSurfaceMesh::triSurfaceMesh(const IOobject& io, const triSurface& s) Foam::triSurfaceMesh::triSurfaceMesh(const IOobject& io, const triSurface& s)
@ -301,6 +332,7 @@ Foam::triSurfaceMesh::triSurfaceMesh(const IOobject& io, const triSurface& s)
), ),
triSurface(s), triSurface(s),
tolerance_(indexedOctree<treeDataTriSurface>::perturbTol()), tolerance_(indexedOctree<treeDataTriSurface>::perturbTol()),
maxTreeDepth_(10),
surfaceClosed_(-1) surfaceClosed_(-1)
{} {}
@ -344,6 +376,7 @@ Foam::triSurfaceMesh::triSurfaceMesh(const IOobject& io)
) )
), ),
tolerance_(indexedOctree<treeDataTriSurface>::perturbTol()), tolerance_(indexedOctree<treeDataTriSurface>::perturbTol()),
maxTreeDepth_(10),
surfaceClosed_(-1) surfaceClosed_(-1)
{} {}
@ -390,6 +423,7 @@ Foam::triSurfaceMesh::triSurfaceMesh
) )
), ),
tolerance_(indexedOctree<treeDataTriSurface>::perturbTol()), tolerance_(indexedOctree<treeDataTriSurface>::perturbTol()),
maxTreeDepth_(10),
surfaceClosed_(-1) surfaceClosed_(-1)
{ {
scalar scaleFactor = 0; scalar scaleFactor = 0;
@ -410,6 +444,14 @@ Foam::triSurfaceMesh::triSurfaceMesh
Info<< searchableSurface::name() << " : using intersection tolerance " Info<< searchableSurface::name() << " : using intersection tolerance "
<< tolerance_ << endl; << tolerance_ << endl;
} }
// Have optional non-standard tree-depth to limit storage.
if (dict.readIfPresent("maxTreeDepth", maxTreeDepth_) && maxTreeDepth_ > 0)
{
Info<< searchableSurface::name() << " : using maximum tree depth "
<< maxTreeDepth_ << endl;
}
} }
@ -431,6 +473,17 @@ void Foam::triSurfaceMesh::clearOut()
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
Foam::pointField Foam::triSurfaceMesh::coordinates() const
{
// Use copy to calculate face centres so they don't get stored
return PrimitivePatch<labelledTri, SubList, const pointField&>
(
SubList<labelledTri>(*this, triSurface::size()),
triSurface::points()
).faceCentres();
}
void Foam::triSurfaceMesh::movePoints(const pointField& newPoints) void Foam::triSurfaceMesh::movePoints(const pointField& newPoints)
{ {
tree_.clear(); tree_.clear();
@ -444,15 +497,28 @@ const Foam::indexedOctree<Foam::treeDataTriSurface>&
{ {
if (tree_.empty()) if (tree_.empty())
{ {
// Calculate bb without constructing local point numbering.
treeBoundBox bb;
label nPoints;
calcBounds(bb, nPoints);
if (nPoints != points().size())
{
WarningIn("triSurfaceMesh::tree() const")
<< "Surface " << searchableSurface::name()
<< " does not have compact point numbering."
<< " Of " << points().size() << " only " << nPoints
<< " are used. This might give problems in some routines."
<< endl;
}
// Random number generator. Bit dodgy since not exactly random ;-) // Random number generator. Bit dodgy since not exactly random ;-)
Random rndGen(65431); Random rndGen(65431);
// Slightly extended bb. Slightly off-centred just so on symmetric // Slightly extended bb. Slightly off-centred just so on symmetric
// geometry there are less face/edge aligned items. // geometry there are less face/edge aligned items.
Removed:

            treeBoundBox bb
            (
                treeBoundBox(points(), meshPoints()).extend(rndGen, 1E-4)
            );

Added:

            bb = bb.extend(rndGen, 1E-4);
bb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL); bb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
bb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL); bb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
@ -465,9 +531,9 @@ const Foam::indexedOctree<Foam::treeDataTriSurface>&
( (
treeDataTriSurface(*this), treeDataTriSurface(*this),
bb, bb,
10, // maxLevel maxTreeDepth_, // maxLevel
10, // leafsize 10, // leafsize
3.0 // duplicity 3.0 // duplicity
) )
); );
@ -494,15 +560,17 @@ const Foam::indexedOctree<Foam::treeDataEdge>&
+ nInternalEdges() + nInternalEdges()
); );
treeBoundBox bb;
label nPoints;
calcBounds(bb, nPoints);
// Random number generator. Bit dodgy since not exactly random ;-) // Random number generator. Bit dodgy since not exactly random ;-)
Random rndGen(65431); Random rndGen(65431);
// Slightly extended bb. Slightly off-centred just so on symmetric // Slightly extended bb. Slightly off-centred just so on symmetric
// geometry there are less face/edge aligned items. // geometry there are less face/edge aligned items.
Removed:

            treeBoundBox bb
            (
                treeBoundBox(points(), meshPoints()).extend(rndGen, 1E-4)
            );

Added:

            bb = bb.extend(rndGen, 1E-4);
bb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL); bb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
bb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL); bb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
@ -517,10 +585,10 @@ const Foam::indexedOctree<Foam::treeDataEdge>&
localPoints(), // points localPoints(), // points
bEdges // selected edges bEdges // selected edges
), ),
bb, // bb bb, // bb
8, // maxLevel maxTreeDepth_, // maxLevel
10, // leafsize 10, // leafsize
3.0 // duplicity 3.0 // duplicity
) )
); );
} }
@ -754,24 +822,53 @@ void Foam::triSurfaceMesh::getNormal
} }
void Foam::triSurfaceMesh::setField(const labelList& values)
{
autoPtr<triSurfaceLabelField> fldPtr
(
new triSurfaceLabelField
(
IOobject
(
"values",
objectRegistry::time().timeName(), // instance
"triSurface", // local
*this,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
*this,
dimless,
labelField(values)
)
);
// Store field on triMesh
fldPtr.ptr()->store();
}
Removed:

    void Foam::triSurfaceMesh::getField
    (
        const word& fieldName,
        const List<pointIndexHit>& info,
        labelList& values
    ) const
    {
        const triSurfaceLabelField& fld = lookupObject<triSurfaceLabelField>
        (
            fieldName
        );

        values.setSize(info.size());

        forAll(info, i)
        {
            if (info[i].hit())
            {
                values[i] = fld[info[i].index()];
            }
        }
    }

Added:

    void Foam::triSurfaceMesh::getField
    (
        const List<pointIndexHit>& info,
        labelList& values
    ) const
    {
        if (foundObject<triSurfaceLabelField>("values"))
        {
            values.setSize(info.size());

            const triSurfaceLabelField& fld = lookupObject<triSurfaceLabelField>
            (
                "values"
            );

            forAll(info, i)
            {
                if (info[i].hit())
                {
                    values[i] = fld[info[i].index()];
                }
            }
        }
    }
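
The pair communicates through a registered triSurfaceLabelField named "values": setField() stores it on the surface, getField() looks it up and only writes entries for hits. A sketch under the same caveats (hypothetical helper, names invented):

    // Tag each triangle with its own index, then read the tags back at
    // the nearest-hit points.
    void tagTriangles(triSurfaceMesh& surf, const pointField& samples)
    {
        surf.setField(identity(surf.size()));   // registers "values"

        List<pointIndexHit> hits;
        surf.findNearest
        (
            samples,
            scalarField(samples.size(), GREAT), // max search radius squared
            hits
        );

        labelList values(hits.size(), -1);      // -1 survives for misses
        surf.getField(hits, values);
    }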

View File

@ -71,6 +71,9 @@ private:
//- Optional tolerance to use in searches //- Optional tolerance to use in searches
scalar tolerance_; scalar tolerance_;
//- Optional max tree depth of octree
label maxTreeDepth_;
//- Search tree (triangles) //- Search tree (triangles)
mutable autoPtr<indexedOctree<treeDataTriSurface> > tree_; mutable autoPtr<indexedOctree<treeDataTriSurface> > tree_;
@ -129,6 +132,11 @@ private:
void operator=(const triSurfaceMesh&); void operator=(const triSurfaceMesh&);
protected:
//- Calculate (number of) used points and their bounding box
void calcBounds(boundBox& bb, label& nPoints) const;
public: public:
//- Runtime type information //- Runtime type information
@ -185,6 +193,10 @@ public:
return triSurface::size(); return triSurface::size();
} }
//- Get representative set of element coordinates
// Usually the element centres (should be of length size()).
virtual pointField coordinates() const;
virtual void findNearest virtual void findNearest
( (
const pointField& sample, const pointField& sample,
@ -236,6 +248,8 @@ public:
List<volumeType>& List<volumeType>&
) const; ) const;
// Other
//- Set bounds of surface. Bounds currently set as list of //- Set bounds of surface. Bounds currently set as list of
// bounding boxes. The bounds are hints to the surface as to // bounding boxes. The bounds are hints to the surface as to
// the range of queries it can expect. faceMap/pointMap can be // the range of queries it can expect. faceMap/pointMap can be
@ -249,16 +263,12 @@ public:
) )
{} {}
// Other //- WIP. Store element-wise field.
virtual void setField(const labelList& values);
//- Specific to triSurfaceMesh: from a set of hits (points and //- WIP. From a set of hits (points and
// indices) get the specified field. Misses do not get set. // indices) get the specified field. Misses do not get set.
virtual void getField virtual void getField(const List<pointIndexHit>&, labelList&) const;
(
const word& fieldName,
const List<pointIndexHit>&,
labelList& values
) const;
// regIOobject implementation // regIOobject implementation

View File

@ -22,40 +22,87 @@ License
along with OpenFOAM; if not, write to the Free Software Foundation, along with OpenFOAM; if not, write to the Free Software Foundation,
Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
From scotch forum:

By: Francois PELLEGRINI RE: Graph mapping 'strategy' string [ reply ]
2008-08-22 10:09

Strategy handling in Scotch is a bit tricky. In order not to be
confused, you must have a clear view of how they are built. Here are
some rules:

1- Strategies are made up of "methods" which are combined by means of
"operators".

2- A method is of the form "m{param=value,param=value,...}", where "m"
is a single character (this is your first error: "f" is a method name,
not a parameter name).

3- There exist different sorts of strategies: bipartitioning strategies,
mapping strategies, ordering strategies, which cannot be mixed. For
instance, you cannot build a bipartitioning strategy and feed it to a
mapping method (this is your second error).

To use the "mapCompute" routine, you must create a mapping strategy, not
a bipartitioning one, and so use stratGraphMap() and not
stratGraphBipart(). Your mapping strategy should however be based on the
"recursive bipartitioning" method ("b"). For instance, a simple (and
hence not very efficient) mapping strategy can be:

    "b{sep=f}"

which computes mappings with the recursive bipartitioning method "b",
this latter using the Fiduccia-Mattheyses method "f" to compute its
separators.

If you want an exact partition (see your previous post), try
"b{sep=fx}".

However, these strategies are not the most efficient, as they do not
make use of the multi-level framework.

To use the multi-level framework, try for instance:

    "b{sep=m{vert=100,low=h,asc=f}x}"

The current default mapping strategy in Scotch can be seen by using the
"-vs" option of program gmap. It is, to date:

    b
    {
        job=t,
        map=t,
        poli=S,
        sep=
        (
            m
            {
                asc=b
                {
                    bnd=d{pass=40,dif=1,rem=1}f{move=80,pass=-1,bal=0.005},
                    org=f{move=80,pass=-1,bal=0.005},
                    width=3
                },
                low=h{pass=10}f{move=80,pass=-1,bal=0.0005},
                type=h,
                vert=80,
                rat=0.8
            }
        |   m
            {
                asc=b
                {
                    bnd=d{pass=40,dif=1,rem=1}f{move=80,pass=-1,bal=0.005},
                    org=f{move=80,pass=-1,bal=0.005},
                    width=3
                },
                low=h{pass=10}f{move=80,pass=-1,bal=0.0005},
                type=h,
                vert=80,
                rat=0.8
            }
        )
    }
\*---------------------------------------------------------------------------*/ \*---------------------------------------------------------------------------*/
#include "scotchDecomp.H" #include "scotchDecomp.H"
@ -126,6 +173,51 @@ Foam::label Foam::scotchDecomp::decompose
List<int>& finalDecomp List<int>& finalDecomp
) )
{ {
// Dump graph
if (decompositionDict_.found("scotchCoeffs"))
{
const dictionary& scotchCoeffs =
decompositionDict_.subDict("scotchCoeffs");
if (scotchCoeffs.found("writeGraph"))
{
Switch writeGraph(scotchCoeffs.lookup("writeGraph"));
if (writeGraph)
{
OFstream str(mesh_.time().path() / mesh_.name() + ".grf");
Info<< "Dumping Scotch graph file to " << str.name() << endl
<< "Use this in combination with gpart." << endl;
label version = 0;
str << version << nl;
// Number of vertices
str << xadj.size()-1 << ' ' << adjncy.size() << nl;
// Numbering starts from 0
label baseval = 0;
// Has weights?
label hasEdgeWeights = 0;
label hasVertexWeights = 0;
label numericflag = 10*hasEdgeWeights+hasVertexWeights;
str << baseval << ' ' << numericflag << nl;
for (label cellI = 0; cellI < xadj.size()-1; cellI++)
{
label start = xadj[cellI];
label end = xadj[cellI+1];
str << end-start;
for (label i = start; i < end; i++)
{
str << ' ' << adjncy[i];
}
str << nl;
}
}
}
}
// Strategy // Strategy
// ~~~~~~~~ // ~~~~~~~~
@ -206,51 +298,6 @@ Foam::label Foam::scotchDecomp::decompose
check(SCOTCH_graphCheck(&grafdat), "SCOTCH_graphCheck"); check(SCOTCH_graphCheck(&grafdat), "SCOTCH_graphCheck");
// Dump graph
if (decompositionDict_.found("scotchCoeffs"))
{
const dictionary& scotchCoeffs =
decompositionDict_.subDict("scotchCoeffs");
if (scotchCoeffs.found("writeGraph"))
{
Switch writeGraph(scotchCoeffs.lookup("writeGraph"));
if (writeGraph)
{
OFstream str(mesh_.time().path() / mesh_.name() + ".grf");
Info<< "Dumping Scotch graph file to " << str.name() << endl
<< "Use this in combination with gpart." << endl;
label version = 0;
str << version << nl;
// Numer of vertices
str << xadj.size()-1 << ' ' << adjncy.size() << nl;
// Numbering starts from 0
label baseval = 0;
// Has weights?
label hasEdgeWeights = 0;
label hasVertexWeights = 0;
label numericflag = 10*hasEdgeWeights+hasVertexWeights;
str << baseval << ' ' << numericflag << nl;
for (label cellI = 0; cellI < xadj.size()-1; cellI++)
{
label start = xadj[cellI];
label end = xadj[cellI+1];
str << end-start;
for (label i = start; i < end; i++)
{
str << ' ' << adjncy[i];
}
str << nl;
}
}
}
}
// Architecture // Architecture
// ~~~~~~~~~~~~ // ~~~~~~~~~~~~
// (fully connected network topology since using switch) // (fully connected network topology since using switch)
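
For reference, a decomposeParDict fragment exercising the optional entries might read as follows. This is a sketch: writeGraph is read in the block above, while the strategy entry is assumed to be read further down in this file (outside these hunks), with a strategy string taken from the forum examples quoted in the header:

    numberOfSubdomains  4;
    method              scotch;

    scotchCoeffs
    {
        // Optional: dump the connectivity graph as <case>/<mesh>.grf
        // for inspection with the Scotch tools (gpart/gmap)
        writeGraph  true;

        // Optional: override the default mapping strategy
        strategy    "b{sep=m{vert=100,low=h,asc=f}x}";
    }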

View File

@ -14,7 +14,7 @@ FoamFile
} }
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
application oodles; application XXX;
startFrom latestTime; startFrom latestTime;
@ -54,6 +54,12 @@ functions
// Where to load it from (if not already in solver) // Where to load it from (if not already in solver)
functionObjectLibs ("libfieldAverage.so"); functionObjectLibs ("libfieldAverage.so");
// Function object enabled flag
enabled true;
// When to output the average fields
outputControl outputTime;
// Fields to be averaged - runTime modifiable // Fields to be averaged - runTime modifiable
fields fields
( (

View File

@ -46,10 +46,13 @@ namespace Foam
fieldValues::cellSource::sourceTypeNames_; fieldValues::cellSource::sourceTypeNames_;
template<> template<>
Removed:

    const char* NamedEnum<fieldValues::cellSource::operationType, 4>::
        names[] = {"none", "sum", "volAverage", "volIntegrate"};

Added:

    const char* NamedEnum<fieldValues::cellSource::operationType, 5>::
        names[] =
        {
            "none", "sum", "volAverage", "volIntegrate", "weightedAverage"
        };
const NamedEnum<fieldValues::cellSource::operationType, 4> const NamedEnum<fieldValues::cellSource::operationType, 5>
fieldValues::cellSource::operationTypeNames_; fieldValues::cellSource::operationTypeNames_;
} }
@ -93,7 +96,7 @@ void Foam::fieldValues::cellSource::setCellZoneCells()
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * // // * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
void Foam::fieldValues::cellSource::initialise() void Foam::fieldValues::cellSource::initialise(const dictionary& dict)
{ {
switch (source_) switch (source_)
{ {
@ -104,7 +107,7 @@ void Foam::fieldValues::cellSource::initialise()
} }
default: default:
{ {
FatalErrorIn("cellSource::constructCellAddressing()") FatalErrorIn("cellSource::initialise()")
<< "Unknown source type. Valid source types are:" << "Unknown source type. Valid source types are:"
<< sourceTypeNames_ << nl << exit(FatalError); << sourceTypeNames_ << nl << exit(FatalError);
} }
@ -114,6 +117,29 @@ void Foam::fieldValues::cellSource::initialise()
<< " total cells = " << cellId_.size() << nl << " total cells = " << cellId_.size() << nl
<< " total volume = " << sum(filterField(mesh().V())) << " total volume = " << sum(filterField(mesh().V()))
<< nl << endl; << nl << endl;
if (operation_ == opWeightedAverage)
{
dict.lookup("weightField") >> weightFieldName_;
if
(
obr().foundObject<volScalarField>(weightFieldName_)
)
{
Info<< " weight field = " << weightFieldName_;
}
else
{
FatalErrorIn("cellSource::initialise()")
<< type() << " " << name_ << ": "
<< sourceTypeNames_[source_] << "(" << sourceName_ << "):"
<< nl << " Weight field " << weightFieldName_
<< " must be a " << volScalarField::typeName
<< nl << exit(FatalError);
}
}
Info<< nl << endl;
} }
@ -172,7 +198,7 @@ void Foam::fieldValues::cellSource::read(const dictionary& dict)
if (active_) if (active_)
{ {
// no additional info to read // no additional info to read
initialise(); initialise(dict);
} }
} }
@ -183,9 +209,12 @@ void Foam::fieldValues::cellSource::write()
if (active_) if (active_)
{ {
Removed:

        outputFilePtr_()
            << obr_.time().value() << tab
            << sum(filterField(mesh().V()));

Added:

        if (Pstream::master())
        {
            outputFilePtr_()
                << obr_.time().value() << tab
                << sum(filterField(mesh().V()));
        }
forAll(fields_, i) forAll(fields_, i)
{ {
@ -196,7 +225,10 @@ void Foam::fieldValues::cellSource::write()
writeValues<tensor>(fields_[i]); writeValues<tensor>(fields_[i]);
} }
Removed:

        outputFilePtr_()<< endl;

Added:

        if (Pstream::master())
        {
            outputFilePtr_()<< endl;
        }
if (log_) if (log_)
{ {

View File

@ -39,7 +39,7 @@ Description
valueOutput true; // Write values at run-time output times? valueOutput true; // Write values at run-time output times?
source cellZone; // Type of cell source source cellZone; // Type of cell source
sourceName c0; sourceName c0;
operation volAverage; // none, sum, volAverage, volIntegrate operation volAverage;
fields fields
( (
p p
@ -47,6 +47,13 @@ Description
); );
} }
where operation is one of:
- none
- sum
- volAverage
- volIntegrate
- weightedAverage
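
As enforced in cellSource::initialise() above, weightedAverage additionally requires a weightField entry naming a registered volScalarField; an illustrative fragment (field name assumed):

        operation   weightedAverage;
        weightField rho;    // must be a registered volScalarField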
SourceFiles SourceFiles
cellSource.C cellSource.C
@ -96,11 +103,12 @@ public:
opNone, opNone,
opSum, opSum,
opVolAverage, opVolAverage,
opVolIntegrate opVolIntegrate,
opWeightedAverage
}; };
//- Operation type names //- Operation type names
static const NamedEnum<operationType, 4> operationTypeNames_; static const NamedEnum<operationType, 5> operationTypeNames_;
private: private:
@ -127,23 +135,34 @@ protected:
//- Local list of cell IDs //- Local list of cell IDs
labelList cellId_; labelList cellId_;
//- Weight field name - only used for opWeightedAverage mode
word weightFieldName_;
// Protected member functions // Protected member functions
//- Initialise, e.g. cell addressing //- Initialise, e.g. cell addressing
void initialise(); void initialise(const dictionary& dict);
Removed:

            //- Insert field values into values list
            template<class Type>
            bool setFieldValues
            (
                const word& fieldName,
                List<Type>& values
            ) const;

            //- Apply the 'operation' to the values
            template<class Type>
            Type processValues(const List<Type>& values) const;

Added:

            //- Return true if the field name is valid
            template<class Type>
            bool validField(const word& fieldName) const;

            //- Insert field values into values list
            template<class Type>
            tmp<Field<Type> > setFieldValues
            (
                const word& fieldName
            ) const;

            //- Apply the 'operation' to the values
            template<class Type>
            Type processValues
            (
                const Field<Type>& values,
                const scalarField& V,
                const scalarField& weightField
            ) const;
//- Output file header information //- Output file header information
virtual void writeFileHeader(); virtual void writeFileHeader();

View File

@ -27,27 +27,16 @@ License
#include "cellSource.H" #include "cellSource.H"
#include "volFields.H" #include "volFields.H"
#include "IOList.H" #include "IOList.H"
#include "ListListOps.H"
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * // // * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
template<class Type> template<class Type>
Removed:

    bool Foam::fieldValues::cellSource::setFieldValues
    (
        const word& fieldName,
        List<Type>& values
    ) const
    {
        values.setSize(cellId_.size(), pTraits<Type>::zero);

        typedef GeometricField<Type, fvPatchField, volMesh> vf;

        if (obr_.foundObject<vf>(fieldName))
        {
            const vf& field = obr_.lookupObject<vf>(fieldName);
            values = UIndirectList<Type>(field, cellId_);

            return true;
        }

Added:

    bool Foam::fieldValues::cellSource::validField(const word& fieldName) const
    {
        typedef GeometricField<Type, fvPatchField, volMesh> vf;

        if (obr_.foundObject<vf>(fieldName))
        {
            return true;
        }
@ -55,10 +44,29 @@ bool Foam::fieldValues::cellSource::setFieldValues
} }
template<class Type>
Foam::tmp<Foam::Field<Type> > Foam::fieldValues::cellSource::setFieldValues
(
const word& fieldName
) const
{
typedef GeometricField<Type, fvPatchField, volMesh> vf;
if (obr_.foundObject<vf>(fieldName))
{
return filterField(obr_.lookupObject<vf>(fieldName));
}
return tmp<Field<Type> >(new Field<Type>(0.0));
}
template<class Type> template<class Type>
Type Foam::fieldValues::cellSource::processValues Type Foam::fieldValues::cellSource::processValues
( (
Removed:

        const List<Type>& values

Added:

        const Field<Type>& values,
        const scalarField& V,
        const scalarField& weightField
) const ) const
{ {
Type result = pTraits<Type>::zero; Type result = pTraits<Type>::zero;
@ -71,13 +79,17 @@ Type Foam::fieldValues::cellSource::processValues
} }
case opVolAverage: case opVolAverage:
{ {
Removed:

            tmp<scalarField> V = filterField(mesh().V());
            result = sum(values*V())/sum(V());
            break;
        }
        case opVolIntegrate:
        {
            result = sum(values*filterField(mesh().V()));

Added:

            result = sum(values*V)/sum(V);
            break;
        }
        case opVolIntegrate:
        {
            result = sum(values*V);
            break;
        }
        case opWeightedAverage:
        {
            result = sum(values*weightField)/sum(weightField);
break; break;
} }
default: default:
@ -95,25 +107,20 @@ Type Foam::fieldValues::cellSource::processValues
template<class Type> template<class Type>
bool Foam::fieldValues::cellSource::writeValues(const word& fieldName) bool Foam::fieldValues::cellSource::writeValues(const word& fieldName)
{ {
Removed:

    List<List<Type> > allValues(Pstream::nProcs());
    bool validField =
        setFieldValues<Type>(fieldName, allValues[Pstream::myProcNo()]);

    if (validField)
    {
        Pstream::gatherList(allValues);

        if (Pstream::master())
        {
            List<Type> values =
                ListListOps::combine<List<Type> >
                (
                    allValues,
                    accessOp<List<Type> >()
                );

            Type result = processValues(values);

Added:

    const bool ok = validField<Type>(fieldName);

    if (ok)
    {
        Field<Type> values = combineFields(setFieldValues<Type>(fieldName));
        scalarField V = combineFields(filterField(mesh().V()));
        scalarField weightField =
            combineFields(setFieldValues<scalar>(weightFieldName_));

        if (Pstream::master())
        {
            Type result = processValues(values, V, weightField);
if (valueOutput_) if (valueOutput_)
{ {
@ -144,7 +151,7 @@ bool Foam::fieldValues::cellSource::writeValues(const word& fieldName)
} }
} }
return validField; return ok;
} }

View File

@ -220,7 +220,7 @@ void Foam::fieldValues::faceSource::initialise(const dictionary& dict)
} }
default: default:
{ {
FatalErrorIn("faceSource::constructFaceAddressing()") FatalErrorIn("faceSource::initiliase()")
<< type() << " " << name_ << ": " << type() << " " << name_ << ": "
<< sourceTypeNames_[source_] << "(" << sourceName_ << "):" << sourceTypeNames_[source_] << "(" << sourceName_ << "):"
<< nl << " Unknown source type. Valid source types are:" << nl << " Unknown source type. Valid source types are:"
@ -245,7 +245,7 @@ void Foam::fieldValues::faceSource::initialise(const dictionary& dict)
} }
else else
{ {
FatalErrorIn("faceSource::constructFaceAddressing()") FatalErrorIn("faceSource::initialise()")
<< type() << " " << name_ << ": " << type() << " " << name_ << ": "
<< sourceTypeNames_[source_] << "(" << sourceName_ << "):" << sourceTypeNames_[source_] << "(" << sourceName_ << "):"
<< nl << " Weight field " << weightFieldName_ << nl << " Weight field " << weightFieldName_
@ -326,9 +326,12 @@ void Foam::fieldValues::faceSource::write()
if (active_) if (active_)
{ {
Removed:

        outputFilePtr_()
            << obr_.time().value() << tab
            << sum(filterField(mesh().magSf()));

Added:

        if (Pstream::master())
        {
            outputFilePtr_()
                << obr_.time().value() << tab
                << sum(filterField(mesh().magSf()));
        }
forAll(fields_, i) forAll(fields_, i)
{ {
@ -339,7 +342,10 @@ void Foam::fieldValues::faceSource::write()
writeValues<tensor>(fields_[i]); writeValues<tensor>(fields_[i]);
} }
Removed:

        outputFilePtr_()<< endl;

Added:

        if (Pstream::master())
        {
            outputFilePtr_()<< endl;
        }
if (log_) if (log_)
{ {
@ -350,4 +356,3 @@ void Foam::fieldValues::faceSource::write()
// ************************************************************************* // // ************************************************************************* //

View File

@ -153,17 +153,22 @@ protected:
//- Initialise, e.g. face addressing //- Initialise, e.g. face addressing
void initialise(const dictionary& dict); void initialise(const dictionary& dict);
Removed:

            //- Insert field values into values list
            template<class Type>
            bool setFieldValues
            (
                const word& fieldName,
                List<Type>& values
            ) const;

            //- Apply the 'operation' to the values
            template<class Type>
            Type processValues(const List<Type>& values) const;

Added:

            //- Return true if the field name is valid
            template<class Type>
            bool validField(const word& fieldName) const;

            //- Return field values by looking up field name
            template<class Type>
            tmp<Field<Type> > setFieldValues(const word& fieldName) const;

            //- Apply the 'operation' to the values
            template<class Type>
            Type processValues
            (
                const Field<Type>& values,
                const scalarField& magSf,
                const scalarField& weightField
            ) const;
//- Output file header information //- Output file header information
virtual void writeFileHeader(); virtual void writeFileHeader();

View File

@ -33,70 +33,17 @@ License
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * // // * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
template<class Type> template<class Type>
Removed:

    bool Foam::fieldValues::faceSource::setFieldValues
    (
        const word& fieldName,
        List<Type>& values
    ) const
    {
        values.setSize(faceId_.size(), pTraits<Type>::zero);

        typedef GeometricField<Type, fvsPatchField, surfaceMesh> sf;
        typedef GeometricField<Type, fvPatchField, volMesh> vf;

        if (obr_.foundObject<sf>(fieldName))
        {
            const sf& field = obr_.lookupObject<sf>(fieldName);

            forAll(values, i)
            {
                label faceI = faceId_[i];
                label patchI = facePatchId_[i];
                if (patchI >= 0)
                {
                    values[i] = field.boundaryField()[patchI][faceI];
                }
                else
                {
                    values[i] = field[faceI];
                }
                values[i] *= flipMap_[i];
            }

            return true;
        }
        else if (obr_.foundObject<vf>(fieldName))
        {
            const vf& field = obr_.lookupObject<vf>(fieldName);

            forAll(values, i)
            {
                label faceI = faceId_[i];
                label patchI = facePatchId_[i];
                if (patchI >= 0)
                {
                    values[i] = field.boundaryField()[patchI][faceI];
                }
                else
                {
                    FatalErrorIn
                    (
                        "fieldValues::faceSource::setFieldValues"
                        "("
                            "const word&, "
                            "List<Type>&"
                        ") const"
                    )   << type() << " " << name_ << ": "
                        << sourceTypeNames_[source_] << "(" << sourceName_ << "):"
                        << nl
                        << "    Unable to process internal faces for volume field "
                        << fieldName << nl << abort(FatalError);
                }
                values[i] *= flipMap_[i];
            }

            return true;
        }

Added:

    bool Foam::fieldValues::faceSource::validField(const word& fieldName) const
    {
        typedef GeometricField<Type, fvsPatchField, surfaceMesh> sf;
        typedef GeometricField<Type, fvPatchField, volMesh> vf;

        if (obr_.foundObject<sf>(fieldName))
        {
            return true;
        }
        else if (obr_.foundObject<vf>(fieldName))
        {
            return true;
        }
@ -104,10 +51,34 @@ bool Foam::fieldValues::faceSource::setFieldValues
} }
template<class Type>
Foam::tmp<Foam::Field<Type> > Foam::fieldValues::faceSource::setFieldValues
(
const word& fieldName
) const
{
typedef GeometricField<Type, fvsPatchField, surfaceMesh> sf;
typedef GeometricField<Type, fvPatchField, volMesh> vf;
if (obr_.foundObject<sf>(fieldName))
{
return filterField(obr_.lookupObject<sf>(fieldName));
}
else if (obr_.foundObject<vf>(fieldName))
{
return filterField(obr_.lookupObject<vf>(fieldName));
}
return tmp<Field<Type> >(new Field<Type>(0.0));
}
template<class Type> template<class Type>
Type Foam::fieldValues::faceSource::processValues Type Foam::fieldValues::faceSource::processValues
( (
Removed:

        const List<Type>& values

Added:

        const Field<Type>& values,
        const scalarField& magSf,
        const scalarField& weightField
) const ) const
{ {
Type result = pTraits<Type>::zero; Type result = pTraits<Type>::zero;
@ -120,54 +91,17 @@ Type Foam::fieldValues::faceSource::processValues
} }
case opAreaAverage: case opAreaAverage:
{ {
Removed:

            tmp<scalarField> magSf = filterField(mesh().magSf());
            result = sum(values*magSf())/sum(magSf());

Added:

            result = sum(values*magSf)/sum(magSf);
break; break;
} }
case opAreaIntegrate: case opAreaIntegrate:
{ {
result = sum(values*filterField(mesh().magSf())); result = sum(values*magSf);
break; break;
} }
case opWeightedAverage: case opWeightedAverage:
{ {
Removed:

            if (mesh().foundObject<volScalarField>(weightFieldName_))
            {
                tmp<scalarField> wField =
                    filterField
                    (
                        mesh().lookupObject<volScalarField>(weightFieldName_)
                    );
                result = sum(values*wField())/sum(wField());
            }
            else if (mesh().foundObject<surfaceScalarField>(weightFieldName_))
            {
                tmp<scalarField> wField =
                    filterField
                    (
                        mesh().lookupObject<surfaceScalarField>
                        (
                            weightFieldName_
                        )
                    );
                result = sum(values*wField())/sum(wField());
            }
            else
            {
                FatalErrorIn
                (
                    "fieldValues::faceSource::processValues"
                    "("
                        "List<Type>&"
                    ") const"
                )   << type() << " " << name_ << ": "
                    << sourceTypeNames_[source_] << "(" << sourceName_ << "):"
                    << nl
                    << "    Weight field " << weightFieldName_
                    << " must be either a " << volScalarField::typeName
                    << " or " << surfaceScalarField::typeName << nl
                    << abort(FatalError);
            }

Added:

            result = sum(values*weightField)/sum(weightField);
break; break;
} }
default: default:
@ -185,25 +119,20 @@ Type Foam::fieldValues::faceSource::processValues
template<class Type> template<class Type>
bool Foam::fieldValues::faceSource::writeValues(const word& fieldName) bool Foam::fieldValues::faceSource::writeValues(const word& fieldName)
{ {
Removed:

    List<List<Type> > allValues(Pstream::nProcs());
    bool validField =
        setFieldValues<Type>(fieldName, allValues[Pstream::myProcNo()]);

    if (validField)
    {
        Pstream::gatherList(allValues);

        if (Pstream::master())
        {
            List<Type> values =
                ListListOps::combine<List<Type> >
                (
                    allValues,
                    accessOp<List<Type> >()
                );

            Type result = processValues(values);

Added:

    const bool ok = validField<Type>(fieldName);

    if (ok)
    {
        Field<Type> values = combineFields(setFieldValues<Type>(fieldName));
        scalarField magSf = combineFields(filterField(mesh().magSf()));
        scalarField weightField =
            combineFields(setFieldValues<scalar>(weightFieldName_));

        if (Pstream::master())
        {
            Type result = processValues(values, magSf, weightField);
if (valueOutput_) if (valueOutput_)
{ {
@ -222,7 +151,6 @@ bool Foam::fieldValues::faceSource::writeValues(const word& fieldName)
).write(); ).write();
} }
outputFilePtr_()<< tab << result; outputFilePtr_()<< tab << result;
if (log_) if (log_)
@ -234,7 +162,7 @@ bool Foam::fieldValues::faceSource::writeValues(const word& fieldName)
} }
} }
return validField; return ok;
} }

View File

@ -164,6 +164,13 @@ public:
//- Execute the at the final time-loop, currently does nothing //- Execute the at the final time-loop, currently does nothing
virtual void end(); virtual void end();
//- Combine fields from all processor domains into single field
template<class Type>
tmp<Field<Type> > combineFields
(
const tmp<Field<Type> >& field
) const;
}; };
@ -177,6 +184,12 @@ public:
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#ifdef NoRepository
#include "fieldValueTemplates.C"
#endif
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#endif #endif
// ************************************************************************* // // ************************************************************************* //

View File

@ -0,0 +1,66 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2009-2009 OpenCFD Ltd.
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or (at your
option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM; if not, write to the Free Software Foundation,
Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
\*---------------------------------------------------------------------------*/
#include "fieldValue.H"
#include "ListListOps.H"
#include "Pstream.H"
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
template<class Type>
Foam::tmp<Foam::Field<Type> > Foam::fieldValue::combineFields
(
const tmp<Field<Type> >& field
) const
{
List<Field<Type> > allValues(Pstream::nProcs());
allValues[Pstream::myProcNo()] = field();
Pstream::gatherList(allValues);
if (Pstream::master())
{
return tmp<Field<Type> >
(
new Field<Type>
(
ListListOps::combine<Field<Type> >
(
allValues,
accessOp<Field<Type> >()
)
)
);
}
else
{
return field();
}
}
// ************************************************************************* //

View File

@ -26,6 +26,7 @@ License
#include "triSurface.H" #include "triSurface.H"
#include "mergePoints.H" #include "mergePoints.H"
#include "PackedBoolList.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
@ -46,46 +47,44 @@ bool triSurface::stitchTriangles
pointField newPoints; pointField newPoints;
bool hasMerged = mergePoints(rawPoints, tol, verbose, pointMap, newPoints); bool hasMerged = mergePoints(rawPoints, tol, verbose, pointMap, newPoints);
Removed:

    if (hasMerged)
    {
        if (verbose)
        {
            Pout<< "stitchTriangles : Merged from " << rawPoints.size()
                << " points down to " << newPoints.size() << endl;
        }

        pointField& ps = storedPoints();

        // Set the coordinates to the merged ones
        ps = newPoints;

        // Reset the triangle point labels to the unique points array
        label newTriangleI = 0;
        forAll(*this, i)
        {
            label newA = pointMap[operator[](i)[0]];
            label newB = pointMap[operator[](i)[1]];
            label newC = pointMap[operator[](i)[2]];

            if ((newA != newB) && (newA != newC) && (newB != newC))
            {
                operator[](newTriangleI)[0] = newA;
                operator[](newTriangleI)[1] = newB;
                operator[](newTriangleI)[2] = newC;
                operator[](newTriangleI).region() = operator[](i).region();
                newTriangleI++;
            }
            else if (verbose)
            {
                Pout<< "stitchTriangles : "
                    << "Removing triangle " << i << " with non-unique vertices."
                    << endl
                    << "    vertices   :" << newA << ' ' << newB << ' ' << newC
                    << endl
                    << "    coordinates:" << ps[newA] << ' ' << ps[newB]
                    << ' ' << ps[newC] << endl;
            }
        }

Added:

    pointField& ps = storedPoints();

    // Set the coordinates to the merged ones
    ps.transfer(newPoints);

    if (hasMerged)
    {
        if (verbose)
        {
            Pout<< "stitchTriangles : Merged from " << rawPoints.size()
                << " points down to " << ps.size() << endl;
        }

        // Reset the triangle point labels to the unique points array
        label newTriangleI = 0;
        forAll(*this, i)
        {
            const labelledTri& tri = operator[](i);
            labelledTri newTri
            (
                pointMap[tri[0]],
                pointMap[tri[1]],
                pointMap[tri[2]],
                tri.region()
            );

            if ((newTri[0] != newTri[1]) && (newTri[0] != newTri[2]) && (newTri[1] != newTri[2]))
            {
                operator[](newTriangleI++) = newTri;
            }
            else if (verbose)
            {
                Pout<< "stitchTriangles : "
                    << "Removing triangle " << i
                    << " with non-unique vertices." << endl
                    << "    vertices   :" << newTri << endl
                    << "    coordinates:" << newTri.points(ps)
                    << endl;
            }
        }
@ -98,6 +97,58 @@ bool triSurface::stitchTriangles
<< " triangles" << endl; << " triangles" << endl;
} }
setSize(newTriangleI); setSize(newTriangleI);
// And possibly compact out any unused points (since used only
// by triangles that have just been deleted)
// Done in two passes to save memory (pointField)
// 1. Detect only
PackedBoolList pointIsUsed(ps.size());
label nPoints = 0;
forAll(*this, i)
{
const labelledTri& tri = operator[](i);
forAll(tri, fp)
{
label pointI = tri[fp];
if (pointIsUsed.set(pointI, 1))
{
nPoints++;
}
}
}
if (nPoints != ps.size())
{
// 2. Compact.
pointMap.setSize(ps.size());
label newPointI = 0;
forAll(pointIsUsed, pointI)
{
if (pointIsUsed[pointI])
{
ps[newPointI] = ps[pointI];
pointMap[pointI] = newPointI++;
}
}
ps.setSize(newPointI);
newTriangleI = 0;
forAll(*this, i)
{
const labelledTri& tri = operator[](i);
operator[](newTriangleI++) = labelledTri
(
pointMap[tri[0]],
pointMap[tri[1]],
pointMap[tri[2]],
tri.region()
);
}
}
} }
} }

View File

@ -126,15 +126,6 @@ public:
return alphaSgs_; return alphaSgs_;
} }
//- Return thermal diffusivity
virtual tmp<volScalarField> alphaEff() const
{
return tmp<volScalarField>
(
new volScalarField("alphaEff", alphaSgs_ + alpha())
);
}
//- Return the sub-grid stress tensor. //- Return the sub-grid stress tensor.
virtual tmp<volSymmTensorField> B() const; virtual tmp<volSymmTensorField> B() const;

View File

@ -127,15 +127,6 @@ public:
return alphaSgs_; return alphaSgs_;
} }
//- Return thermal conductivity
virtual tmp<volScalarField> alphaEff() const
{
return tmp<volScalarField>
(
new volScalarField("alphaEff", alphaSgs_ + alpha())
);
}
//- Return the sub-grid stress tensor //- Return the sub-grid stress tensor
virtual tmp<volSymmTensorField> B() const virtual tmp<volSymmTensorField> B() const
{ {

View File

@ -189,12 +189,6 @@ public:
} }
//- Return the SGS turbulent kinetic energy.
virtual tmp<volScalarField> k() const = 0;
//- Return the SGS turbulent dissipation.
virtual tmp<volScalarField> epsilon() const = 0;
//- Return the SGS turbulent viscosity //- Return the SGS turbulent viscosity
virtual tmp<volScalarField> muSgs() const = 0; virtual tmp<volScalarField> muSgs() const = 0;
@ -210,8 +204,22 @@ public:
//- Return the SGS turbulent thermal diffusivity //- Return the SGS turbulent thermal diffusivity
virtual tmp<volScalarField> alphaSgs() const = 0; virtual tmp<volScalarField> alphaSgs() const = 0;
Removed:

        //- Return the SGS thermal conductivity.
        virtual tmp<volScalarField> alphaEff() const = 0;

Added:

        //- Return the effective thermal diffusivity
        virtual tmp<volScalarField> alphaEff() const
        {
            return tmp<volScalarField>
            (
                new volScalarField("alphaEff", alphaSgs() + alpha())
            );
        }
//- Return the effective turbulence thermal diffusivity for a patch
virtual tmp<scalarField> alphaEff(const label patchI) const
{
return
alphaSgs()().boundaryField()[patchI]
+ alpha().boundaryField()[patchI];
}
//- Return the sub-grid stress tensor. //- Return the sub-grid stress tensor.
virtual tmp<volSymmTensorField> B() const = 0; virtual tmp<volSymmTensorField> B() const = 0;
@ -261,13 +269,13 @@ public:
//- Correct Eddy-Viscosity and related properties. //- Correct Eddy-Viscosity and related properties.
// This calls correct(const tmp<volTensorField>& gradU) by supplying // This calls correct(const tmp<volTensorField>& gradU) by supplying
// gradU calculated locally. // gradU calculated locally.
void correct(); virtual void correct();
//- Correct Eddy-Viscosity and related properties //- Correct Eddy-Viscosity and related properties
virtual void correct(const tmp<volTensorField>& gradU); virtual void correct(const tmp<volTensorField>& gradU);
//- Read LESProperties dictionary //- Read LESProperties dictionary
virtual bool read() = 0; virtual bool read();
}; };

View File

@ -149,15 +149,6 @@ public:
return alphaSgs_; return alphaSgs_;
} }
//- Return thermal conductivity
virtual tmp<volScalarField> alphaEff() const
{
return tmp<volScalarField>
(
new volScalarField("alphaEff", alphaSgs_ + alpha())
);
}
//- Return the sub-grid stress tensor. //- Return the sub-grid stress tensor.
virtual tmp<volSymmTensorField> B() const; virtual tmp<volSymmTensorField> B() const;

View File

@ -153,13 +153,10 @@ public:
return mut_; return mut_;
} }
Removed:

        //- Return the effective turbulent thermal diffusivity
        virtual tmp<volScalarField> alphaEff() const
        {
            return tmp<volScalarField>
            (
                new volScalarField("alphaEff", alphat_ + alpha())
            );
        }

Added:

        //- Return the turbulence thermal diffusivity
        virtual tmp<volScalarField> alphat() const
        {
            return alphat_;
        }
//- Return the turbulence kinetic energy //- Return the turbulence kinetic energy

View File

@ -162,13 +162,10 @@ public:
return mut_; return mut_;
} }
Removed:

        //- Return the effective turbulent thermal diffusivity
        virtual tmp<volScalarField> alphaEff() const
        {
            return tmp<volScalarField>
            (
                new volScalarField("alphaEff", alphat_ + alpha())
            );
        }

Added:

        //- Return the turbulence thermal diffusivity
        virtual tmp<volScalarField> alphat() const
        {
            return alphat_;
        }
//- Return the turbulence kinetic energy //- Return the turbulence kinetic energy

View File

@ -146,13 +146,10 @@ public:
return mut_; return mut_;
} }
Removed:

        //- Return the effective turbulent thermal diffusivity
        virtual tmp<volScalarField> alphaEff() const
        {
            return tmp<volScalarField>
            (
                new volScalarField("alphaEff", alphat_ + alpha())
            );
        }

Added:

        //- Return the turbulence thermal diffusivity
        virtual tmp<volScalarField> alphat() const
        {
            return alphat_;
        }
//- Return the turbulence kinetic energy //- Return the turbulence kinetic energy

View File

@ -265,9 +265,6 @@ public:
} }
//- Return the turbulence viscosity
virtual tmp<volScalarField> mut() const = 0;
//- Return the effective viscosity //- Return the effective viscosity
virtual tmp<volScalarField> muEff() const virtual tmp<volScalarField> muEff() const
{ {
@ -278,22 +275,21 @@ public:
} }
Removed:

        //- Return the effective turbulent thermal diffusivity
        virtual tmp<volScalarField> alphaEff() const = 0;

        //- Return the turbulence kinetic energy
        virtual tmp<volScalarField> k() const = 0;

        //- Return the turbulence kinetic energy dissipation rate
        virtual tmp<volScalarField> epsilon() const = 0;

        //- Return the Reynolds stress tensor
        virtual tmp<volSymmTensorField> R() const = 0;

        //- Return the effective stress tensor including the laminar stress
        virtual tmp<volSymmTensorField> devRhoReff() const = 0;

        //- Return the source term for the momentum equation
        virtual tmp<fvVectorMatrix> divDevRhoReff(volVectorField& U) const = 0;

Added:

        //- Return the effective turbulent thermal diffusivity
        virtual tmp<volScalarField> alphaEff() const
        {
            return tmp<volScalarField>
            (
                new volScalarField("alphaEff", alphat() + alpha())
            );
        }

        //- Return the effective turbulent thermal diffusivity for a patch
        virtual tmp<scalarField> alphaEff(const label patchI) const
        {
            return
                alphat()().boundaryField()[patchI]
              + alpha().boundaryField()[patchI];
        }
//- Return yPlus for the given patch //- Return yPlus for the given patch
virtual tmp<scalarField> yPlus virtual tmp<scalarField> yPlus
@ -303,10 +299,10 @@ public:
) const; ) const;
//- Solve the turbulence equations and correct the turbulence viscosity //- Solve the turbulence equations and correct the turbulence viscosity
virtual void correct() = 0; virtual void correct();
//- Read RASProperties dictionary //- Read RASProperties dictionary
virtual bool read() = 0; virtual bool read();
}; };

View File

@ -142,13 +142,10 @@ public:
return mut_; return mut_;
} }
Removed:

        //- Return the effective turbulent thermal diffusivity
        virtual tmp<volScalarField> alphaEff() const
        {
            return tmp<volScalarField>
            (
                new volScalarField("alphaEff", alphat_ + alpha())
            );
        }

Added:

        //- Return the turbulence thermal diffusivity
        virtual tmp<volScalarField> alphat() const
        {
            return alphat_;
        }
//- Return the turbulence kinetic energy //- Return the turbulence kinetic energy

View File

@ -178,13 +178,10 @@ public:
return mut_; return mut_;
} }
Removed:

        //- Return the effective turbulent thermal diffusivity
        virtual tmp<volScalarField> alphaEff() const
        {
            return tmp<volScalarField>
            (
                new volScalarField("alphaEff", alphat_ + alpha())
            );
        }

Added:

        //- Return the turbulence thermal diffusivity
        virtual tmp<volScalarField> alphat() const
        {
            return alphat_;
        }
//- Return the turbulence kinetic energy //- Return the turbulence kinetic energy

View File

@ -37,6 +37,22 @@ namespace Foam
namespace compressible namespace compressible
{ {
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
template<>
const char*
NamedEnum<turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceType, 2>::
names[] =
{
"power",
"flux"
};
const
NamedEnum<turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceType, 2>
turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceTypeNames_;
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
turbulentHeatFluxTemperatureFvPatchScalarField:: turbulentHeatFluxTemperatureFvPatchScalarField::
@ -47,8 +63,8 @@ turbulentHeatFluxTemperatureFvPatchScalarField
) )
: :
fixedGradientFvPatchScalarField(p, iF), fixedGradientFvPatchScalarField(p, iF),
q_(p.size(), 0.0), heatSource_(hsPower),
rhoName_("rho") q_(p.size(), 0.0)
{} {}
@ -62,8 +78,8 @@ turbulentHeatFluxTemperatureFvPatchScalarField
) )
: :
fixedGradientFvPatchScalarField(ptf, p, iF, mapper), fixedGradientFvPatchScalarField(ptf, p, iF, mapper),
q_(ptf.q_, mapper), heatSource_(ptf.heatSource_),
rhoName_(ptf.rhoName_) q_(ptf.q_, mapper)
{} {}
@ -76,8 +92,8 @@ turbulentHeatFluxTemperatureFvPatchScalarField
) )
: :
fixedGradientFvPatchScalarField(p, iF), fixedGradientFvPatchScalarField(p, iF),
q_("q", dict, p.size()), heatSource_(heatSourceTypeNames_.read(dict.lookup("heatSource"))),
rhoName_(dict.lookupOrDefault<word>("rho", "rho")) q_("q", dict, p.size())
{ {
fvPatchField<scalar>::operator=(patchInternalField()); fvPatchField<scalar>::operator=(patchInternalField());
gradient() = 0.0; gradient() = 0.0;
@ -91,8 +107,8 @@ turbulentHeatFluxTemperatureFvPatchScalarField
) )
: :
fixedGradientFvPatchScalarField(thftpsf), fixedGradientFvPatchScalarField(thftpsf),
q_(thftpsf.q_), heatSource_(thftpsf.heatSource_),
rhoName_(thftpsf.rhoName_) q_(thftpsf.q_)
{} {}
@ -104,8 +120,8 @@ turbulentHeatFluxTemperatureFvPatchScalarField
) )
: :
fixedGradientFvPatchScalarField(thftpsf, iF), fixedGradientFvPatchScalarField(thftpsf, iF),
q_(thftpsf.q_), heatSource_(thftpsf.heatSource_),
rhoName_(thftpsf.rhoName_) q_(thftpsf.q_)
{} {}
@ -150,22 +166,39 @@ void turbulentHeatFluxTemperatureFvPatchScalarField::updateCoeffs()
const RASModel& rasModel = db().lookupObject<RASModel>("RASProperties"); const RASModel& rasModel = db().lookupObject<RASModel>("RASProperties");
Removed:

    const scalarField alphaEffp = rasModel.alphaEff()().boundaryField()[patchI];

    const basicThermo& thermo =
        db().lookupObject<basicThermo>("thermophysicalProperties");

    // const scalarField& Tp = thermo.T().boundaryField()[patchI];
    const scalarField& Tp = *this;

    const scalarField Cpp = thermo.Cp(Tp, patchI);

    const scalarField& rhop =
        patch().lookupPatchField<volScalarField, scalar>(rhoName_);

    const scalar Ap = gSum(patch().magSf());

    gradient() = q_/(Ap*rhop*Cpp*alphaEffp);

Added:

    const scalarField alphaEffp = rasModel.alphaEff(patchI);

    const scalarField& Tp = *this;

    const scalarField Cpp = rasModel.thermo().Cp(Tp, patchI);

    switch (heatSource_)
    {
        case hsPower:
        {
            const scalar Ap = gSum(patch().magSf());
            gradient() = q_/(Ap*Cpp*alphaEffp);
            break;
        }
        case hsFlux:
        {
            gradient() = q_/(Cpp*alphaEffp);
            break;
        }
        default:
        {
            FatalErrorIn
            (
                "turbulentHeatFluxTemperatureFvPatchScalarField"
                "("
                    "const fvPatch&, "
                    "const DimensionedField<scalar, volMesh>&, "
                    "const dictionary&"
                ")"
            )   << "Unknown heat source type. Valid types are: "
                << heatSourceTypeNames_ << nl << exit(FatalError);
        }
    }
fixedGradientFvPatchScalarField::updateCoeffs(); fixedGradientFvPatchScalarField::updateCoeffs();
} }
@ -177,8 +210,9 @@ void turbulentHeatFluxTemperatureFvPatchScalarField::write
) const ) const
{ {
fvPatchScalarField::write(os); fvPatchScalarField::write(os);
os.writeKeyword("heatSource") << heatSourceTypeNames_[heatSource_]
<< token::END_STATEMENT << nl;
q_.writeEntry("q", os); q_.writeEntry("q", os);
os.writeKeyword("rho") << rhoName_ << token::END_STATEMENT << nl;
gradient().writeEntry("gradient", os); gradient().writeEntry("gradient", os);
writeEntry("value", os); writeEntry("value", os);
} }

View File

@ -26,7 +26,20 @@ Class
Foam::turbulentHeatFluxTemperatureFvPatchScalarField Foam::turbulentHeatFluxTemperatureFvPatchScalarField
Description Description
Removed:

    Fixed heat flux boundary condition for temperature.

Added:

    Fixed heat flux boundary condition to specify the temperature gradient.
    The input heat source is specified either as an absolute power [W] or
    as a flux [W/m2].
Example usage:
hotWall
{
type compressible::turbulentHeatFluxTemperature;
heatSource flux; // power [W]; flux [W/m2]
q uniform 10; // heat power or flux
value uniform 300; // initial temperature value
}
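
With heatSource power, updateCoeffs() above divides q by the patch area, i.e. gradient() = q/(A*Cp*alphaEff) instead of q/(Cp*alphaEff) for flux. A power-mode entry might therefore read (values illustrative):

        hotPlate
        {
            type        compressible::turbulentHeatFluxTemperature;
            heatSource  power;          // q interpreted as total power [W]
            q           uniform 1000;
            value       uniform 300;    // initial temperature value
        }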
SourceFiles SourceFiles
turbulentHeatFluxTemperatureFvPatchScalarField.C turbulentHeatFluxTemperatureFvPatchScalarField.C
@ -38,6 +51,7 @@ SourceFiles
#include "fvPatchFields.H" #include "fvPatchFields.H"
#include "fixedGradientFvPatchFields.H" #include "fixedGradientFvPatchFields.H"
#include "NamedEnum.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
@ -47,20 +61,37 @@ namespace compressible
{ {
/*---------------------------------------------------------------------------*\ /*---------------------------------------------------------------------------*\
Class turbulentHeatFluxTemperatureFvPatchScalarField Declaration Class turbulentHeatFluxTemperatureFvPatchScalarField Declaration
\*---------------------------------------------------------------------------*/ \*---------------------------------------------------------------------------*/
class turbulentHeatFluxTemperatureFvPatchScalarField class turbulentHeatFluxTemperatureFvPatchScalarField
: :
public fixedGradientFvPatchScalarField public fixedGradientFvPatchScalarField
{ {
Removed:

    // Private data

        //- Heat flux [W]
        scalarField q_;

        //- Name of density field
        word rhoName_;

Added:

public:

    // Data types

        //- Enumeration listing the possible heat source input modes
        enum heatSourceType
        {
            hsPower,
            hsFlux
        };


private:

    // Private data

        //- Heat source type names
        static const NamedEnum<heatSourceType, 2> heatSourceTypeNames_;

        //- Heat source type
        heatSourceType heatSource_;

        //- Heat power [W] or flux [W/m2]
        scalarField q_;
public: public:

View File

@ -222,7 +222,7 @@ void alphatJayatillekeWallFunctionFvPatchScalarField::updateCoeffs()
const scalarField& rhow = rasModel.rho().boundaryField()[patchI]; const scalarField& rhow = rasModel.rho().boundaryField()[patchI];
const fvPatchScalarField& hw = const fvPatchScalarField& hw =
patch().lookupPatchField<volScalarField, scalar>("h"); rasModel.thermo().h().boundaryField()[patchI];
// Heat flux [W/m2] - lagging alphatw // Heat flux [W/m2] - lagging alphatw
const scalarField qDot = (alphaw + alphatw)*hw.snGrad(); const scalarField qDot = (alphaw + alphatw)*hw.snGrad();

View File

@ -138,13 +138,10 @@ public:
return mut_; return mut_;
} }
Removed:

        //- Return the effective turbulent thermal diffusivity
        virtual tmp<volScalarField> alphaEff() const
        {
            return tmp<volScalarField>
            (
                new volScalarField("alphaEff", alphat_ + alpha())
            );
        }

Added:

        //- Return the turbulence thermal diffusivity
        virtual tmp<volScalarField> alphat() const
        {
            return alphat_;
        }
//- Return the turbulence kinetic energy //- Return the turbulence kinetic energy

View File

@ -222,13 +222,10 @@ public:
return mut_; return mut_;
} }
Removed:

        //- Return the effective turbulent thermal diffusivity
        virtual tmp<volScalarField> alphaEff() const
        {
            return tmp<volScalarField>
            (
                new volScalarField("alphaEff", alphat_ + alpha())
            );
        }

Added:

        //- Return the turbulence thermal diffusivity
        virtual tmp<volScalarField> alphat() const
        {
            return alphat_;
        }
//- Return the turbulence kinetic energy //- Return the turbulence kinetic energy

View File

@ -78,6 +78,27 @@ tmp<volScalarField> laminar::mut() const
} }
tmp<volScalarField> laminar::alphat() const
{
return tmp<volScalarField>
(
new volScalarField
(
IOobject
(
"alphat",
runTime_.timeName(),
mesh_,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh_,
dimensionedScalar("alphat", alpha().dimensions(), 0.0)
)
);
}
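With alphat() now part of the model interface, a solver can compose the
effective diffusivity itself; a sketch, assuming the usual turbulence and
thermo handles of a compressible solver (names illustrative):

    // For the laminar model, alphat() is the zero field constructed above,
    // so alphaEff reduces to the laminar alpha() alone.
    volScalarField alphaEff
    (
        "alphaEff",
        turbulence->alphat() + thermo.alpha()
    );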
tmp<volScalarField> laminar::k() const tmp<volScalarField> laminar::k() const
{ {
return tmp<volScalarField> return tmp<volScalarField>

View File

@ -89,6 +89,9 @@ public:
return tmp<volScalarField>(new volScalarField("muEff", mu())); return tmp<volScalarField>(new volScalarField("muEff", mu()));
} }
//- Return the turbulence thermal diffusivity, i.e. 0 for laminar flow
virtual tmp<volScalarField> alphat() const;
//- Return the effective turbulent thermal diffusivity, //- Return the effective turbulent thermal diffusivity,
// i.e. the laminar thermal diffusivity // i.e. the laminar thermal diffusivity
virtual tmp<volScalarField> alphaEff() const virtual tmp<volScalarField> alphaEff() const

View File

@ -159,13 +159,10 @@ public:
return mut_; return mut_;
} }
//- Return the effective turbulent thermal diffusivity //- Return the turbulence thermal diffusivity
virtual tmp<volScalarField> alphaEff() const virtual tmp<volScalarField> alphat() const
{ {
return tmp<volScalarField> return alphat_;
(
new volScalarField("alphaEff", alphat_ + alpha())
);
} }
//- Return the turbulence kinetic energy //- Return the turbulence kinetic energy

View File

@ -96,6 +96,27 @@ tmp<volScalarField> laminar::mut() const
} }
tmp<volScalarField> laminar::alphat() const
{
return tmp<volScalarField>
(
new volScalarField
(
IOobject
(
"alphat",
runTime_.timeName(),
mesh_,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh_,
dimensionedScalar("alphat", alpha().dimensions(), 0.0)
)
);
}
tmp<volScalarField> laminar::k() const tmp<volScalarField> laminar::k() const
{ {
return tmp<volScalarField> return tmp<volScalarField>

View File

@ -99,6 +99,9 @@ public:
return tmp<volScalarField>(new volScalarField("muEff", mu())); return tmp<volScalarField>(new volScalarField("muEff", mu()));
} }
//- Return the turbulence thermal diffusivity, i.e. 0 for laminar flow
virtual tmp<volScalarField> alphat() const;
//- Return the effective turbulent thermal diffusivity, //- Return the effective turbulent thermal diffusivity,
// i.e. the laminar thermal diffusivity // i.e. the laminar thermal diffusivity
virtual tmp<volScalarField> alphaEff() const virtual tmp<volScalarField> alphaEff() const
@ -106,6 +109,13 @@ public:
return tmp<volScalarField>(new volScalarField("alphaEff", alpha())); return tmp<volScalarField>(new volScalarField("alphaEff", alpha()));
} }
//- Return the effective turbulent thermal diffusivity for a patch,
// i.e. the laminar thermal diffusivity
virtual tmp<scalarField> alphaEff(const label patchI) const
{
return alpha().boundaryField()[patchI];
}
//- Return the turbulence kinetic energy, i.e. 0 for laminar flow //- Return the turbulence kinetic energy, i.e. 0 for laminar flow
virtual tmp<volScalarField> k() const; virtual tmp<volScalarField> k() const;

View File

@ -192,9 +192,15 @@ public:
//- Return the effective viscosity //- Return the effective viscosity
virtual tmp<volScalarField> muEff() const = 0; virtual tmp<volScalarField> muEff() const = 0;
//- Return the effective turbulent thermal diffusivity //- Return the turbulence thermal diffusivity
virtual tmp<volScalarField> alphat() const = 0;
//- Return the effective turbulence thermal diffusivity
virtual tmp<volScalarField> alphaEff() const = 0; virtual tmp<volScalarField> alphaEff() const = 0;
//- Return the effective turbulence thermal diffusivity for a patch
virtual tmp<scalarField> alphaEff(const label patchI) const = 0;
//- Return the turbulence kinetic energy //- Return the turbulence kinetic energy
virtual tmp<volScalarField> k() const = 0; virtual tmp<volScalarField> k() const = 0;
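The per-patch alphaEff overload added here lets boundary code read a single
patch without constructing the full volume field; a hedged sketch of a wall
condition using it (patch() as available inside any fvPatchField):

    // Per-patch effective thermal diffusivity, laminar + turbulent
    const scalarField alphaEffw(rasModel.alphaEff(patch().index()));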

View File

@ -36,6 +36,22 @@ namespace Foam
namespace incompressible namespace incompressible
{ {
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
template<>
const char*
NamedEnum<turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceType, 2>::
names[] =
{
"power",
"flux"
};
const
NamedEnum<turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceType, 2>
turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceTypeNames_;
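The NamedEnum provides the two-way mapping between dictionary words and enum
values; both directions appear verbatim in the hunks below:

    // Reading: dictionary word -> enum value (constructor)
    heatSource_ = heatSourceTypeNames_.read(dict.lookup("heatSource"));

    // Writing: enum value -> dictionary word (write())
    os.writeKeyword("heatSource") << heatSourceTypeNames_[heatSource_]
        << token::END_STATEMENT << nl;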
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
turbulentHeatFluxTemperatureFvPatchScalarField:: turbulentHeatFluxTemperatureFvPatchScalarField::
@ -46,6 +62,7 @@ turbulentHeatFluxTemperatureFvPatchScalarField
) )
: :
fixedGradientFvPatchScalarField(p, iF), fixedGradientFvPatchScalarField(p, iF),
heatSource_(hsPower),
q_(p.size(), 0.0), q_(p.size(), 0.0),
alphaEffName_("undefinedAlphaEff"), alphaEffName_("undefinedAlphaEff"),
CpName_("undefinedCp") CpName_("undefinedCp")
@ -62,6 +79,7 @@ turbulentHeatFluxTemperatureFvPatchScalarField
) )
: :
fixedGradientFvPatchScalarField(ptf, p, iF, mapper), fixedGradientFvPatchScalarField(ptf, p, iF, mapper),
heatSource_(ptf.heatSource_),
q_(ptf.q_, mapper), q_(ptf.q_, mapper),
alphaEffName_(ptf.alphaEffName_), alphaEffName_(ptf.alphaEffName_),
CpName_(ptf.CpName_) CpName_(ptf.CpName_)
@ -77,6 +95,7 @@ turbulentHeatFluxTemperatureFvPatchScalarField
) )
: :
fixedGradientFvPatchScalarField(p, iF), fixedGradientFvPatchScalarField(p, iF),
heatSource_(heatSourceTypeNames_.read(dict.lookup("heatSource"))),
q_("q", dict, p.size()), q_("q", dict, p.size()),
alphaEffName_(dict.lookup("alphaEff")), alphaEffName_(dict.lookup("alphaEff")),
CpName_(dict.lookup("Cp")) CpName_(dict.lookup("Cp"))
@ -93,6 +112,7 @@ turbulentHeatFluxTemperatureFvPatchScalarField
) )
: :
fixedGradientFvPatchScalarField(thftpsf), fixedGradientFvPatchScalarField(thftpsf),
heatSource_(thftpsf.heatSource_),
q_(thftpsf.q_), q_(thftpsf.q_),
alphaEffName_(thftpsf.alphaEffName_), alphaEffName_(thftpsf.alphaEffName_),
CpName_(thftpsf.CpName_) CpName_(thftpsf.CpName_)
@ -107,6 +127,7 @@ turbulentHeatFluxTemperatureFvPatchScalarField
) )
: :
fixedGradientFvPatchScalarField(thftpsf, iF), fixedGradientFvPatchScalarField(thftpsf, iF),
heatSource_(thftpsf.heatSource_),
q_(thftpsf.q_), q_(thftpsf.q_),
alphaEffName_(thftpsf.alphaEffName_), alphaEffName_(thftpsf.alphaEffName_),
CpName_(thftpsf.CpName_) CpName_(thftpsf.CpName_)
@ -156,7 +177,33 @@ void turbulentHeatFluxTemperatureFvPatchScalarField::updateCoeffs()
const scalarField& Cpp = const scalarField& Cpp =
patch().lookupPatchField<volScalarField, scalar>(CpName_); patch().lookupPatchField<volScalarField, scalar>(CpName_);
gradient() = q_/(Cpp*alphaEffp); switch (heatSource_)
{
case hsPower:
{
const scalar Ap = gSum(patch().magSf());
gradient() = q_/(Ap*Cpp*alphaEffp);
break;
}
case hsFlux:
{
gradient() = q_/(Cpp*alphaEffp);
break;
}
default:
{
FatalErrorIn
(
"turbulentHeatFluxTemperatureFvPatchScalarField"
"("
"const fvPatch&, "
"const DimensionedField<scalar, volMesh>&, "
"const dictionary&"
")"
) << "Unknown heat source type. Valid types are: "
<< heatSourceTypeNames_ << nl << exit(FatalError);
}
}
fixedGradientFvPatchScalarField::updateCoeffs(); fixedGradientFvPatchScalarField::updateCoeffs();
} }
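For the record, the two branches above impose the following surface-normal
temperature gradient, restating the code with $A_p$ = gSum(patch().magSf())
the total patch area:

\[
\left.\frac{\partial T}{\partial n}\right|_{p} =
\begin{cases}
\dfrac{q}{A_p\,C_p\,\alpha_{eff}}, & \text{heatSource power, } q\ \text{in W}\\[4pt]
\dfrac{q}{C_p\,\alpha_{eff}}, & \text{heatSource flux, } q\ \text{in W/m}^2
\end{cases}
\]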
@ -165,6 +212,8 @@ void turbulentHeatFluxTemperatureFvPatchScalarField::updateCoeffs()
void turbulentHeatFluxTemperatureFvPatchScalarField::write(Ostream& os) const void turbulentHeatFluxTemperatureFvPatchScalarField::write(Ostream& os) const
{ {
fixedGradientFvPatchScalarField::write(os); fixedGradientFvPatchScalarField::write(os);
os.writeKeyword("heatSource") << heatSourceTypeNames_[heatSource_]
<< token::END_STATEMENT << nl;
q_.writeEntry("q", os); q_.writeEntry("q", os);
os.writeKeyword("alphaEff") << alphaEffName_ << token::END_STATEMENT << nl; os.writeKeyword("alphaEff") << alphaEffName_ << token::END_STATEMENT << nl;
os.writeKeyword("Cp") << CpName_ << token::END_STATEMENT << nl; os.writeKeyword("Cp") << CpName_ << token::END_STATEMENT << nl;

View File

@ -26,7 +26,22 @@ Class
Foam::turbulentHeatFluxTemperatureFvPatchScalarField Foam::turbulentHeatFluxTemperatureFvPatchScalarField
Description Description
Fixed heat flux boundary condition for temperature. Fixed heat flux boundary condition to specify the temperature gradient. The
input heat source is specified either as an absolute power [W] or as a
flux [W/m2].
Example usage:
hotWall
{
type turbulentHeatFluxTemperature;
heatSource flux; // valid options: power [W] or flux [W/m2]
q uniform 10; // heat power or flux
alphaEff alphaEff; // alphaEff field name;
// alphaEff in [kg/m/s]
Cp Cp; // Cp field name; Cp in [J/kg/K]
value uniform 300; // initial temperature value
}
SourceFiles SourceFiles
turbulentHeatFluxTemperatureFvPatchScalarField.C turbulentHeatFluxTemperatureFvPatchScalarField.C
@ -38,6 +53,7 @@ SourceFiles
#include "fvPatchFields.H" #include "fvPatchFields.H"
#include "fixedGradientFvPatchFields.H" #include "fixedGradientFvPatchFields.H"
#include "NamedEnum.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
@ -54,16 +70,37 @@ class turbulentHeatFluxTemperatureFvPatchScalarField
: :
public fixedGradientFvPatchScalarField public fixedGradientFvPatchScalarField
{ {
// Private data public:
//- Heat flux [W/m2] // Data types
scalarField q_;
//- Name of effective thermal diffusivity field //- Enumeration listing the possible heat source input modes
word alphaEffName_; enum heatSourceType
{
hsPower,
hsFlux
};
//- Name of specific heat capacity field
word CpName_; private:
// Private data
//- Heat source type names
static const NamedEnum<heatSourceType, 2> heatSourceTypeNames_;
//- Heat source type
heatSourceType heatSource_;
//- Heat power [W] or flux [W/m2]
// NOTE: to be divided by density, rho, if used in kinematic form
scalarField q_;
//- Name of effective thermal diffusivity field
word alphaEffName_;
//- Name of specific heat capacity field
word CpName_;
public: public:

View File

@ -23,411 +23,7 @@ boundaryField
floor floor
{ {
type fixedValue; type fixedValue;
value nonuniform List<scalar> value uniform 300;
400
(
// 400 entries elided: 396 x 300, plus two 600 600 pairs
// (the heated faces)
)
;
} }
ceiling ceiling
{ {

View File

@ -23,411 +23,7 @@ boundaryField
floor floor
{ {
type fixedValue; type fixedValue;
value nonuniform List<scalar> value uniform 300;
400
(
// 400 entries elided: 396 x 300, plus two 600 600 pairs
// (the heated faces)
)
;
} }
ceiling ceiling
{ {

View File

@ -41,7 +41,7 @@ boundaryField
type compressible::turbulentMixingLengthFrequencyInlet; type compressible::turbulentMixingLengthFrequencyInlet;
mixingLength 0.007; mixingLength 0.007;
k k; k k;
value uniform 4.5-3; value uniform 4.5e-3;
} }
outlet outlet
{ {