Merge branch 'master' of ssh://dm/home/dm4/OpenFOAM/OpenFOAM-dev

Conflicts:
	src/thermophysicalModels/basic/Make/files
	src/thermophysicalModels/basic/basicThermo/basicThermo.C
	src/thermophysicalModels/basic/psiThermo/hPsiThermo/hPsiThermo.C
	src/thermophysicalModels/basic/psiThermo/hsPsiThermo/hsPsiThermo.C
	src/thermophysicalModels/basic/rhoThermo/hRhoThermo/hRhoThermo.C
	src/thermophysicalModels/basic/rhoThermo/hsRhoThermo/hsRhoThermo.C
	src/thermophysicalModels/thermalPorousZone/thermalModel/fixedTemperature/fixedTemperature.C
Author: Henry
Date:   2012-05-30 15:32:20 +01:00
111 changed files with 3526 additions and 2049 deletions

View File

@@ -15,6 +15,9 @@
const volScalarField& kappa = tkappa();
//const volSymmTensorField& K = tK();
+ tmp<volScalarField> trhoCp = cp*rho;
+ const volScalarField& rhoCp = trhoCp();
volScalarField& T = thermo.T();
IObasicSourceList& sources = solidHeatSources[i];

View File

@@ -4,7 +4,7 @@
tmp<fvScalarMatrix> TEqn
(
  - fvm::laplacian(betav*kappa, T, "laplacian(K,T)")
-   + sources(rho, T)
+   + sources(rhoCp, T)
);
TEqn().relax();

View File

@@ -15,6 +15,9 @@
const volScalarField& kappa = tkappa();
//const volSymmTensorField& K = tK();
+ tmp<volScalarField> trhoCp = cp*rho;
+ const volScalarField& rhoCp = trhoCp();
volScalarField& T = thermo.T();
IObasicSourceList& sources = solidHeatSources[i];

View File

@@ -10,7 +10,7 @@ if (finalIter)
(
    fvm::ddt(betav*rho*cp, T)
  - fvm::laplacian(betav*kappa, T, "laplacian(K,T)")
-   + sources(rho, T)
+   + sources(rhoCp, T)
);
TEqn().relax();
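For reference, the matrix assembled above corresponds to a solid-region heat balance of roughly the following form (a sketch inferred from the operators in the code; betav is written as \beta_v and S denotes the contribution of the source list):

\[ \beta_v\,\rho c_p\,\frac{\partial T}{\partial t} \;-\; \nabla\cdot\left(\beta_v\,\kappa\,\nabla T\right) \;=\; S(\rho c_p,\,T) \]

The change in this hunk swaps the field passed to the source list from rho to rhoCp = cp*rho, so the sources carry the same rho*cp weighting as the time-derivative term.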

View File

@@ -0,0 +1,75 @@
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: dev |
| \\ / A nd | Web: http://www.openfoam.org |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
root "";
case "";
instance "";
local "";
class dictionary;
object meshQualityDict;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
//- Maximum non-orthogonality allowed. Set to 180 to disable.
maxNonOrtho 65;
//- Max skewness allowed. Set to <0 to disable.
maxBoundarySkewness 50;
//- Max skewness allowed. Set to <0 to disable.
maxInternalSkewness 10;
//- Max concaveness allowed. Is angle (in degrees) below which concavity
// is allowed. 0 is straight face, <0 would be convex face.
// Set to 180 to disable.
maxConcave 80;
//- Minimum pyramid volume. Is absolute volume of cell pyramid.
// Set to a sensible fraction of the smallest cell volume expected.
// Set to very negative number (e.g. -1E30) to disable.
minVol 1e-20;
//- Minimum quality of the tet formed by the face-centre
// and variable base point minimum decomposition triangles and
// the cell centre. This has to be a positive number for tracking
// to work. Set to very negative number (e.g. -1E30) to
// disable.
// <0 = inside out tet,
// 0 = flat tet
// 1 = regular tet
minTetQuality 1e-30;
//- Minimum face area. Set to <0 to disable.
minArea -1;
//- Minimum face twist. Set to <-1 to disable. dot product of face normal
//- and face centre triangles normal
minTwist 0.0;
//- minimum normalised cell determinant
//- 1 = hex, <= 0 = folded or flattened illegal cell
minDeterminant 0.001;
//- minFaceWeight (0 -> 0.5)
minFaceWeight 0.02;
//- minVolRatio (0 -> 1)
minVolRatio 0.01;
//must be >0 for Fluent compatibility
minTriangleTwist -1;
// ************************************************************************* //
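For context, the keywords above are the ones motionSmoother::checkMesh reads. A minimal sketch of how such a dictionary is loaded and applied, mirroring the checkMeshQuality.C added later in this commit (variable names here are illustrative only):

// Sketch only: load system/meshQualityDict and collect faces that fail
// the criteria above (mirrors checkMeshQuality.C added in this commit).
IOdictionary qualDict
(
    IOobject
    (
        "meshQualityDict",
        mesh.time().system(),
        mesh,
        IOobject::MUST_READ,
        IOobject::NO_WRITE
    )
);

faceSet wrongFaces(mesh, "wrongFaces", mesh.nFaces()/100 + 1);
motionSmoother::checkMesh(false, mesh, qualDict, wrongFaces);

Info<< "Faces failing quality checks: "
    << returnReduce(wrongFaces.size(), sumOp<label>()) << endl;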

View File

@@ -43,7 +43,7 @@ inline bool Foam::pointEdgeCollapse::update
<< "problem." << abort(FatalError);
}
- if (w2.collapseIndex_ == -1)
+ if (w2.collapseIndex_ == -1 || collapseIndex_ == -1)
{
// Not marked for collapse; only happens on edges.
return false;
@@ -56,11 +56,11 @@ inline bool Foam::pointEdgeCollapse::update
}
else
{
- // Same coordinate. Same string?
+ // Take over w2 if it is 'better'
if (w2.collapseIndex_ < collapseIndex_)
{
- // Take over string index from w2 (and also coordinate but this
- // was same)
+ // Take over string index and coordinate from w2
operator=(w2);
return true;
}
@@ -85,35 +85,6 @@ inline bool Foam::pointEdgeCollapse::update
{
return false;
}
// if (samePoint(w2.collapsePoint_))
// {
// // Same coordinate. Same string?
// if (w2.collapseIndex_ < collapseIndex_)
// {
// // Take over string index from w2 (and also coordinate but
// // this was same)
// operator=(w2);
// return true;
// }
// else
// {
// return false;
// }
// }
// else
// {
// // Find nearest coordinate
// if (magSqr(w2.collapsePoint_) < magSqr(collapsePoint_))
// {
// operator=(w2);
// return true;
// }
// else
// {
// return false;
// }
// }
}
}

View File

@@ -14,7 +14,7 @@ FoamFile
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
- // Move points:
+ // Move boundary points:
// Every entry is two coordinates. First one is location of the point to move,
// the second is the position to move to.
pointsToMove
@@ -22,15 +22,15 @@ pointsToMove
(( -0.17861 -0.45073 0.75276)( -0.18 -0.45073 0.75276))
);
- // Split edge in two:
- // First coord is a point on the edge to cut, second is the position of the
- // newly introduced point
+ // Split boundary edge in two:
+ // First coord is a point on the (boundary) edge to cut, second is the
+ // position of the newly introduced point
edgesToSplit
(
(( -0.17692 -0.45312 0.74516)( -0.18 -0.45 0.742))
);
- // Triangulate a face:
+ // Triangulate a boundary face:
// First coord is a point on the face to triangulate. It will introduce a
// point on the face, triangulate and move the point to the second coordinate.
facesToTriangulate
@@ -38,8 +38,8 @@ facesToTriangulate
(( -0.039123 -0.45045 0.74083) (-0.03844 -0.45049 0.73572))
);
- // Edges to collapse. First coord is point on the edge, second is coordinate
- // to collapse to.
+ // Boundary edges to collapse. First coord is point on the edge, second
+ // is coordinate to collapse to.
edgesToCollapse
(
((0.054975 0.099987 0.0044074)(0.054975 0.099987 0.0044074))
@@ -55,7 +55,8 @@ cellsToSplit
);
// Change patch:
- // Changes patchID of faces. Coord selects the face, label is the patch index.
+ // Changes patchID of boundary faces. Coord selects the face, label is the
+ // patch index.
facesToRepatch
(
(( -0.039123 -0.45045 0.74083) 1)

View File

@@ -1,3 +0,0 @@
checkCvMesh.C
EXE = $(FOAM_APPBIN)/checkCvMesh

View File

@@ -1,14 +0,0 @@
EXE_INC = \
-I$(LIB_SRC)/finiteVolume/lnInclude \
-I$(LIB_SRC)/triSurface/lnInclude \
-I$(LIB_SRC)/mesh/autoMesh/lnInclude \
-I$(LIB_SRC)/dynamicMesh/lnInclude \
-I$(LIB_SRC)/meshTools/lnInclude
EXE_LIBS = \
-lfiniteVolume \
-ldynamicMesh \
-ltriSurface \
-lautoMesh \
-lmeshTools

View File

@@ -1,122 +0,0 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
Application
checkCvMesh
Description
\*---------------------------------------------------------------------------*/
#include "argList.H"
#include "Time.H"
#include "fvMesh.H"
#include "autoSnapDriver.H"
#include "faceSet.H"
#include "motionSmoother.H"
#include "timeSelector.H"
using namespace Foam;
int main(int argc, char *argv[])
{
timeSelector::addOptions();
# include "addOverwriteOption.H"
# include "setRootCase.H"
# include "createTime.H"
instantList timeDirs = timeSelector::select0(runTime, args);
# include "createNamedPolyMesh.H"
runTime.functionObjects().off();
forAll(timeDirs, timeI)
{
runTime.setTime(timeDirs[timeI], timeI);
Info<< "Time = " << runTime.timeName()
<< nl << endl;
mesh.readUpdate();
// Check patches and faceZones are synchronised
mesh.boundaryMesh().checkParallelSync(true);
meshRefinement::checkCoupledFaceZones(mesh);
// Read meshing dictionary
IOdictionary cvMeshDict
(
IOobject
(
"cvMeshDict",
runTime.system(),
mesh,
IOobject::MUST_READ_IF_MODIFIED,
IOobject::NO_WRITE
)
);
// mesh motion and mesh quality parameters
const dictionary& meshQualityDict
= cvMeshDict.subDict("meshQualityControls");
Info<< "Checking mesh ..." << endl;
faceSet wrongFaces(mesh, "wrongFaces", mesh.nFaces()/100);
motionSmoother::checkMesh(false, mesh, meshQualityDict, wrongFaces);
const label nInitErrors = returnReduce
(
wrongFaces.size(),
sumOp<label>()
);
Info<< "Detected " << nInitErrors << " illegal faces"
<< " (concave, zero area or negative cell pyramid volume)"
<< endl;
if (nInitErrors > 0)
{
Info<< "Writing " << nInitErrors
<< " faces in error to set "
<< wrongFaces.name() << endl;
wrongFaces.instance() = mesh.pointsInstance();
wrongFaces.write();
}
Info<< nl << "End of time " << runTime.timeName() << nl << endl;
}
Info<< "End\n" << endl;
return 0;
}

View File

@@ -96,8 +96,9 @@ bool Foam::cellSizeControlSurfaces::evalCellSizeFunctions
if (cellSizeFunctions_.size())
{
- // Initialise to the last (lowest) priority
- label previousPriority = cellSizeFunctions_.last().priority();
+ // Maintain priority of current hit. Initialise so it always goes
+ // through at least once.
+ label previousPriority = -1;
forAll(cellSizeFunctions_, i)
{

View File

@@ -196,7 +196,7 @@ void Foam::conformalVoronoiMesh::buildSurfaceConformation
{
if (vit->internalPoint() && !vit->nearBoundary())
{
- const Foam::point& pt = topoint(vit->point());
+ pointFromPoint pt = topoint(vit->point());
const scalar range = sqr(2.0*targetCellSize(pt));
pointIndexHit pHit;

View File

@@ -238,6 +238,25 @@ int main(int argc, char *argv[])
dict.lookup("constructFrom")
);
// Any merging of small edges
const scalar mergeTol(dict.lookupOrDefault<scalar>("mergeTol", 1e-4));
Info<< "Extruding from " << ExtrudeModeNames[mode]
<< " using model " << model().type() << endl;
if (flipNormals)
{
Info<< "Flipping normals before extruding" << endl;
}
if (mergeTol > 0)
{
Info<< "Collapsing edges < " << mergeTol << " of bounding box" << endl;
}
else
{
Info<< "Not collapsing any edges after extrusion" << endl;
}
Info<< endl;
// Generated mesh (one of either)
autoPtr<fvMesh> meshFromMesh;
@@ -251,10 +270,10 @@
if (mode == PATCH || mode == MESH)
{
- if (flipNormals)
+ if (flipNormals && mode == MESH)
{
FatalErrorIn(args.executable())
- << "Flipping normals not supported for extrusions from patch."
+ << "Flipping normals not supported for extrusions from mesh."
<< exit(FatalError);
}
@@ -297,6 +316,60 @@
addPatchCellLayer layerExtrude(mesh, (mode == MESH));
const labelList meshFaces(patchFaces(patches, sourcePatches));
if (mode == PATCH && flipNormals)
{
// Cheat. Flip patch faces in mesh. This invalidates the
// mesh (open cells) but does produce the correct extrusion.
polyTopoChange meshMod(mesh);
forAll(meshFaces, i)
{
label meshFaceI = meshFaces[i];
label patchI = patches.whichPatch(meshFaceI);
label own = mesh.faceOwner()[meshFaceI];
label nei = -1;
if (patchI == -1)
{
nei = mesh.faceNeighbour()[meshFaceI];
}
label zoneI = mesh.faceZones().whichZone(meshFaceI);
bool zoneFlip = false;
if (zoneI != -1)
{
label index = mesh.faceZones()[zoneI].whichFace(meshFaceI);
zoneFlip = mesh.faceZones()[zoneI].flipMap()[index];
}
meshMod.modifyFace
(
mesh.faces()[meshFaceI].reverseFace(), // modified face
meshFaceI, // label of face
own, // owner
nei, // neighbour
true, // face flip
patchI, // patch for face
zoneI, // zone for face
zoneFlip // face flip in zone
);
}
// Change the mesh. No inflation.
autoPtr<mapPolyMesh> map = meshMod.changeMesh(mesh, false);
// Update fields
mesh.updateMesh(map);
// Move mesh (since morphing does not do this)
if (map().hasMotionPoints())
{
mesh.movePoints(map().preMotionPoints());
}
}
indirectPrimitivePatch extrudePatch
(
IndirectList<face>
@@ -471,11 +544,6 @@
displacement[pointI] = extrudePt - layer0Points[pointI];
}
if (flipNormals)
{
Info<< "Flipping faces." << nl << endl;
displacement = -displacement;
}
// Check if wedge (has layer0 different from original patch points)
// If so move the mesh to starting position.
@@ -666,7 +734,7 @@
const boundBox& bb = mesh.bounds();
const vector span = bb.span();
- const scalar mergeDim = 1e-4 * bb.minDim();
+ const scalar mergeDim = mergeTol * bb.minDim();
Info<< "Mesh bounding box : " << bb << nl
<< " with span : " << span << nl
@@ -677,6 +745,7 @@
// Collapse edges
// ~~~~~~~~~~~~~~
+ if (mergeDim > 0)
{
Info<< "Collapsing edges < " << mergeDim << " ..." << nl << endl;

View File

@@ -31,7 +31,8 @@ exposedPatchName movingWall;
// If construct from surface:
surface "movingWall.stl";
- // Flip surface normals before usage.
+ // Flip surface normals before usage. Valid only for extrude from surface or
+ // patch.
flipNormals false;
//- Linear extrusion in point-normal direction
@@ -87,4 +88,8 @@ sigmaRadialCoeffs
// degree wedges.
mergeFaces false; //true;
+ // Merge small edges. Fraction of bounding box.
+ mergeTol 0;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
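A worked example of the new mergeTol entry (numbers purely illustrative): extrudeMesh now derives the collapse threshold from the mesh bounding box as mergeDim = mergeTol * bb.minDim(), so with mergeTol 1e-4 and a smallest bounding-box dimension of 0.1 m,

\[ \mathrm{mergeDim} = 10^{-4} \times 0.1\,\mathrm{m} = 10\,\mu\mathrm{m}, \]

and edges shorter than that are collapsed after extrusion. The value 0 written above disables the collapse stage entirely, per the new if (mergeDim > 0) guard in extrudeMesh.C.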

View File

@@ -123,14 +123,6 @@ void writeMesh
int main(int argc, char *argv[])
{
# include "addOverwriteOption.H"
Foam::argList::addBoolOption
(
"checkOnly",
"check existing mesh against snappyHexMeshDict settings"
);
# include "setRootCase.H"
# include "createTime.H"
runTime.functionObjects().off();
@@ -141,8 +133,6 @@ int main(int argc, char *argv[])
const bool overwrite = args.optionFound("overwrite");
- const bool checkOnly = args.optionFound("checkOnly");
// Check patches and faceZones are synchronised
mesh.boundaryMesh().checkParallelSync(true);
meshRefinement::checkCoupledFaceZones(mesh);
@@ -184,36 +174,6 @@ int main(int argc, char *argv[])
);
if (checkOnly)
{
Info<< "Checking initial mesh ..." << endl;
faceSet wrongFaces(mesh, "wrongFaces", mesh.nFaces()/100);
motionSmoother::checkMesh(false, mesh, motionDict, wrongFaces);
const label nInitErrors = returnReduce
(
wrongFaces.size(),
sumOp<label>()
);
Info<< "Detected " << nInitErrors << " illegal faces"
<< " (concave, zero area or negative cell pyramid volume)"
<< endl;
if (nInitErrors > 0)
{
Info<< "Writing " << nInitErrors
<< " faces in error to set "
<< wrongFaces.name() << endl;
wrongFaces.instance() = mesh.pointsInstance();
wrongFaces.write();
}
Info<< "End\n" << endl;
return 0;
}
// Read decomposePar dictionary
IOdictionary decomposeDict

View File

@@ -1,6 +1,7 @@
printMeshStats.C
checkTopology.C
checkGeometry.C
+ checkMeshQuality.C
checkMesh.C
EXE = $(FOAM_APPBIN)/checkMesh

View File

@@ -1,5 +1,7 @@
EXE_INC = \
- -I$(LIB_SRC)/meshTools/lnInclude
+ -I$(LIB_SRC)/meshTools/lnInclude \
+ -I$(LIB_SRC)/dynamicMesh/lnInclude
EXE_LIBS = \
- -lmeshTools
+ -lmeshTools \
+ -ldynamicMesh

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -27,6 +27,21 @@ Application
Description
Checks validity of a mesh
Usage
- checkMesh [OPTION]
\param -allGeometry \n
Checks all (including non finite-volume specific) geometry
\param -allTopology \n
Checks all (including non finite-volume specific) addressing
\param -meshQuality \n
Checks against user defined (in \a system/meshQualityDict) quality settings
\param -region \<name\> \n
Specify an alternative mesh region.
\*---------------------------------------------------------------------------*/
#include "argList.H"
@@ -39,6 +54,7 @@ Description
#include "printMeshStats.H"
#include "checkTopology.H"
#include "checkGeometry.H"
+ #include "checkMeshQuality.H"
using namespace Foam;
@@ -63,6 +79,11 @@
"allTopology",
"include extra topology checks"
);
argList::addBoolOption
(
"meshQuality",
"read user-defined mesh quality criterions from system/meshQualityDict"
);
# include "setRootCase.H"
# include "createTime.H"
@@ -72,6 +93,46 @@
const bool noTopology = args.optionFound("noTopology");
const bool allGeometry = args.optionFound("allGeometry");
const bool allTopology = args.optionFound("allTopology");
const bool meshQuality = args.optionFound("meshQuality");
if (noTopology)
{
Info<< "Disabling all topology checks." << nl << endl;
}
if (allTopology)
{
Info<< "Enabling all (cell, face, edge, point) topology checks."
<< nl << endl;
}
if (allGeometry)
{
Info<< "Enabling all geometry checks." << nl << endl;
}
if (meshQuality)
{
Info<< "Enabling user-defined geometry checks." << nl << endl;
}
autoPtr<IOdictionary> qualDict;
if (meshQuality)
{
qualDict.reset
(
new IOdictionary
(
IOobject
(
"meshQualityDict",
mesh.time().system(),
mesh,
IOobject::MUST_READ,
IOobject::NO_WRITE
)
)
);
}
forAll(timeDirs, timeI)
{
@@ -96,25 +157,31 @@
printMeshStats(mesh, allTopology);
- label noFailedChecks = 0;
+ label nFailedChecks = 0;
if (!noTopology)
{
- noFailedChecks += checkTopology(mesh, allTopology, allGeometry);
+ nFailedChecks += checkTopology(mesh, allTopology, allGeometry);
}
- noFailedChecks += checkGeometry(mesh, allGeometry);
+ nFailedChecks += checkGeometry(mesh, allGeometry);
+ if (meshQuality)
+ {
+ nFailedChecks += checkMeshQuality(mesh, qualDict());
+ }
- // Note: no reduction in noFailedChecks necessary since is
+ // Note: no reduction in nFailedChecks necessary since is
// counter of checks, not counter of failed cells,faces etc.
- if (noFailedChecks == 0)
+ if (nFailedChecks == 0)
{
Info<< "\nMesh OK.\n" << endl;
}
else
{
- Info<< "\nFailed " << noFailedChecks << " mesh checks.\n"
+ Info<< "\nFailed " << nFailedChecks << " mesh checks.\n"
<< endl;
}
}
@@ -124,6 +191,12 @@
label nFailedChecks = checkGeometry(mesh, allGeometry);
if (meshQuality)
{
nFailedChecks += checkMeshQuality(mesh, qualDict());
}
if (nFailedChecks)
{
Info<< "\nFailed " << nFailedChecks << " mesh checks.\n"

View File

@@ -0,0 +1,34 @@
#include "checkMeshQuality.H"
#include "polyMesh.H"
#include "cellSet.H"
#include "faceSet.H"
#include "motionSmoother.H"
Foam::label Foam::checkMeshQuality
(
const polyMesh& mesh,
const dictionary& dict
)
{
label noFailedChecks = 0;
{
faceSet faces(mesh, "meshQualityFaces", mesh.nFaces()/100+1);
motionSmoother::checkMesh(false, mesh, dict, faces);
label nFaces = returnReduce(faces.size(), sumOp<label>());
if (nFaces > 0)
{
noFailedChecks++;
Info<< " <<Writing " << nFaces
<< " faces in error to set " << faces.name() << endl;
faces.instance() = mesh.pointsInstance();
faces.write();
}
}
return noFailedChecks;
}

View File

@@ -0,0 +1,6 @@
#include "polyMesh.H"
namespace Foam
{
label checkMeshQuality(const polyMesh& mesh, const dictionary&);
}

View File

@@ -152,13 +152,17 @@ void Foam::printMeshStats(const polyMesh& mesh, const bool allTopology)
{
Pstream::mapCombineGather(polyhedralFaces, plusEqOp<label>());
- Info<< " Breakdown of polyhedra by number of faces:" << endl;
- Info<< " faces" << " number of cells" << endl;
+ Info<< " Breakdown of polyhedra by number of faces:" << nl
+ << " faces" << " number of cells" << endl;
- forAllConstIter(Map<label>, polyhedralFaces, iter)
+ labelList sortedKeys = polyhedralFaces.sortedToc();
+ forAll(sortedKeys, keyI)
{
+ label nFaces = sortedKeys[keyI];
Info<< setf(std::ios::right) << setw(13)
- << iter.key() << " " << iter() << nl;
+ << nFaces << " " << polyhedralFaces[nFaces] << nl;
}
}

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -206,6 +206,14 @@ int main(int argc, char *argv[])
runTime
);
// cvMesh vertices
writeMeshObject<pointIOField>
(
"internalDelaunayVertices",
regionPrefix,
runTime
);
if (runTime.writeFormat() == IOstream::ASCII)
{
// Only do zones when converting from binary to ascii

View File

@@ -51,9 +51,6 @@ Foam::autoPtr<Foam::fvMesh> Foam::loadOrCreateMesh
// Check who has a mesh
const bool haveMesh = isDir(io.time().path()/io.instance()/meshSubDir);
- Pout<< "meshpath:" << io.time().path()/io.instance()/meshSubDir << endl;
- Pout<< "haveMesh:" << haveMesh << endl;
if (!haveMesh)
{
// Create dummy mesh. Only used on procs that don't have mesh.
@@ -293,8 +290,10 @@ Pout<< "haveMesh:" << haveMesh << endl;
if (!haveMesh)
{
// We created a dummy mesh file above. Delete it.
- //Pout<< "Removing dummy mesh " << io.objectPath() << endl;
- rmDir(io.objectPath());
+ const fileName meshFiles = io.time().path()/io.instance()/meshSubDir;
+ //Pout<< "Removing dummy mesh " << meshFiles << endl;
+ mesh.removeFiles();
+ rmDir(meshFiles);
}
// Force recreation of globalMeshData.

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -52,6 +52,7 @@ Foam::vtkTopo::vtkTopo(const polyMesh& mesh)
const cellModel& tet = *(cellModeller::lookup("tet"));
const cellModel& pyr = *(cellModeller::lookup("pyr"));
const cellModel& prism = *(cellModeller::lookup("prism"));
+ const cellModel& wedge = *(cellModeller::lookup("wedge"));
const cellModel& tetWedge = *(cellModeller::lookup("tetWedge"));
const cellModel& hex = *(cellModeller::lookup("hex"));
@@ -77,7 +78,7 @@ Foam::vtkTopo::vtkTopo(const polyMesh& mesh)
if
(
model != hex
- // && model != wedge // See above.
+ && model != wedge // See above.
&& model != prism
&& model != pyr
&& model != tet
@@ -164,21 +165,21 @@ Foam::vtkTopo::vtkTopo(const polyMesh& mesh)
cellTypes_[cellI] = VTK_WEDGE;
}
- // else if (cellModel == wedge)
- // {
- // // Treat as squeezed hex
- // vtkVerts.setSize(8);
- // vtkVerts[0] = cellShape[0];
- // vtkVerts[1] = cellShape[1];
- // vtkVerts[2] = cellShape[2];
- // vtkVerts[3] = cellShape[0];
- // vtkVerts[4] = cellShape[3];
- // vtkVerts[5] = cellShape[4];
- // vtkVerts[6] = cellShape[5];
- // vtkVerts[7] = cellShape[6];
- //
- // cellTypes_[cellI] = VTK_HEXAHEDRON;
- // }
+ else if (cellModel == wedge)
+ {
+ // Treat as squeezed hex
+ vtkVerts.setSize(8);
+ vtkVerts[0] = cellShape[0];
+ vtkVerts[1] = cellShape[1];
+ vtkVerts[2] = cellShape[2];
+ vtkVerts[3] = cellShape[2];
+ vtkVerts[4] = cellShape[3];
+ vtkVerts[5] = cellShape[4];
+ vtkVerts[6] = cellShape[5];
+ vtkVerts[7] = cellShape[6];
+ cellTypes_[cellI] = VTK_HEXAHEDRON;
+ }
else if (cellModel == hex)
{
vtkVerts = cellShape;

View File

@@ -472,6 +472,33 @@ void unmarkBaffles
}
void writeStats(const extendedFeatureEdgeMesh& fem, Ostream& os)
{
os << " points : " << fem.points().size() << nl
<< " of which" << nl
<< " convex : "
<< fem.concaveStart() << nl
<< " concave : "
<< (fem.mixedStart()-fem.concaveStart()) << nl
<< " mixed : "
<< (fem.nonFeatureStart()-fem.mixedStart()) << nl
<< " non-feature : "
<< (fem.points().size()-fem.nonFeatureStart()) << nl
<< " edges : " << fem.edges().size() << nl
<< " of which" << nl
<< " external edges : "
<< fem.internalStart() << nl
<< " internal edges : "
<< (fem.flatStart()- fem.internalStart()) << nl
<< " flat edges : "
<< (fem.openStart()- fem.flatStart()) << nl
<< " open edges : "
<< (fem.multipleStart()- fem.openStart()) << nl
<< " multiply connected : "
<< (fem.edges().size()- fem.multipleStart()) << nl;
}
// Main program:
int main(int argc, char *argv[])
@@ -514,7 +541,7 @@ int main(int argc, char *argv[])
forAllConstIter(dictionary, dict, iter)
{
- dictionary surfaceDict = dict.subDict(iter().keyword());
+ const dictionary& surfaceDict = iter().dict();
const fileName surfFileName = iter().keyword();
const fileName sFeatFileName = surfFileName.lessExt().name();
@@ -522,16 +549,16 @@ int main(int argc, char *argv[])
Info<< "Surface : " << surfFileName << nl << endl;
const Switch writeVTK =
- surfaceDict.lookupOrAddDefault<Switch>("writeVTK", "off");
+ surfaceDict.lookupOrDefault<Switch>("writeVTK", "off");
const Switch writeObj =
- surfaceDict.lookupOrAddDefault<Switch>("writeObj", "off");
+ surfaceDict.lookupOrDefault<Switch>("writeObj", "off");
const Switch curvature =
- surfaceDict.lookupOrAddDefault<Switch>("curvature", "off");
+ surfaceDict.lookupOrDefault<Switch>("curvature", "off");
const Switch featureProximity =
- surfaceDict.lookupOrAddDefault<Switch>("featureProximity", "off");
+ surfaceDict.lookupOrDefault<Switch>("featureProximity", "off");
const Switch closeness =
- surfaceDict.lookupOrAddDefault<Switch>("closeness", "off");
+ surfaceDict.lookupOrDefault<Switch>("closeness", "off");
const word extractionMethod = surfaceDict.lookup("extractionMethod");
@@ -579,7 +606,7 @@ int main(int argc, char *argv[])
// Either construct features from surface & featureAngle or read set.
- // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
surfaceFeatures set(surf);
@@ -670,7 +697,10 @@ int main(int argc, char *argv[])
if (surfaceDict.isDict("subsetFeatures"))
{
- dictionary subsetDict = surfaceDict.subDict("subsetFeatures");
+ const dictionary& subsetDict = surfaceDict.subDict
+ (
+ "subsetFeatures"
+ );
if (subsetDict.found("insideBox"))
{
@@ -704,7 +734,7 @@ int main(int argc, char *argv[])
}
const Switch manifoldEdges =
- subsetDict.lookupOrAddDefault<Switch>("manifoldEdges", "no");
+ subsetDict.lookupOrDefault<Switch>("manifoldEdges", "no");
if (manifoldEdges)
{
@@ -746,16 +776,6 @@ int main(int argc, char *argv[])
// newSet.writeObj("final");
//}
Info<< nl
<< "Final feature set after trimming and subsetting:" << nl
<< " feature points : " << newSet.featurePoints().size() << nl
<< " feature edges : " << newSet.featureEdges().size() << nl
<< " of which" << nl
<< " region edges : " << newSet.nRegionEdges() << nl
<< " external edges : " << newSet.nExternalEdges() << nl
<< " internal edges : " << newSet.nInternalEdges() << nl
<< endl;
// Extracting and writing a extendedFeatureEdgeMesh
extendedFeatureEdgeMesh feMesh
(
@@ -764,6 +784,51 @@ int main(int argc, char *argv[])
sFeatFileName + ".extendedFeatureEdgeMesh"
);
if (surfaceDict.isDict("addFeatures"))
{
const dictionary& subsetDict = surfaceDict.subDict
(
"addFeatures"
);
const word addFeName = subsetDict["name"];
Info<< "Adding (without merging) features from " << addFeName
<< nl << endl;
const Switch flip = subsetDict["flip"];
extendedFeatureEdgeMesh addFeMesh
(
IOobject
(
addFeName,
runTime.time().constant(),
"extendedFeatureEdgeMesh",
runTime.time(),
IOobject::MUST_READ,
IOobject::NO_WRITE
)
);
Info<< "Read " << addFeMesh.name() << nl;
writeStats(addFeMesh, Info);
if (flip)
{
Info<< "Flipping " << addFeMesh.name() << endl;
addFeMesh.flipNormals();
Info<< "After flipping " << addFeMesh.name() << nl;
writeStats(addFeMesh, Info);
}
feMesh.add(addFeMesh);
}
Info<< nl
<< "Final feature set:" << nl;
writeStats(feMesh, Info);
Info<< nl << "Writing extendedFeatureEdgeMesh to "
<< feMesh.objectPath() << endl;

View File

@@ -70,6 +70,12 @@ surface2.nas
manifoldEdges no;
}
addFeatures
{
// Add (without merging) another extendedFeatureEdgeMesh
name axZ.extendedFeatureEdgeMesh;
}
// Output the curvature of the surface
curvature no;

View File

@@ -1,8 +1,8 @@
EXE_INC = \
-I$(LIB_SRC)/meshTools/lnInclude \
- -I$(LIB_SRC)/triSurface/lnInclude
+ -I$(LIB_SRC)/surfMesh/lnInclude
EXE_LIBS = \
-lmeshTools \
- -ltriSurface
+ -lsurfMesh

View File

@@ -22,23 +22,29 @@ License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
Description
- Extracts triSurface from a polyMesh. Triangulates all boundary faces.
- Region numbers on triangles are the patch numbers of the polyMesh.
+ Extracts surface from a polyMesh. Depending on output surface format
+ triangulates faces.
+ Region numbers on faces cannot be guaranteed to be the same as the patch
+ indices.
Optionally only triangulates named patches.
If run in parallel the processor patches get filtered out by default and
- the mesh gets merged. (based on vertex position, not topology, so might go
- wrong!).
+ the mesh gets merged (based on topology).
\*---------------------------------------------------------------------------*/
+ #include "MeshedSurface.H"
+ #include "UnsortedMeshedSurface.H"
#include "argList.H"
#include "Time.H"
#include "polyMesh.H"
- #include "triSurface.H"
- #include "triSurfaceTools.H"
#include "processorPolyPatch.H"
#include "ListListOps.H"
+ #include "uindirectPrimitivePatch.H"
+ #include "globalMeshData.H"
+ #include "globalIndex.H"
using namespace Foam;
@@ -50,7 +56,7 @@ int main(int argc, char *argv[])
{
argList::addNote
(
- "extract surface from a polyMesh and triangulate boundary faces"
+ "extract surface from a polyMesh"
);
argList::validArgs.append("output file");
#include "addRegionOption.H"
@@ -63,27 +69,37 @@ int main(int argc, char *argv[])
(
"patches",
"(patch0 .. patchN)",
- "only triangulate named patches"
+ "only triangulate selected patches (wildcards supported)"
);
#include "setRootCase.H"
#include "createTime.H"
- const fileName outFileName(runTime.path()/args[1]);
+ const word outFileName(args[1]);
- Info<< "Extracting triSurface from boundaryMesh ..."
+ Info<< "Extracting surface from boundaryMesh ..."
<< endl << endl;
- Pout<< "Reading mesh from time " << runTime.value() << endl;
- #include "createNamedPolyMesh.H"
const bool includeProcPatches =
!(
args.optionFound("excludeProcPatches")
|| Pstream::parRun()
);
if (includeProcPatches)
{
Info<< "Including all processor patches." << nl << endl;
}
else if (Pstream::parRun())
{
Info<< "Excluding all processor patches." << nl << endl;
}
Info<< "Reading mesh from time " << runTime.value() << endl;
#include "createNamedPolyMesh.H"
// Create local surface from:
// - explicitly named patches only (-patches (at your option)
// - all patches (default in sequential mode)
@@ -98,24 +114,10 @@ int main(int argc, char *argv[])
if (args.optionFound("patches"))
{
- const wordList patchNames
+ includePatches = bMesh.patchSet
(
- args.optionLookup("patches")()
+ wordReList(args.optionLookup("patches")())
);
forAll(patchNames, patchNameI)
{
const word& patchName = patchNames[patchNameI];
const label patchI = bMesh.findPatchID(patchName);
if (patchI == -1)
{
FatalErrorIn(args.executable()) << "No such patch "
<< patchName << endl << "Patches are " << bMesh.names()
<< exit(FatalError);
}
includePatches.insert(patchI);
}
}
else
{
@@ -127,212 +129,155 @@ int main(int argc, char *argv[])
{
includePatches.insert(patchI);
}
else
{
Pout<< patch.name() << " : skipped since processorPatch"
<< endl;
}
}
}
triSurface localSurface
// Collect sizes. Hash on names to handle local-only patches (e.g.
// processor patches)
HashTable<label> patchSize(1000);
label nFaces = 0;
forAllConstIter(labelHashSet, includePatches, iter)
{
const polyPatch& pp = bMesh[iter.key()];
patchSize.insert(pp.name(), pp.size());
nFaces += pp.size();
}
Pstream::mapCombineGather(patchSize, plusEqOp<label>());
// Allocate zone/patch for all patches
HashTable<label> compactZoneID(1000);
forAllConstIter(HashTable<label>, patchSize, iter)
{
label sz = compactZoneID.size();
compactZoneID.insert(iter.key(), sz);
}
Pstream::mapCombineScatter(compactZoneID);
// Rework HashTable into labelList just for speed of conversion
labelList patchToCompactZone(bMesh.size(), -1);
forAllConstIter(HashTable<label>, compactZoneID, iter)
{
label patchI = bMesh.findPatchID(iter.key());
if (patchI != -1)
{
patchToCompactZone[patchI] = iter();
}
}
// Collect faces on zones
DynamicList<label> faceLabels(nFaces);
DynamicList<label> compactZones(nFaces);
forAllConstIter(labelHashSet, includePatches, iter)
{
const polyPatch& pp = bMesh[iter.key()];
forAll(pp, i)
{
faceLabels.append(pp.start()+i);
compactZones.append(patchToCompactZone[pp.index()]);
}
}
// Addressing engine for all faces
uindirectPrimitivePatch allBoundary
( (
triSurfaceTools::triangulate UIndirectList<face>(mesh.faces(), faceLabels),
( mesh.points()
mesh.boundaryMesh(),
includePatches
)
); );
// Find correspondence to master points
labelList pointToGlobal;
labelList uniqueMeshPoints;
autoPtr<globalIndex> globalNumbers = mesh.globalData().mergePoints
(
allBoundary.meshPoints(),
allBoundary.meshPointMap(),
pointToGlobal,
uniqueMeshPoints
);
if (!Pstream::parRun()) // Gather all unique points on master
List<pointField> gatheredPoints(Pstream::nProcs());
gatheredPoints[Pstream::myProcNo()] = pointField
(
mesh.points(),
uniqueMeshPoints
);
Pstream::gatherList(gatheredPoints);
// Gather all faces
List<faceList> gatheredFaces(Pstream::nProcs());
gatheredFaces[Pstream::myProcNo()] = allBoundary.localFaces();
forAll(gatheredFaces[Pstream::myProcNo()], i)
{ {
Info<< "Writing surface to " << outFileName << endl; inplaceRenumber(pointToGlobal, gatheredFaces[Pstream::myProcNo()][i]);
localSurface.write(outFileName);
} }
else Pstream::gatherList(gatheredFaces);
// Gather all ZoneIDs
List<labelList> gatheredZones(Pstream::nProcs());
gatheredZones[Pstream::myProcNo()] = compactZones.xfer();
Pstream::gatherList(gatheredZones);
// On master combine all points, faces, zones
if (Pstream::master())
{ {
// Write local surface pointField allPoints = ListListOps::combine<pointField>
fileName localPath = runTime.path()/runTime.caseName() + ".obj"; (
gatheredPoints,
accessOp<pointField>()
);
gatheredPoints.clear();
Pout<< "Writing local surface to " << localPath << endl; faceList allFaces = ListListOps::combine<faceList>
(
gatheredFaces,
accessOp<faceList>()
);
gatheredFaces.clear();
localSurface.write(localPath); labelList allZones = ListListOps::combine<labelList>
(
Info<< endl; gatheredZones,
accessOp<labelList>()
);
gatheredZones.clear();
// Gather all points on master // Zones
List<pointField> gatheredPoints(Pstream::nProcs()); surfZoneIdentifierList surfZones(compactZoneID.size());
forAllConstIter(HashTable<label>, compactZoneID, iter)
gatheredPoints[Pstream::myProcNo()] = localSurface.points();
Pstream::gatherList(gatheredPoints);
// Gather all localSurface patches
List<geometricSurfacePatchList> gatheredPatches(Pstream::nProcs());
gatheredPatches[Pstream::myProcNo()] = localSurface.patches();
Pstream::gatherList(gatheredPatches);
// Gather all faces
List<List<labelledTri> > gatheredFaces(Pstream::nProcs());
gatheredFaces[Pstream::myProcNo()] = localSurface;
Pstream::gatherList(gatheredFaces);
if (Pstream::master())
{ {
// On master combine all points surfZones[iter()] = surfZoneIdentifier(iter.key(), iter());
pointField allPoints = Info<< "surfZone " << iter() << " : " << surfZones[iter()].name()
ListListOps::combine<pointField> << endl;
(
gatheredPoints,
accessOp<pointField>()
);
// Count number of patches.
label nPatches = 0;
forAll(gatheredPatches, procI)
{
nPatches += gatheredPatches[procI].size();
}
// Count number of faces.
label nFaces = 0;
forAll(gatheredFaces, procI)
{
nFaces += gatheredFaces[procI].size();
}
// Loop over all processors and
// - construct mapping from local to global patches
// - relabel faces (both points and regions)
label newPatchI = 0;
// Name to new patchI
HashTable<label> nameToIndex(2*nPatches);
// Storage (oversized) for all patches
geometricSurfacePatchList allPatches(nPatches);
label newFaceI = 0;
// Storage for all faces
List<labelledTri> allFaces(nFaces);
// Offset into allPoints for current processor
label pointOffset = 0;
for (label procI = 0; procI < Pstream::nProcs(); procI++)
{
Info<< "Processor " << procI << endl
<< "-----------" << endl;
const geometricSurfacePatchList& patches =
gatheredPatches[procI];
// From local patch numbering to global
labelList localToGlobal(patches.size());
forAll(patches, patchI)
{
const geometricSurfacePatch& sp = patches[patchI];
if (!nameToIndex.found(sp.name()))
{
nameToIndex.insert(sp.name(), newPatchI);
localToGlobal[patchI] = newPatchI;
allPatches[newPatchI] = sp;
newPatchI++;
}
else
{
localToGlobal[patchI] = nameToIndex[sp.name()];
}
}
Info<< "Local patch to global patch mapping:"
<< endl;
forAll(patches, patchI)
{
Info<< " name : " << patches[patchI].name() << endl
<< " local : " << patchI << endl
<< " global : " << localToGlobal[patchI]
<< endl;
}
Info<< "Local points added in global points starting at "
<< pointOffset
<< endl;
// Collect and relabel faces
const List<labelledTri>& localFaces = gatheredFaces[procI];
forAll(localFaces, faceI)
{
const labelledTri& f = localFaces[faceI];
allFaces[newFaceI++] =
labelledTri
(
f[0] + pointOffset,
f[1] + pointOffset,
f[2] + pointOffset,
localToGlobal[f.region()]
);
}
pointOffset += gatheredPoints[procI].size();
Info<< endl;
}
allPatches.setSize(newPatchI);
// We now have allPoints, allFaces and allPatches.
// Construct overall (yet unmerged) surface from these.
triSurface allSurf(allFaces, allPatches, allPoints);
// Cleanup (which does point merge as well)
allSurf.cleanup(false);
// Write surface mesh
label slashIndex = runTime.caseName().find_last_of('/');
fileName globalCasePath(runTime.caseName().substr(0, slashIndex));
Info<< "Writing merged surface to " << globalCasePath << endl;
// create database for the sequential run
fileName globalPath
(
runTime.rootPath()
/ globalCasePath
/ args[1]
);
allSurf.write(globalPath);
} }
UnsortedMeshedSurface<face> unsortedFace
(
xferMove(allPoints),
xferMove(allFaces),
xferMove(allZones),
xferMove(surfZones)
);
MeshedSurface<face> sortedFace(unsortedFace);
fileName globalCasePath
(
runTime.processorCase()
? runTime.path()/".."/outFileName
: runTime.path()/outFileName
);
Info<< "Writing merged surface to " << globalCasePath << endl;
sortedFace.write(globalCasePath);
} }
Info<< "End\n" << endl; Info<< "End\n" << endl;

View File

@@ -3,7 +3,7 @@
# ========= |
# \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
# \\ / O peration |
- # \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ # \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
# \\/ M anipulation |
#------------------------------------------------------------------------------
# License
@@ -107,15 +107,23 @@ then
echo "pack manually" 1>&2
foamPackSource $packDir $packFile
else
- echo "pack with git-archive" 1>&2
- ( cd $packDir && git archive --format=tar --prefix=$packDir/ HEAD) > $packBase.tar
- echo "add in time-stamp and lnInclude directories" 1>&2
- tar cf $packBase.tar2 $packDir/.timeStamp $packDir/.build `find -H $packDir -type d -name lnInclude`
- tar Af $packBase.tar $packBase.tar2
- echo "gzip tar file" 1>&2
- gzip -c9 $packBase.tar > $packFile
+ if [ ! -f $packDir/.build ]
+ then
+ echo "Error: $packDir/.build does not exists" 1>&2
+ echo " Please update this by running e.g. 'wmakePrintBuild -update' in $packDir" 1>&2
+ exit 2
+ fi
+ echo "pack with git-archive" 1>&2 &&
+ ( cd $packDir && git archive --format=tar --prefix=$packDir/ HEAD) > $packBase.tar &&
+ echo "add in time-stamp and lnInclude directories" 1>&2 &&
+ tar cf $packBase.tar2 $packDir/.timeStamp $packDir/.build `find -H $packDir -type d -name lnInclude` &&
+ tar Af $packBase.tar $packBase.tar2 &&
+ echo "gzip tar file" 1>&2 &&
+ gzip -c9 $packBase.tar > $packFile &&
rm -f $packBase.tar $packBase.tar2 2>/dev/null
fi

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -165,8 +165,14 @@ Foam::Istream& Foam::UIPstream::read(token& t)
}
// String
- case token::STRING :
case token::VERBATIMSTRING :
+ {
+ // Recurse to read actual string
+ read(t);
+ t.type() = token::VERBATIMSTRING;
+ return *this;
+ }
+ case token::STRING :
{
string* pval = new string;
if (read(*pval))

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -151,10 +151,19 @@ Foam::UOPstream::~UOPstream()
// * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * * //
- Foam::Ostream& Foam::UOPstream::write(const token&)
+ Foam::Ostream& Foam::UOPstream::write(const token& t)
{
- notImplemented("Ostream& UOPstream::write(const token&)");
- setBad();
+ // Raw token output only supported for verbatim strings for now
+ if (t.type() == token::VERBATIMSTRING)
+ {
+ write(char(token::VERBATIMSTRING));
+ write(t.stringToken());
+ }
+ else
+ {
+ notImplemented("Ostream& UOPstream::write(const token&)");
+ setBad();
+ }
return *this;
}
View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -29,8 +29,16 @@ License
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
- Foam::Ostream& Foam::OSstream::write(const token&)
+ Foam::Ostream& Foam::OSstream::write(const token& t)
{
if (t.type() == token::VERBATIMSTRING)
{
write(char(token::HASH));
write(char(token::BEGIN_BLOCK));
writeQuoted(t.stringToken(), false);
write(char(token::HASH));
write(char(token::END_BLOCK));
}
return *this;
}

View File

@@ -2,7 +2,7 @@
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
- \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
+ \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
@@ -65,8 +65,16 @@ void Foam::prefixOSstream::print(Ostream& os) const
}
- Foam::Ostream& Foam::prefixOSstream::write(const token&)
+ Foam::Ostream& Foam::prefixOSstream::write(const token& t)
{
if (t.type() == token::VERBATIMSTRING)
{
write(char(token::HASH));
write(char(token::BEGIN_BLOCK));
writeQuoted(t.stringToken(), false);
write(char(token::HASH));
write(char(token::END_BLOCK));
}
return *this;
}

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -225,9 +225,9 @@ void Foam::primitiveEntry::write(Ostream& os, const bool contentsOnly) const
const token& t = operator[](i); const token& t = operator[](i);
if (t.type() == token::VERBATIMSTRING) if (t.type() == token::VERBATIMSTRING)
{ {
os << token::HASH << token::BEGIN_BLOCK; // Bypass token output operator to avoid losing verbatimness.
os.writeQuoted(t.stringToken(), false); // Handle in Ostreams themselves
os << token::HASH << token::END_BLOCK; os.write(t);
} }
else else
{ {
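A minimal sketch of the intent behind the write(const token&) changes above (the helper below is hypothetical, not part of the patch): a token read from a #{ ... #} dictionary entry can now be re-emitted verbatim instead of aborting.

    #include "token.H"
    #include "Ostream.H"

    // Sketch only: re-emit a verbatim token. OSstream/prefixOSstream now
    // write it as "#{<contents>#}"; UOPstream forwards it instead of
    // calling notImplemented().
    void writeVerbatim(Foam::Ostream& os, const Foam::token& t)
    {
        if (t.type() == Foam::token::VERBATIMSTRING)
        {
            os.write(t);
        }
    }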

View File

@ -59,8 +59,6 @@ void Foam::globalMeshData::syncData
if (slavePoints.size()+nTransformSlavePoints > 0) if (slavePoints.size()+nTransformSlavePoints > 0)
{ {
const labelList& transformSlavePoints = transformedSlaves[i];
// Combine master with untransformed slave data // Combine master with untransformed slave data
forAll(slavePoints, j) forAll(slavePoints, j)
{ {
@ -68,20 +66,27 @@ void Foam::globalMeshData::syncData
} }
// Combine master with transformed slave data // Combine master with transformed slave data
forAll(transformSlavePoints, j) if (nTransformSlavePoints)
{ {
cop(elem, elems[transformSlavePoints[j]]); const labelList& transformSlavePoints = transformedSlaves[i];
forAll(transformSlavePoints, j)
{
cop(elem, elems[transformSlavePoints[j]]);
}
} }
// Copy result back to slave slots // Copy result back to slave slots
forAll(slavePoints, j) forAll(slavePoints, j)
{ {
elems[slavePoints[j]] = elem; elems[slavePoints[j]] = elem;
} }
forAll(transformSlavePoints, j) if (nTransformSlavePoints)
{ {
elems[transformSlavePoints[j]] = elem; const labelList& transformSlavePoints = transformedSlaves[i];
forAll(transformSlavePoints, j)
{
elems[transformSlavePoints[j]] = elem;
}
} }
} }
} }
@ -125,8 +130,6 @@ void Foam::globalMeshData::syncData
if (slavePoints.size()+nTransformSlavePoints > 0) if (slavePoints.size()+nTransformSlavePoints > 0)
{ {
const labelList& transformSlavePoints = transformedSlaves[i];
// Combine master with untransformed slave data // Combine master with untransformed slave data
forAll(slavePoints, j) forAll(slavePoints, j)
{ {
@ -134,20 +137,27 @@ void Foam::globalMeshData::syncData
} }
// Combine master with transformed slave data // Combine master with transformed slave data
forAll(transformSlavePoints, j) if (nTransformSlavePoints)
{ {
cop(elem, elems[transformSlavePoints[j]]); const labelList& transformSlavePoints = transformedSlaves[i];
forAll(transformSlavePoints, j)
{
cop(elem, elems[transformSlavePoints[j]]);
}
} }
// Copy result back to slave slots // Copy result back to slave slots
forAll(slavePoints, j) forAll(slavePoints, j)
{ {
elems[slavePoints[j]] = elem; elems[slavePoints[j]] = elem;
} }
forAll(transformSlavePoints, j) if (nTransformSlavePoints)
{ {
elems[transformSlavePoints[j]] = elem; const labelList& transformSlavePoints = transformedSlaves[i];
forAll(transformSlavePoints, j)
{
elems[transformSlavePoints[j]] = elem;
}
} }
} }
} }

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -554,6 +554,15 @@ public:
const int tag = UPstream::msgType() const int tag = UPstream::msgType()
); );
//- Distribute data using default commsType.
template<class T>
void distribute
(
DynamicList<T>& fld,
const bool dummyTransform = true,
const int tag = UPstream::msgType()
) const;
//- Distribute data using default commsType. //- Distribute data using default commsType.
template<class T> template<class T>
void distribute void distribute

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -887,6 +887,25 @@ void Foam::mapDistribute::applyInverseTransforms
} }
//- Distribute data using default commsType.
template<class T>
void Foam::mapDistribute::distribute
(
DynamicList<T>& fld,
const bool dummyTransform,
const int tag
) const
{
fld.shrink();
List<T>& fldList = static_cast<List<T>& >(fld);
distribute(fldList, dummyTransform, tag);
fld.setCapacity(fldList.size());
}
//- Distribute data using default commsType. //- Distribute data using default commsType.
template<class T> template<class T>
void Foam::mapDistribute::distribute void Foam::mapDistribute::distribute
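A hedged usage sketch of the DynamicList overload added above (the wrapper and field names are made up): the overload shrinks the list, distributes it as a plain List<T>, then restores its capacity, so callers can keep appending afterwards.

    #include "mapDistribute.H"
    #include "DynamicList.H"

    // Sketch only: distribute locally appended values with the new overload.
    void distributeLocal
    (
        const Foam::mapDistribute& map,
        Foam::DynamicList<Foam::label>& vals
    )
    {
        map.distribute(vals);   // DynamicList<T> overload, default transform/tag
    }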

View File

@ -1217,7 +1217,7 @@ const Foam::globalMeshData& Foam::polyMesh::globalData() const
// Remove all files and some subdirs (eg, sets) // Remove all files and some subdirs (eg, sets)
void Foam::polyMesh::removeFiles(const fileName& instanceDir) const void Foam::polyMesh::removeFiles(const fileName& instanceDir) const
{ {
fileName meshFilesPath = thisDb().path()/instanceDir/meshDir(); fileName meshFilesPath = thisDb().time().path()/instanceDir/meshDir();
rm(meshFilesPath/"points"); rm(meshFilesPath/"points");
rm(meshFilesPath/"faces"); rm(meshFilesPath/"faces");

View File

@ -634,7 +634,7 @@ bool Foam::processorPolyPatch::order
fileName str fileName str
( (
boundaryMesh().mesh().time().path() boundaryMesh().mesh().time().path()
/name()/name()+"_faces.obj" /name() + "_faces.obj"
); );
Pout<< "processorPolyPatch::order :" Pout<< "processorPolyPatch::order :"
<< " Writing faces to OBJ file " << str.name() << endl; << " Writing faces to OBJ file " << str.name() << endl;

View File

@ -25,7 +25,6 @@ License
#include "primitiveMesh.H" #include "primitiveMesh.H"
#include "pyramidPointFaceRef.H" #include "pyramidPointFaceRef.H"
#include "tetrahedron.H"
#include "ListOps.H" #include "ListOps.H"
#include "unitConversion.H" #include "unitConversion.H"
#include "SortableList.H" #include "SortableList.H"

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -58,7 +58,7 @@ void Foam::ptscotchDecomp::check(const int retVal, const char* str)
{} {}
Foam::label Foam::ptscotchDecomp::decomposeZeroDomains Foam::label Foam::ptscotchDecomp::decompose
( (
const fileName& meshPath, const fileName& meshPath,
const List<int>& initxadj, const List<int>& initxadj,
@ -72,6 +72,7 @@ Foam::label Foam::ptscotchDecomp::decomposeZeroDomains
( (
"label ptscotchDecomp::decompose" "label ptscotchDecomp::decompose"
"(" "("
"onst fileName&,"
"const List<int>&, " "const List<int>&, "
"const List<int>&, " "const List<int>&, "
"const scalarField&, " "const scalarField&, "
@ -84,8 +85,10 @@ Foam::label Foam::ptscotchDecomp::decomposeZeroDomains
Foam::label Foam::ptscotchDecomp::decompose Foam::label Foam::ptscotchDecomp::decompose
( (
const fileName& meshPath, const fileName& meshPath,
const List<int>& adjncy, const int adjncySize,
const List<int>& xadj, const int adjncy[],
const int xadjSize,
const int xadj[],
const scalarField& cWeights, const scalarField& cWeights,
List<int>& finalDecomp List<int>& finalDecomp
) const ) const
@ -94,9 +97,12 @@ Foam::label Foam::ptscotchDecomp::decompose
( (
"label ptscotchDecomp::decompose" "label ptscotchDecomp::decompose"
"(" "("
"const List<int>&, " "const fileName&,"
"const List<int>&, " "const int,"
"const scalarField&, " "const int,"
"const int,"
"const int,"
"const scalarField&,"
"List<int>&" "List<int>&"
")" ")"
) << notImplementedMessage << exit(FatalError); ) << notImplementedMessage << exit(FatalError);

View File

@ -1,3 +0,0 @@
createShellMesh.C
LIB = $(FOAM_LIBBIN)/libcreateShellMesh

View File

@ -1,9 +0,0 @@
EXE_INC = \
-I$(LIB_SRC)/finiteVolume/lnInclude \
-I$(LIB_SRC)/meshTools/lnInclude \
-I$(LIB_SRC)/dynamicMesh/lnInclude -DFULLDEBUG -g -O0
LIB_LIBS = \
-lfiniteVolume \
-lmeshTools \
-ldynamicMesh

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -127,9 +127,7 @@ public:
// A point can be deleted if // A point can be deleted if
// - it is not used by any edge. // - it is not used by any edge.
// or // or
// - is not used by an internal edge // - is used by only two edges
// - is used by only two boundary edges. (note that these two
// edges will always be boundary ones!)
// - these two edges are sufficiently in line (cos > minCos) // - these two edges are sufficiently in line (cos > minCos)
// - all processors agree that point can be deleted. // - all processors agree that point can be deleted.
label countPointUsage label countPointUsage

View File

@ -33,6 +33,7 @@ License
#include "OFstream.H" #include "OFstream.H"
#include "IFstream.H" #include "IFstream.H"
#include "unitConversion.H" #include "unitConversion.H"
#include "DynamicField.H"
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * // // * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
@ -1059,12 +1060,368 @@ Foam::extendedFeatureEdgeMesh::edgeTreesByType() const
} }
void Foam::extendedFeatureEdgeMesh::add(const extendedFeatureEdgeMesh& fem)
{
// Points
// ~~~~~~
// From current points into combined points
labelList reversePointMap(points().size());
labelList reverseFemPointMap(fem.points().size());
label newPointI = 0;
for (label i = 0; i < concaveStart(); i++)
{
reversePointMap[i] = newPointI++;
}
for (label i = 0; i < fem.concaveStart(); i++)
{
reverseFemPointMap[i] = newPointI++;
}
// Concave
label newConcaveStart = newPointI;
for (label i = concaveStart(); i < mixedStart(); i++)
{
reversePointMap[i] = newPointI++;
}
for (label i = fem.concaveStart(); i < fem.mixedStart(); i++)
{
reverseFemPointMap[i] = newPointI++;
}
// Mixed
label newMixedStart = newPointI;
for (label i = mixedStart(); i < nonFeatureStart(); i++)
{
reversePointMap[i] = newPointI++;
}
for (label i = fem.mixedStart(); i < fem.nonFeatureStart(); i++)
{
reverseFemPointMap[i] = newPointI++;
}
// Non-feature
label newNonFeatureStart = newPointI;
for (label i = nonFeatureStart(); i < points().size(); i++)
{
reversePointMap[i] = newPointI++;
}
for (label i = fem.nonFeatureStart(); i < fem.points().size(); i++)
{
reverseFemPointMap[i] = newPointI++;
}
pointField newPoints(newPointI);
newPoints.rmap(points(), reversePointMap);
newPoints.rmap(fem.points(), reverseFemPointMap);
// Edges
// ~~~~~
// From current edges into combined edges
labelList reverseEdgeMap(edges().size());
labelList reverseFemEdgeMap(fem.edges().size());
// External
label newEdgeI = 0;
for (label i = 0; i < internalStart(); i++)
{
reverseEdgeMap[i] = newEdgeI++;
}
for (label i = 0; i < fem.internalStart(); i++)
{
reverseFemEdgeMap[i] = newEdgeI++;
}
// Internal
label newInternalStart = newEdgeI;
for (label i = internalStart(); i < flatStart(); i++)
{
reverseEdgeMap[i] = newEdgeI++;
}
for (label i = fem.internalStart(); i < fem.flatStart(); i++)
{
reverseFemEdgeMap[i] = newEdgeI++;
}
// Flat
label newFlatStart = newEdgeI;
for (label i = flatStart(); i < openStart(); i++)
{
reverseEdgeMap[i] = newEdgeI++;
}
for (label i = fem.flatStart(); i < fem.openStart(); i++)
{
reverseFemEdgeMap[i] = newEdgeI++;
}
// Open
label newOpenStart = newEdgeI;
for (label i = openStart(); i < multipleStart(); i++)
{
reverseEdgeMap[i] = newEdgeI++;
}
for (label i = fem.openStart(); i < fem.multipleStart(); i++)
{
reverseFemEdgeMap[i] = newEdgeI++;
}
// Multiple
label newMultipleStart = newEdgeI;
for (label i = multipleStart(); i < edges().size(); i++)
{
reverseEdgeMap[i] = newEdgeI++;
}
for (label i = fem.multipleStart(); i < fem.edges().size(); i++)
{
reverseFemEdgeMap[i] = newEdgeI++;
}
edgeList newEdges(newEdgeI);
forAll(edges(), i)
{
const edge& e = edges()[i];
newEdges[reverseEdgeMap[i]] = edge
(
reversePointMap[e[0]],
reversePointMap[e[1]]
);
}
forAll(fem.edges(), i)
{
const edge& e = fem.edges()[i];
newEdges[reverseFemEdgeMap[i]] = edge
(
reverseFemPointMap[e[0]],
reverseFemPointMap[e[1]]
);
}
pointField newEdgeDirections(newEdgeI);
newEdgeDirections.rmap(edgeDirections(), reverseEdgeMap);
newEdgeDirections.rmap(fem.edgeDirections(), reverseFemEdgeMap);
// Normals
// ~~~~~~~
// Combine normals
DynamicField<point> newNormals(normals().size()+fem.normals().size());
newNormals.append(normals());
newNormals.append(fem.normals());
// Combine and re-index into newNormals
labelListList newEdgeNormals(edgeNormals().size()+fem.edgeNormals().size());
UIndirectList<labelList>(newEdgeNormals, reverseEdgeMap) =
edgeNormals();
UIndirectList<labelList>(newEdgeNormals, reverseFemEdgeMap) =
fem.edgeNormals();
forAll(reverseFemEdgeMap, i)
{
label mapI = reverseFemEdgeMap[i];
labelList& en = newEdgeNormals[mapI];
forAll(en, j)
{
en[j] += edgeNormals().size();
}
}
// Combine and re-index into newFeaturePointNormals
labelListList newFeaturePointNormals
(
featurePointNormals().size()
+ fem.featurePointNormals().size()
);
// Note: featurePointNormals only go up to nonFeatureStart
UIndirectList<labelList>
(
newFeaturePointNormals,
SubList<label>(reversePointMap, featurePointNormals().size())
) = featurePointNormals();
UIndirectList<labelList>
(
newFeaturePointNormals,
SubList<label>(reverseFemPointMap, fem.featurePointNormals().size())
) = fem.featurePointNormals();
forAll(fem.featurePointNormals(), i)
{
label mapI = reverseFemPointMap[i];
labelList& fn = newFeaturePointNormals[mapI];
forAll(fn, j)
{
fn[j] += featurePointNormals().size();
}
}
// Combine regionEdges
DynamicList<label> newRegionEdges
(
regionEdges().size()
+ fem.regionEdges().size()
);
forAll(regionEdges(), i)
{
newRegionEdges.append(reverseEdgeMap[regionEdges()[i]]);
}
forAll(fem.regionEdges(), i)
{
newRegionEdges.append(reverseFemEdgeMap[fem.regionEdges()[i]]);
}
// Assign
// ~~~~~~
// Transfer
concaveStart_ = newConcaveStart;
mixedStart_ = newMixedStart;
nonFeatureStart_ = newNonFeatureStart;
// Reset points and edges
reset(xferMove(newPoints), newEdges.xfer());
// Transfer
internalStart_ = newInternalStart;
flatStart_ = newFlatStart;
openStart_ = newOpenStart;
multipleStart_ = newMultipleStart;
edgeDirections_.transfer(newEdgeDirections);
normals_.transfer(newNormals);
edgeNormals_.transfer(newEdgeNormals);
featurePointNormals_.transfer(newFeaturePointNormals);
regionEdges_.transfer(newRegionEdges);
pointTree_.clear();
edgeTree_.clear();
edgeTreesByType_.clear();
}
void Foam::extendedFeatureEdgeMesh::flipNormals()
{
// Points
// ~~~~~~
// From current points into new points
labelList reversePointMap(identity(points().size()));
// Flip convex and concave points
label newPointI = 0;
// Concave points become convex
for (label i = concaveStart(); i < mixedStart(); i++)
{
reversePointMap[i] = newPointI++;
}
// Convex points become concave
label newConcaveStart = newPointI;
for (label i = 0; i < concaveStart(); i++)
{
reversePointMap[i] = newPointI++;
}
// Edges
// ~~~~~~
// From current edges into new edges
labelList reverseEdgeMap(identity(edges().size()));
// Flip external and internal edges
label newEdgeI = 0;
// Internal become external
for (label i = internalStart(); i < flatStart(); i++)
{
reverseEdgeMap[i] = newEdgeI++;
}
// External become internal
label newInternalStart = newEdgeI;
for (label i = 0; i < internalStart(); i++)
{
reverseEdgeMap[i] = newEdgeI++;
}
pointField newPoints(points().size());
newPoints.rmap(points(), reversePointMap);
edgeList newEdges(edges().size());
forAll(edges(), i)
{
const edge& e = edges()[i];
newEdges[reverseEdgeMap[i]] = edge
(
reversePointMap[e[0]],
reversePointMap[e[1]]
);
}
// Normals are flipped
// ~~~~~~~~~~~~~~~~~~~
pointField newEdgeDirections(edges().size());
newEdgeDirections.rmap(-1.0*edgeDirections(), reverseEdgeMap);
pointField newNormals(-1.0*normals());
labelListList newEdgeNormals(edgeNormals().size());
UIndirectList<labelList>(newEdgeNormals, reverseEdgeMap) = edgeNormals();
labelListList newFeaturePointNormals(featurePointNormals().size());
// Note: featurePointNormals only go up to nonFeatureStart
UIndirectList<labelList>
(
newFeaturePointNormals,
SubList<label>(reversePointMap, featurePointNormals().size())
) = featurePointNormals();
labelList newRegionEdges(regionEdges().size());
forAll(regionEdges(), i)
{
newRegionEdges[i] = reverseEdgeMap[regionEdges()[i]];
}
// Transfer
concaveStart_ = newConcaveStart;
// Reset points and edges
reset(xferMove(newPoints), newEdges.xfer());
// Transfer
internalStart_ = newInternalStart;
edgeDirections_.transfer(newEdgeDirections);
normals_.transfer(newNormals);
edgeNormals_.transfer(newEdgeNormals);
featurePointNormals_.transfer(newFeaturePointNormals);
regionEdges_.transfer(newRegionEdges);
pointTree_.clear();
edgeTree_.clear();
edgeTreesByType_.clear();
}
//XXXXX
void Foam::extendedFeatureEdgeMesh::writeObj void Foam::extendedFeatureEdgeMesh::writeObj
( (
const fileName& prefix const fileName& prefix
) const ) const
{ {
Pout<< nl << "Writing extendedFeatureEdgeMesh components to " << prefix Info<< nl << "Writing extendedFeatureEdgeMesh components to " << prefix
<< endl; << endl;
label verti = 0; label verti = 0;
@ -1072,7 +1429,7 @@ void Foam::extendedFeatureEdgeMesh::writeObj
edgeMesh::write(prefix + "_edgeMesh.obj"); edgeMesh::write(prefix + "_edgeMesh.obj");
OFstream convexFtPtStr(prefix + "_convexFeaturePts.obj"); OFstream convexFtPtStr(prefix + "_convexFeaturePts.obj");
Pout<< "Writing convex feature points to " << convexFtPtStr.name() << endl; Info<< "Writing convex feature points to " << convexFtPtStr.name() << endl;
for(label i = 0; i < concaveStart_; i++) for(label i = 0; i < concaveStart_; i++)
{ {
@ -1080,7 +1437,7 @@ void Foam::extendedFeatureEdgeMesh::writeObj
} }
OFstream concaveFtPtStr(prefix + "_concaveFeaturePts.obj"); OFstream concaveFtPtStr(prefix + "_concaveFeaturePts.obj");
Pout<< "Writing concave feature points to " Info<< "Writing concave feature points to "
<< concaveFtPtStr.name() << endl; << concaveFtPtStr.name() << endl;
for(label i = concaveStart_; i < mixedStart_; i++) for(label i = concaveStart_; i < mixedStart_; i++)
@ -1089,7 +1446,7 @@ void Foam::extendedFeatureEdgeMesh::writeObj
} }
OFstream mixedFtPtStr(prefix + "_mixedFeaturePts.obj"); OFstream mixedFtPtStr(prefix + "_mixedFeaturePts.obj");
Pout<< "Writing mixed feature points to " << mixedFtPtStr.name() << endl; Info<< "Writing mixed feature points to " << mixedFtPtStr.name() << endl;
for(label i = mixedStart_; i < nonFeatureStart_; i++) for(label i = mixedStart_; i < nonFeatureStart_; i++)
{ {
@ -1097,7 +1454,7 @@ void Foam::extendedFeatureEdgeMesh::writeObj
} }
OFstream mixedFtPtStructureStr(prefix + "_mixedFeaturePtsStructure.obj"); OFstream mixedFtPtStructureStr(prefix + "_mixedFeaturePtsStructure.obj");
Pout<< "Writing mixed feature point structure to " Info<< "Writing mixed feature point structure to "
<< mixedFtPtStructureStr.name() << endl; << mixedFtPtStructureStr.name() << endl;
verti = 0; verti = 0;
@ -1116,7 +1473,7 @@ void Foam::extendedFeatureEdgeMesh::writeObj
} }
OFstream externalStr(prefix + "_externalEdges.obj"); OFstream externalStr(prefix + "_externalEdges.obj");
Pout<< "Writing external edges to " << externalStr.name() << endl; Info<< "Writing external edges to " << externalStr.name() << endl;
verti = 0; verti = 0;
for (label i = externalStart_; i < internalStart_; i++) for (label i = externalStart_; i < internalStart_; i++)
@ -1129,7 +1486,7 @@ void Foam::extendedFeatureEdgeMesh::writeObj
} }
OFstream internalStr(prefix + "_internalEdges.obj"); OFstream internalStr(prefix + "_internalEdges.obj");
Pout<< "Writing internal edges to " << internalStr.name() << endl; Info<< "Writing internal edges to " << internalStr.name() << endl;
verti = 0; verti = 0;
for (label i = internalStart_; i < flatStart_; i++) for (label i = internalStart_; i < flatStart_; i++)
@ -1142,7 +1499,7 @@ void Foam::extendedFeatureEdgeMesh::writeObj
} }
OFstream flatStr(prefix + "_flatEdges.obj"); OFstream flatStr(prefix + "_flatEdges.obj");
Pout<< "Writing flat edges to " << flatStr.name() << endl; Info<< "Writing flat edges to " << flatStr.name() << endl;
verti = 0; verti = 0;
for (label i = flatStart_; i < openStart_; i++) for (label i = flatStart_; i < openStart_; i++)
@ -1155,7 +1512,7 @@ void Foam::extendedFeatureEdgeMesh::writeObj
} }
OFstream openStr(prefix + "_openEdges.obj"); OFstream openStr(prefix + "_openEdges.obj");
Pout<< "Writing open edges to " << openStr.name() << endl; Info<< "Writing open edges to " << openStr.name() << endl;
verti = 0; verti = 0;
for (label i = openStart_; i < multipleStart_; i++) for (label i = openStart_; i < multipleStart_; i++)
@ -1168,7 +1525,7 @@ void Foam::extendedFeatureEdgeMesh::writeObj
} }
OFstream multipleStr(prefix + "_multipleEdges.obj"); OFstream multipleStr(prefix + "_multipleEdges.obj");
Pout<< "Writing multiple edges to " << multipleStr.name() << endl; Info<< "Writing multiple edges to " << multipleStr.name() << endl;
verti = 0; verti = 0;
for (label i = multipleStart_; i < edges().size(); i++) for (label i = multipleStart_; i < edges().size(); i++)
@ -1181,7 +1538,7 @@ void Foam::extendedFeatureEdgeMesh::writeObj
} }
OFstream regionStr(prefix + "_regionEdges.obj"); OFstream regionStr(prefix + "_regionEdges.obj");
Pout<< "Writing region edges to " << regionStr.name() << endl; Info<< "Writing region edges to " << regionStr.name() << endl;
verti = 0; verti = 0;
forAll(regionEdges_, i) forAll(regionEdges_, i)
@ -1197,23 +1554,26 @@ void Foam::extendedFeatureEdgeMesh::writeObj
bool Foam::extendedFeatureEdgeMesh::writeData(Ostream& os) const bool Foam::extendedFeatureEdgeMesh::writeData(Ostream& os) const
{ {
os << "// points, edges, concaveStart, mixedStart, nonFeatureStart, " << nl os << "// points" << nl
<< "// internalStart, flatStart, openStart, multipleStart, " << nl << points() << nl
<< "// normals, edgeNormals, featurePointNormals, regionEdges" << nl << "// edges" << nl
<< endl;
os << points() << nl
<< edges() << nl << edges() << nl
<< "// concaveStart mixedStart nonFeatureStart" << nl
<< concaveStart_ << token::SPACE << concaveStart_ << token::SPACE
<< mixedStart_ << token::SPACE << mixedStart_ << token::SPACE
<< nonFeatureStart_ << token::SPACE << nonFeatureStart_ << nl
<< "// internalStart flatStart openStart multipleStart" << nl
<< internalStart_ << token::SPACE << internalStart_ << token::SPACE
<< flatStart_ << token::SPACE << flatStart_ << token::SPACE
<< openStart_ << token::SPACE << openStart_ << token::SPACE
<< multipleStart_ << nl << multipleStart_ << nl
<< "// normals" << nl
<< normals_ << nl << normals_ << nl
<< "// edgeNormals" << nl
<< edgeNormals_ << nl << edgeNormals_ << nl
<< "// featurePointNormals" << nl
<< featurePointNormals_ << nl << featurePointNormals_ << nl
<< "// regionEdges" << nl
<< regionEdges_ << regionEdges_
<< endl; << endl;

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -30,8 +30,8 @@ Description
Feature points are a sorted subset at the start of the overall points list: Feature points are a sorted subset at the start of the overall points list:
0 .. concaveStart_-1 : convex points (w.r.t normals) 0 .. concaveStart_-1 : convex points (w.r.t normals)
concaveStart_-1 .. mixedStart_-1 : concave points concaveStart_ .. mixedStart_-1 : concave points
mixedStart_ .. nonFeatureStart_ : mixed internal/external points mixedStart_ .. nonFeatureStart_-1 : mixed internal/external points
nonFeatureStart_ .. size-1 : non-feature points nonFeatureStart_ .. size-1 : non-feature points
Feature edges are the edgeList of the edgeMesh and are sorted: Feature edges are the edgeList of the edgeMesh and are sorted:
@ -151,6 +151,7 @@ private:
labelListList edgeNormals_; labelListList edgeNormals_;
//- Indices of the normals that are adjacent to the feature points //- Indices of the normals that are adjacent to the feature points
// (only valid for 0..nonFeatureStart_-1)
labelListList featurePointNormals_; labelListList featurePointNormals_;
//- Feature edges which are on the boundary between regions //- Feature edges which are on the boundary between regions
@ -212,9 +213,10 @@ public:
const Xfer<edgeList>& const Xfer<edgeList>&
); );
//- Construct (read) given surfaceFeatures, an objectRegistry and a //- Construct given a surface with selected edges,point
// fileName to write to. Extracts, classifies and reorders the data // (surfaceFeatures), an objectRegistry and a
// from surfaceFeatures. // fileName to write to.
// Extracts, classifies and reorders the data from surfaceFeatures.
extendedFeatureEdgeMesh extendedFeatureEdgeMesh
( (
const surfaceFeatures& sFeat, const surfaceFeatures& sFeat,
@ -381,6 +383,16 @@ public:
edgeTreesByType() const; edgeTreesByType() const;
// Edit
//- Add extendedFeatureEdgeMesh. No filtering of duplicates.
void add(const extendedFeatureEdgeMesh&);
//- Flip normals. All concave become convex, all internal external
// etc.
void flipNormals();
// Write // Write
//- Write all components of the extendedFeatureEdgeMesh as obj files //- Write all components of the extendedFeatureEdgeMesh as obj files
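A hedged sketch of how the two new edit operations could be combined (the wrapper is hypothetical; duplicates are not filtered, as the header notes):

    #include "extendedFeatureEdgeMesh.H"

    // Sketch only: merge a second feature-edge mesh into the first and then
    // flip its orientation.
    void mergeAndFlip
    (
        Foam::extendedFeatureEdgeMesh& target,
        const Foam::extendedFeatureEdgeMesh& extra
    )
    {
        target.add(extra);       // concatenate points/edges per category
        target.flipNormals();    // convex<->concave, external<->internal etc.
    }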

View File

@ -83,7 +83,8 @@ Foam::pressureGradientExplicitSource::pressureGradientExplicitSource
Ubar_(coeffs_.lookup("Ubar")), Ubar_(coeffs_.lookup("Ubar")),
gradPini_(coeffs_.lookup("gradPini")), gradPini_(coeffs_.lookup("gradPini")),
gradP_(gradPini_), gradP_(gradPini_),
flowDir_(Ubar_/mag(Ubar_)) flowDir_(Ubar_/mag(Ubar_)),
invAPtr_(NULL)
{ {
coeffs_.lookup("fieldNames") >> fieldNames_; coeffs_.lookup("fieldNames") >> fieldNames_;
@ -124,36 +125,9 @@ Foam::pressureGradientExplicitSource::pressureGradientExplicitSource
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
void Foam::pressureGradientExplicitSource::addSup
(
fvMatrix<vector>& eqn,
const label fieldI
)
{
DimensionedField<vector, volMesh> Su
(
IOobject
(
name_ + fieldNames_[fieldI] + "Sup",
mesh_.time().timeName(),
mesh_,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh_,
dimensionedVector("zero", gradP_.dimensions(), vector::zero)
);
UIndirectList<vector>(Su, cells_) = flowDir_*gradP_.value();
eqn += Su;
}
void Foam::pressureGradientExplicitSource::correct(volVectorField& U) void Foam::pressureGradientExplicitSource::correct(volVectorField& U)
{ {
const volScalarField& rAU = const scalarField& rAU = invAPtr_().internalField();
mesh_.lookupObject<volScalarField>("(1|A(" + U.name() + "))");
// Integrate flow variables over cell set // Integrate flow variables over cell set
scalar magUbarAve = 0.0; scalar magUbarAve = 0.0;
@ -196,4 +170,61 @@ void Foam::pressureGradientExplicitSource::correct(volVectorField& U)
} }
void Foam::pressureGradientExplicitSource::addSup
(
fvMatrix<vector>& eqn,
const label fieldI
)
{
DimensionedField<vector, volMesh> Su
(
IOobject
(
name_ + fieldNames_[fieldI] + "Sup",
mesh_.time().timeName(),
mesh_,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh_,
dimensionedVector("zero", gradP_.dimensions(), vector::zero)
);
UIndirectList<vector>(Su, cells_) = flowDir_*gradP_.value();
eqn += Su;
}
void Foam::pressureGradientExplicitSource::setValue
(
fvMatrix<vector>& eqn,
const label
)
{
if (invAPtr_.empty())
{
invAPtr_.reset
(
new volScalarField
(
IOobject
(
name_ + "::invA",
mesh_.time().timeName(),
mesh_,
IOobject::NO_READ,
IOobject::NO_WRITE
),
1.0/eqn.A()
)
);
}
else
{
invAPtr_() = 1.0/eqn.A();
}
}
// ************************************************************************* // // ************************************************************************* //

View File

@ -33,7 +33,7 @@ Description
pressureGradientExplicitSourceCoeffs pressureGradientExplicitSourceCoeffs
{ {
UName U; // name of velocity field fieldNames (U); // name of velocity field
Ubar (10.0 0 0); // desired average velocity Ubar (10.0 0 0); // desired average velocity
gradPini gradPini [0 2 -2 0 0] 0; // initial pressure gradient gradPini gradPini [0 2 -2 0 0] 0; // initial pressure gradient
flowDir (1 0 0); // flow direction flowDir (1 0 0); // flow direction
@ -82,6 +82,9 @@ class pressureGradientExplicitSource
//- Flow direction //- Flow direction
vector flowDir_; vector flowDir_;
//- Matrix 1/A coefficients field pointer
autoPtr<volScalarField> invAPtr_;
// Private Member Functions // Private Member Functions
@ -126,6 +129,13 @@ public:
//- Add explicit contribution to equation //- Add explicit contribution to equation
virtual void addSup(fvMatrix<vector>& eqn, const label fieldI); virtual void addSup(fvMatrix<vector>& eqn, const label fieldI);
//- Set 1/A coefficient
virtual void setValue
(
fvMatrix<vector>& eqn,
const label fieldI
);
// I-O // I-O
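A hedged reading of the new 1/A handling (the call ordering below is an assumption about how the source framework drives it, not something shown in the patch): setValue() is given the assembled momentum matrix and caches 1/A(), so correct() no longer needs a registered "(1|A(U))" field.

    #include "pressureGradientExplicitSource.H"

    // Sketch only: hypothetical driver-side ordering.
    void applySource
    (
        Foam::pressureGradientExplicitSource& source,
        Foam::fvMatrix<Foam::vector>& UEqn,
        Foam::volVectorField& U
    )
    {
        source.setValue(UEqn, 0);   // caches 1.0/UEqn.A(); the label is unused
        // ... solve the momentum equation ...
        source.correct(U);          // uses the cached field via invAPtr_
    }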

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -47,33 +47,34 @@ void Foam::bladeModel::interpolateWeights
scalar& ddx scalar& ddx
) const ) const
{ {
scalar x = -GREAT; i2 = 0;
label nElem = values.size(); label nElem = values.size();
i2 = 0; if (nElem == 1)
while ((x < xIn) && (i2 < nElem))
{ {
x = values[i2];
i2++;
}
if (i2 == 0)
{
i1 = i2;
ddx = 0.0;
return;
}
else if (i2 == values.size())
{
i2 = values.size() - 1;
i1 = i2; i1 = i2;
ddx = 0.0; ddx = 0.0;
return; return;
} }
else else
{ {
i1 = i2 - 1; while ((values[i2] < xIn) && (i2 < nElem))
ddx = (xIn - values[i1])/(values[i2] - values[i1]); {
i2++;
}
if (i2 == nElem)
{
i2 = nElem - 1;
i1 = i2;
ddx = 0.0;
return;
}
else
{
i1 = i2 - 1;
ddx = (xIn - values[i1])/(values[i2] - values[i1]);
}
} }
} }
@ -120,14 +121,8 @@ Foam::bladeModel::bladeModel(const dictionary& dict)
} }
else else
{ {
FatalErrorIn FatalErrorIn("Foam::bladeModel::bladeModel(const dictionary&)")
( << "No blade data specified" << exit(FatalError);
"Foam::bladeModel::bladeModel"
"("
"const dictionary&, "
"const word&"
")"
) << "No blade data specified" << exit(FatalError);
} }
} }

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -49,33 +49,34 @@ void Foam::lookupProfile::interpolateWeights
scalar& ddx scalar& ddx
) const ) const
{ {
scalar x = -GREAT; i2 = 0;
label nElem = values.size(); label nElem = values.size();
i2 = 0; if (nElem == 1)
while ((x < xIn) && (i2 < nElem))
{ {
x = values[i2];
i2++;
}
if (i2 == 0)
{
i1 = i2;
ddx = 0.0;
return;
}
else if (i2 == values.size())
{
i2 = values.size() - 1;
i1 = i2; i1 = i2;
ddx = 0.0; ddx = 0.0;
return; return;
} }
else else
{ {
i1 = i2 - 1; while ((values[i2] < xIn) && (i2 < nElem))
ddx = (xIn - values[i1])/(values[i2] - values[i1]); {
i2++;
}
if (i2 == nElem)
{
i2 = nElem - 1;
i1 = i2;
ddx = 0.0;
return;
}
else
{
i1 = i2 - 1;
ddx = (xIn - values[i1])/(values[i2] - values[i1]);
}
} }
} }
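A quick worked check of the rewritten interpolation above (values chosen for illustration): for values = (0.0 0.5 1.0) and xIn = 0.75 the loop stops at i2 = 2, giving i1 = 1 and ddx = (0.75 - 0.5)/(1.0 - 0.5) = 0.5, i.e. half-way between the second and third entries; a single-entry table now returns i1 = i2 = 0 with ddx = 0 immediately instead of scanning.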

View File

@ -38,7 +38,7 @@ namespace Foam
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * // // * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
Foam::scalar Foam::seriesProfile::evaluate Foam::scalar Foam::seriesProfile::evaluateDrag
( (
const scalar& xIn, const scalar& xIn,
const List<scalar>& values const List<scalar>& values
@ -48,7 +48,26 @@ Foam::scalar Foam::seriesProfile::evaluate
forAll(values, i) forAll(values, i)
{ {
result += values[i]*sin((i + 1)*xIn); result += values[i]*cos(i*xIn);
}
return result;
}
Foam::scalar Foam::seriesProfile::evaluateLift
(
const scalar& xIn,
const List<scalar>& values
) const
{
scalar result = 0.0;
forAll(values, i)
{
// note: first contribution always zero since sin(0) = 0, but
// keep zero base to be consistent with drag coeffs
result += values[i]*sin(i*xIn);
} }
return result; return result;
@ -108,8 +127,8 @@ Foam::seriesProfile::seriesProfile
void Foam::seriesProfile::Cdl(const scalar alpha, scalar& Cd, scalar& Cl) const void Foam::seriesProfile::Cdl(const scalar alpha, scalar& Cd, scalar& Cl) const
{ {
Cd = evaluate(alpha, CdCoeffs_); Cd = evaluateDrag(alpha, CdCoeffs_);
Cl = evaluate(alpha, ClCoeffs_); Cl = evaluateLift(alpha, ClCoeffs_);
} }

View File

@ -28,7 +28,7 @@ Description
Series-up based profile data - drag and lift coefficients computed as Series-up based profile data - drag and lift coefficients computed as
sum of cosine series sum of cosine series
Cd = sum_i(CdCoeff)*sin(i*AOA) Cd = sum_i(CdCoeff)*cos(i*AOA)
Cl = sum_i(ClCoeff)*sin(i*AOA) Cl = sum_i(ClCoeff)*sin(i*AOA)
where: where:
@ -79,12 +79,21 @@ protected:
// Protected Member Functions // Protected Member Functions
//- Evaluate // Evaluate
scalar evaluate
( //- Drag
const scalar& xIn, scalar evaluateDrag
const List<scalar>& values (
) const; const scalar& xIn,
const List<scalar>& values
) const;
//- Lift
scalar evaluateLift
(
const scalar& xIn,
const List<scalar>& values
) const;
public: public:
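A short worked check of the split evaluation above (coefficient symbols invented for illustration): with CdCoeffs = (c0 c1) and ClCoeffs = (l0 l1), Cd = c0*cos(0) + c1*cos(alpha) = c0 + c1*cos(alpha), while Cl = l0*sin(0) + l1*sin(alpha) = l1*sin(alpha). The zeroth drag coefficient therefore supplies a constant offset while the zeroth lift term always vanishes, which is why the comment keeps the zero base "to be consistent with drag coeffs".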

View File

@ -67,6 +67,7 @@ namespace Foam
void Foam::rotorDiskSource::checkData() void Foam::rotorDiskSource::checkData()
{ {
// set inflow type
switch (selectionMode()) switch (selectionMode())
{ {
case smCellSet: case smCellSet:
@ -129,11 +130,10 @@ void Foam::rotorDiskSource::setFaceArea(vector& axis, const bool correct)
const label nInternalFaces = mesh_.nInternalFaces(); const label nInternalFaces = mesh_.nInternalFaces();
const polyBoundaryMesh& pbm = mesh_.boundaryMesh(); const polyBoundaryMesh& pbm = mesh_.boundaryMesh();
const vectorField& Sf = mesh_.Sf().internalField(); const vectorField& Sf = mesh_.Sf();
const scalarField& magSf = mesh_.magSf().internalField(); const scalarField& magSf = mesh_.magSf();
vector n = vector::zero; vector n = vector::zero;
label nFace = 0;
// calculate cell addressing for selected cells // calculate cell addressing for selected cells
labelList cellAddr(mesh_.nCells(), -1); labelList cellAddr(mesh_.nCells(), -1);
@ -158,7 +158,6 @@ void Foam::rotorDiskSource::setFaceArea(vector& axis, const bool correct)
// correct for parallel running // correct for parallel running
syncTools::swapBoundaryFaceList(mesh_, nbrFaceCellAddr); syncTools::swapBoundaryFaceList(mesh_, nbrFaceCellAddr);
// add internal field contributions // add internal field contributions
for (label faceI = 0; faceI < nInternalFaces; faceI++) for (label faceI = 0; faceI < nInternalFaces; faceI++)
{ {
@ -173,7 +172,6 @@ void Foam::rotorDiskSource::setFaceArea(vector& axis, const bool correct)
{ {
area_[own] += magSf[faceI]; area_[own] += magSf[faceI];
n += Sf[faceI]; n += Sf[faceI];
nFace++;
} }
} }
else if ((own == -1) && (nbr != -1)) else if ((own == -1) && (nbr != -1))
@ -184,7 +182,6 @@ void Foam::rotorDiskSource::setFaceArea(vector& axis, const bool correct)
{ {
area_[nbr] += magSf[faceI]; area_[nbr] += magSf[faceI];
n -= Sf[faceI]; n -= Sf[faceI];
nFace++;
} }
} }
} }
@ -210,7 +207,6 @@ void Foam::rotorDiskSource::setFaceArea(vector& axis, const bool correct)
{ {
area_[own] += magSfp[j]; area_[own] += magSfp[j];
n += Sfp[j]; n += Sfp[j];
nFace++;
} }
} }
} }
@ -222,11 +218,10 @@ void Foam::rotorDiskSource::setFaceArea(vector& axis, const bool correct)
const label own = cellAddr[mesh_.faceOwner()[faceI]]; const label own = cellAddr[mesh_.faceOwner()[faceI]];
const vector nf = Sfp[j]/magSfp[j]; const vector nf = Sfp[j]/magSfp[j];
if ((own != -1) && ((nf & axis) > 0.8)) if ((own != -1) && ((nf & axis) > tol))
{ {
area_[own] += magSfp[j]; area_[own] += magSfp[j];
n += Sfp[j]; n += Sfp[j];
nFace++;
} }
} }
} }
@ -359,32 +354,30 @@ void Foam::rotorDiskSource::constructGeometry()
forAll(cells_, i) forAll(cells_, i)
{ {
const label cellI = cells_[i]; if (area_[i] > ROOTVSMALL)
// position in (planar) rotor co-ordinate system
x_[i] = coordSys_.localPosition(C[cellI]);
// cache max radius
rMax_ = max(rMax_, x_[i].x());
// swept angle relative to rDir axis [radians] in range 0 -> 2*pi
scalar psi = x_[i].y();
if (psi < 0)
{ {
psi += mathematical::twoPi; const label cellI = cells_[i];
// position in (planar) rotor co-ordinate system
x_[i] = coordSys_.localPosition(C[cellI]);
// cache max radius
rMax_ = max(rMax_, x_[i].x());
// swept angle relative to rDir axis [radians] in range 0 -> 2*pi
scalar psi = x_[i].y();
// blade flap angle [radians]
scalar beta =
flap_.beta0 - flap_.beta1*cos(psi) - flap_.beta2*sin(psi);
// determine rotation tensor to convert from planar system into the
// rotor cone system
scalar c = cos(beta);
scalar s = sin(beta);
R_[i] = tensor(c, 0, -s, 0, 1, 0, s, 0, c);
invR_[i] = R_[i].T();
} }
// blade flap angle [radians]
scalar beta = flap_.beta0 - flap_.beta1*cos(psi) - flap_.beta2*sin(psi);
// determine rotation tensor to convert from planar system into the
// rotor cone system
scalar cNeg = cos(-beta);
scalar sNeg = sin(-beta);
R_[i] = tensor(cNeg, 0.0, -sNeg, 0.0, 1.0, 0.0, sNeg, 0.0, cNeg);
scalar cPos = cos(beta);
scalar sPos = sin(beta);
invR_[i] = tensor(cPos, 0.0, -sPos, 0.0, 1.0, 0.0, sPos, 0.0, cPos);
} }
} }
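A brief check of the simplification above (a standard rotation-matrix identity, not part of the patch): with c = cos(beta) and s = sin(beta),

    R   = ( c 0 -s ; 0 1 0 ;  s 0 c )
    R^T = ( c 0  s ; 0 1 0 ; -s 0 c )

and R^T is the same matrix evaluated at -beta, i.e. the inverse rotation, so invR_[i] = R_[i].T() replaces the explicit cos(-beta)/sin(-beta) construction.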
@ -440,21 +433,22 @@ Foam::rotorDiskSource::rotorDiskSource
: :
basicSource(name, modelType, dict, mesh), basicSource(name, modelType, dict, mesh),
rhoName_("none"), rhoName_("none"),
rhoRef_(1.0),
omega_(0.0), omega_(0.0),
nBlades_(0), nBlades_(0),
inletFlow_(ifLocal), inletFlow_(ifLocal),
inletVelocity_(vector::zero), inletVelocity_(vector::zero),
tipEffect_(1.0), tipEffect_(1.0),
flap_(), flap_(),
trim_(trimModel::New(*this, coeffs_)),
blade_(coeffs_.subDict("blade")),
profiles_(coeffs_.subDict("profiles")),
x_(cells_.size(), vector::zero), x_(cells_.size(), vector::zero),
R_(cells_.size(), I), R_(cells_.size(), I),
invR_(cells_.size(), I), invR_(cells_.size(), I),
area_(cells_.size(), 0.0), area_(cells_.size(), 0.0),
coordSys_(false), coordSys_(false),
rMax_(0.0) rMax_(0.0),
trim_(trimModel::New(*this, coeffs_)),
blade_(coeffs_.subDict("blade")),
profiles_(coeffs_.subDict("profiles"))
{ {
read(dict); read(dict);
} }
@ -477,13 +471,15 @@ void Foam::rotorDiskSource::calculate
const bool output const bool output
) const ) const
{ {
tmp<volScalarField> trho;
if (rhoName_ != "none")
{
trho = mesh_.lookupObject<volScalarField>(rhoName_);
}
const scalarField& V = mesh_.V(); const scalarField& V = mesh_.V();
const bool compressible = rhoName_ != "none";
tmp<volScalarField> trho
(
compressible
? mesh_.lookupObject<volScalarField>(rhoName_)
: volScalarField::null()
);
// logging info // logging info
@ -503,17 +499,17 @@ void Foam::rotorDiskSource::calculate
// velocity in local cylindrical reference frame // velocity in local cylindrical reference frame
vector Uc = coordSys_.localVector(U[cellI]); vector Uc = coordSys_.localVector(U[cellI]);
// apply correction in local system due to coning // transform from rotor cylindrical into local coning system
Uc = R_[i] & Uc; Uc = R_[i] & Uc;
// set radial component of velocity to zero // set radial component of velocity to zero
Uc.x() = 0.0; Uc.x() = 0.0;
// remove blade linear velocity from blade normal component // set blade normal component of velocity
Uc.y() -= radius*omega_; Uc.y() = radius*omega_ - Uc.y();
// determine blade data for this radius // determine blade data for this radius
// i1 = index of upper bound data point in blade list // i2 = index of upper radius bound data point in blade list
scalar twist = 0.0; scalar twist = 0.0;
scalar chord = 0.0; scalar chord = 0.0;
label i1 = -1; label i1 = -1;
@ -521,18 +517,15 @@ void Foam::rotorDiskSource::calculate
scalar invDr = 0.0; scalar invDr = 0.0;
blade_.interpolate(radius, twist, chord, i1, i2, invDr); blade_.interpolate(radius, twist, chord, i1, i2, invDr);
// effective angle of attack // flip geometric angle if blade is spinning in reverse (clockwise)
scalar alphaEff = scalar alphaGeom = alphag[i] + twist;
mathematical::pi + atan2(Uc.z(), Uc.y()) - (alphag[i] + twist); if (omega_ < 0)
{
alphaGeom = mathematical::pi - alphaGeom;
}
if (alphaEff > mathematical::pi) // effective angle of attack
{ scalar alphaEff = alphaGeom - atan2(-Uc.z(), Uc.y());
alphaEff -= mathematical::twoPi;
}
if (alphaEff < -mathematical::pi)
{
alphaEff += mathematical::twoPi;
}
AOAmin = min(AOAmin, alphaEff); AOAmin = min(AOAmin, alphaEff);
AOAmax = max(AOAmax, alphaEff); AOAmax = max(AOAmax, alphaEff);
@ -557,21 +550,21 @@ void Foam::rotorDiskSource::calculate
// calculate forces perpendicular to blade // calculate forces perpendicular to blade
scalar pDyn = 0.5*magSqr(Uc); scalar pDyn = 0.5*magSqr(Uc);
if (trho.valid()) if (compressible)
{ {
pDyn *= trho()[cellI]; pDyn *= trho()[cellI];
} }
scalar f = pDyn*chord*nBlades_*area_[i]/mathematical::twoPi; scalar f = pDyn*chord*nBlades_*area_[i]/radius/mathematical::twoPi;
vector localForce = vector(0.0, f*Cd, tipFactor*f*Cl); vector localForce = vector(0.0, -f*Cd, tipFactor*f*Cl);
// accumulate forces
dragEff += rhoRef_*localForce.y();
liftEff += rhoRef_*localForce.z();
// convert force from local coning system into rotor cylindrical // convert force from local coning system into rotor cylindrical
localForce = invR_[i] & localForce; localForce = invR_[i] & localForce;
// accumulate forces
dragEff += localForce.y();
liftEff += localForce.z();
// convert force to global cartesian co-ordinate system // convert force to global cartesian co-ordinate system
force[cellI] = coordSys_.globalVector(localForce); force[cellI] = coordSys_.globalVector(localForce);
@ -609,6 +602,7 @@ void Foam::rotorDiskSource::addSup(fvMatrix<vector>& eqn, const label fieldI)
} }
else else
{ {
coeffs_.lookup("rhoRef") >> rhoRef_;
dims.reset(dimForce/dimVolume/dimDensity); dims.reset(dimForce/dimVolume/dimDensity);
} }
@ -659,6 +653,8 @@ bool Foam::rotorDiskSource::read(const dictionary& dict)
coeffs_.lookup("fieldNames") >> fieldNames_; coeffs_.lookup("fieldNames") >> fieldNames_;
applied_.setSize(fieldNames_.size(), false); applied_.setSize(fieldNames_.size(), false);
// read co-ordinate system/geometry invariant properties
scalar rpm(readScalar(coeffs_.lookup("rpm"))); scalar rpm(readScalar(coeffs_.lookup("rpm")));
omega_ = rpm/60.0*mathematical::twoPi; omega_ = rpm/60.0*mathematical::twoPi;
@ -676,14 +672,17 @@ bool Foam::rotorDiskSource::read(const dictionary& dict)
flap_.beta1 = degToRad(flap_.beta1); flap_.beta1 = degToRad(flap_.beta1);
flap_.beta2 = degToRad(flap_.beta2); flap_.beta2 = degToRad(flap_.beta2);
trim_->read(coeffs_);
checkData();
// create co-ordinate system
createCoordinateSystem(); createCoordinateSystem();
// read co-ordinate system dependent properties
checkData();
constructGeometry(); constructGeometry();
trim_->read(coeffs_);
if (debug) if (debug)
{ {
writeField("alphag", trim_->thetag()(), true); writeField("alphag", trim_->thetag()(), true);

View File

@ -148,7 +148,11 @@ protected:
//- Name of density field //- Name of density field
word rhoName_; word rhoName_;
//- Reference density if rhoName = 'none'
scalar rhoRef_;
//- Rotational speed [rad/s] //- Rotational speed [rad/s]
// Positive anti-clockwise when looking along -ve lift direction
scalar omega_; scalar omega_;
//- Number of blades //- Number of blades
@ -167,15 +171,6 @@ protected:
//- Blade flap coefficients [rad/s] //- Blade flap coefficients [rad/s]
flapData flap_; flapData flap_;
//- Trim model
autoPtr<trimModel> trim_;
//- Blade data
bladeModel blade_;
//- Profile data
profileModelList profiles_;
//- Cell centre positions in local rotor frame //- Cell centre positions in local rotor frame
// (Cylindrical r, theta, z) // (Cylindrical r, theta, z)
List<point> x_; List<point> x_;
@ -195,6 +190,15 @@ protected:
//- Maximum radius //- Maximum radius
scalar rMax_; scalar rMax_;
//- Trim model
autoPtr<trimModel> trim_;
//- Blade data
bladeModel blade_;
//- Profile data
profileModelList profiles_;
// Protected Member Functions // Protected Member Functions

View File

@ -67,15 +67,10 @@ void Foam::fixedTrim::read(const dictionary& dict)
scalar theta1c = degToRad(readScalar(coeffs_.lookup("theta1c"))); scalar theta1c = degToRad(readScalar(coeffs_.lookup("theta1c")));
scalar theta1s = degToRad(readScalar(coeffs_.lookup("theta1s"))); scalar theta1s = degToRad(readScalar(coeffs_.lookup("theta1s")));
const List<vector>& x = rotor_.x(); const List<point>& x = rotor_.x();
forAll(thetag_, i) forAll(thetag_, i)
{ {
scalar psi = x[i].y(); scalar psi = x[i].y();
if (psi < 0)
{
psi += mathematical::twoPi;
}
thetag_[i] = theta0 + theta1c*cos(psi) + theta1s*sin(psi); thetag_[i] = theta0 + theta1c*cos(psi) + theta1s*sin(psi);
} }
} }

View File

@ -142,11 +142,6 @@ Foam::tmp<Foam::scalarField> Foam::targetForceTrim::thetag() const
forAll(t, i) forAll(t, i)
{ {
scalar psi = x[i].y(); scalar psi = x[i].y();
if (psi < 0)
{
psi += mathematical::twoPi;
}
t[i] = theta_[0] + theta_[1]*cos(psi) + theta_[2]*sin(psi); t[i] = theta_[0] + theta_[1]*cos(psi) + theta_[2]*sin(psi);
} }

View File

@ -174,6 +174,7 @@ $(derivedFvPatchFields)/waveSurfacePressure/waveSurfacePressureFvPatchScalarFiel
$(derivedFvPatchFields)/phaseHydrostaticPressure/phaseHydrostaticPressureFvPatchScalarField.C $(derivedFvPatchFields)/phaseHydrostaticPressure/phaseHydrostaticPressureFvPatchScalarField.C
$(derivedFvPatchFields)/variableHeightFlowRate/variableHeightFlowRateFvPatchField.C $(derivedFvPatchFields)/variableHeightFlowRate/variableHeightFlowRateFvPatchField.C
$(derivedFvPatchFields)/variableHeightFlowRateInletVelocity/variableHeightFlowRateInletVelocityFvPatchVectorField.C $(derivedFvPatchFields)/variableHeightFlowRateInletVelocity/variableHeightFlowRateInletVelocityFvPatchVectorField.C
$(derivedFvPatchFields)/temperatureJump/temperatureJumpFvPatchScalarField.C
fvsPatchFields = fields/fvsPatchFields fvsPatchFields = fields/fvsPatchFields
$(fvsPatchFields)/fvsPatchField/fvsPatchFields.C $(fvsPatchFields)/fvsPatchField/fvsPatchFields.C

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -48,7 +48,7 @@ void wedgeFvPatchField<scalar>::evaluate(const Pstream::commsTypes)
updateCoeffs(); updateCoeffs();
} }
operator==(patchInternalField()); this->operator==(patchInternalField());
} }

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -132,7 +132,7 @@ void Foam::cylindricalInletVelocityFvPatchVectorField::updateCoeffs()
vector hatAxis = axis_/mag(axis_); vector hatAxis = axis_/mag(axis_);
const vectorField r(patch().Cf() - centre_); const vectorField r(patch().Cf() - centre_);
tmp<vectorField> d = r - (hatAxis & r)*hatAxis; const vectorField d = r - (hatAxis & r)*hatAxis;
tmp<vectorField> tangVel tmp<vectorField> tangVel
( (

View File

@ -129,11 +129,19 @@ void Foam::flowRateInletVelocityFvPatchVectorField::updateCoeffs()
} }
else if (phi.dimensions() == dimDensity*dimVelocity*dimArea) else if (phi.dimensions() == dimDensity*dimVelocity*dimArea)
{ {
const fvPatchField<scalar>& rhop = if (rhoName_ == "none")
patch().lookupPatchField<volScalarField, scalar>(rhoName_); {
// volumetric flow-rate if density not given
operator==(n*avgU);
}
else
{
// mass flow-rate
const fvPatchField<scalar>& rhop =
patch().lookupPatchField<volScalarField, scalar>(rhoName_);
// mass flow-rate operator==(n*avgU/rhop);
operator==(n*avgU/rhop); }
} }
else else
{ {

View File

@ -30,8 +30,10 @@ Description
The basis of the patch (volumetric or mass) is determined by the The basis of the patch (volumetric or mass) is determined by the
dimensions of the flux, phi. dimensions of the flux, phi.
The current density is used to correct the velocity when applying the
mass basis. If the flux is mass-based
- the current density is used to correct the velocity
- volumetric flow rate can be applied by setting the 'rho' entry to 'none'
Example of the boundary condition specification: Example of the boundary condition specification:
\verbatim \verbatim
@ -39,6 +41,7 @@ Description
{ {
type flowRateInletVelocity; type flowRateInletVelocity;
flowRate 0.2; // Volumetric/mass flow rate [m3/s or kg/s] flowRate 0.2; // Volumetric/mass flow rate [m3/s or kg/s]
rho rho; // none | rho [m3/s or kg/s]
value uniform (0 0 0); // placeholder value uniform (0 0 0); // placeholder
} }
\endverbatim \endverbatim

View File

@ -0,0 +1,129 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
#include "addToRunTimeSelectionTable.H"
#include "temperatureJumpFvPatchScalarField.H"
#include "volFields.H"
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::temperatureJumpFvPatchScalarField::temperatureJumpFvPatchScalarField
(
const fvPatch& p,
const DimensionedField<scalar, volMesh>& iF
)
:
fixedJumpFvPatchField<scalar>(p, iF),
jumpTable_(0)
{}
Foam::temperatureJumpFvPatchScalarField::temperatureJumpFvPatchScalarField
(
const temperatureJumpFvPatchScalarField& ptf,
const fvPatch& p,
const DimensionedField<scalar, volMesh>& iF,
const fvPatchFieldMapper& mapper
)
:
fixedJumpFvPatchField<scalar>(ptf, p, iF, mapper),
jumpTable_(ptf.jumpTable_().clone().ptr())
{}
Foam::temperatureJumpFvPatchScalarField::temperatureJumpFvPatchScalarField
(
const fvPatch& p,
const DimensionedField<scalar, volMesh>& iF,
const dictionary& dict
)
:
fixedJumpFvPatchField<scalar>(p, iF),
jumpTable_(new DataEntry<scalar>("jumpTable"))
{
if (this->cyclicPatch().owner())
{
jumpTable_ = DataEntry<scalar>::New("jumpTable", dict);
}
if (dict.found("value"))
{
fvPatchScalarField::operator=
(
scalarField("value", dict, p.size())
);
}
}
Foam::temperatureJumpFvPatchScalarField::temperatureJumpFvPatchScalarField
(
const temperatureJumpFvPatchScalarField& ptf
)
:
cyclicLduInterfaceField(),
fixedJumpFvPatchField<scalar>(ptf),
jumpTable_(ptf.jumpTable_().clone().ptr())
{}
Foam::temperatureJumpFvPatchScalarField::temperatureJumpFvPatchScalarField
(
const temperatureJumpFvPatchScalarField& ptf,
const DimensionedField<scalar, volMesh>& iF
)
:
fixedJumpFvPatchField<scalar>(ptf, iF),
jumpTable_(ptf.jumpTable_().clone().ptr())
{}
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
void Foam::temperatureJumpFvPatchScalarField::write(Ostream& os) const
{
fixedJumpFvPatchField<scalar>::write(os);
if (this->cyclicPatch().owner())
{
jumpTable_->writeData(os);
}
this->writeEntry("value", os);
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
makePatchTypeField
(
fvPatchScalarField,
temperatureJumpFvPatchScalarField
);
}
// ************************************************************************* //

View File

@ -0,0 +1,159 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
Class
Foam::temperatureJumpFvPatchScalarField
Description
Introduce a jump in temperature on a cyclic patch, for example:
front
{
type temperatureJump;
patchType cyclic;
jumpTable constant 100;
value uniform 300;
}
SourceFiles
temperatureJumpFvPatchScalarField.C
\*---------------------------------------------------------------------------*/
#ifndef temperatureJumpFvPatchScalarField_H
#define temperatureJumpFvPatchScalarField_H
#include "fixedJumpFvPatchField.H"
#include "DataEntry.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
/*---------------------------------------------------------------------------*\
Class temperatureJumpFvPatchScalarField Declaration
\*---------------------------------------------------------------------------*/
class temperatureJumpFvPatchScalarField
:
public fixedJumpFvPatchField<scalar>
{
// Private data
//- Interpolation table
autoPtr<DataEntry<scalar> > jumpTable_;
public:
//- Runtime type information
TypeName("temperatureJump");
// Constructors
//- Construct from patch and internal field
temperatureJumpFvPatchScalarField
(
const fvPatch&,
const DimensionedField<scalar, volMesh>&
);
//- Construct from patch, internal field and dictionary
temperatureJumpFvPatchScalarField
(
const fvPatch&,
const DimensionedField<scalar, volMesh>&,
const dictionary&
);
//- Construct by mapping given temperatureJumpFvPatchScalarField onto a
// new patch
temperatureJumpFvPatchScalarField
(
const temperatureJumpFvPatchScalarField&,
const fvPatch&,
const DimensionedField<scalar, volMesh>&,
const fvPatchFieldMapper&
);
//- Construct as copy
temperatureJumpFvPatchScalarField
(
const temperatureJumpFvPatchScalarField&
);
//- Construct and return a clone
virtual tmp<fvPatchField<scalar> > clone() const
{
return tmp<fvPatchField<scalar> >
(
new temperatureJumpFvPatchScalarField(*this)
);
}
//- Construct as copy setting internal field reference
temperatureJumpFvPatchScalarField
(
const temperatureJumpFvPatchScalarField&,
const DimensionedField<scalar, volMesh>&
);
//- Construct and return a clone setting internal field reference
virtual tmp<fvPatchField<scalar> > clone
(
const DimensionedField<scalar, volMesh>& iF
) const
{
return tmp<fvPatchField<scalar> >
(
new temperatureJumpFvPatchScalarField(*this, iF)
);
}
// Member functions
// Access functions
//- Return jumpTable
const DataEntry<scalar>& jumpTable() const
{
return jumpTable_();
}
//- Write
virtual void write(Ostream&) const;
};
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace Foam
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#endif
// ************************************************************************* //
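
Note: the jump is held as a DataEntry<scalar>, so the "jumpTable" keyword in the Description above can be a constant (as shown) or any other DataEntry type, and is presumably evaluated at the current solver time when the patch updates. A minimal standalone sketch of that idea, using hypothetical stand-in classes rather than the OpenFOAM ones:

    #include <iostream>
    #include <memory>

    // Stand-in for Foam::DataEntry<scalar>: something evaluated at a time t.
    struct DataEntryScalar
    {
        virtual ~DataEntryScalar() {}
        virtual double value(double t) const = 0;
    };

    // Stand-in for the "constant" entry type, i.e. "jumpTable constant 100;"
    struct ConstantEntry : DataEntryScalar
    {
        explicit ConstantEntry(double v) : v_(v) {}
        double value(double) const { return v_; }
        double v_;
    };

    int main()
    {
        std::unique_ptr<DataEntryScalar> jumpTable(new ConstantEntry(100.0));
        const double t = 0.5;   // current solver time
        std::cout << "temperature jump at t=" << t << ": "
                  << jumpTable->value(t) << " K\n";
    }

Swapping ConstantEntry for a table-backed entry would give a time-varying jump without touching the boundary condition itself.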

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -85,6 +85,7 @@ class CollisionRecordList
//- List of active wall collisions //- List of active wall collisions
DynamicList<WallCollisionRecord<WallType> > wallRecords_; DynamicList<WallCollisionRecord<WallType> > wallRecords_;
public: public:
// Constructors // Constructors
@ -115,11 +116,11 @@ public:
//- Return the active pair collisions //- Return the active pair collisions
inline const DynamicList<PairCollisionRecord<PairType> >& inline const DynamicList<PairCollisionRecord<PairType> >&
pairRecords() const; pairRecords() const;
//- Return the active wall collisions //- Return the active wall collisions
inline const DynamicList<WallCollisionRecord<WallType> >& inline const DynamicList<WallCollisionRecord<WallType> >&
wallRecords() const; wallRecords() const;
// Fields representing the data from each record, i.e if the // Fields representing the data from each record, i.e if the
// records 0-N containing each data members {a, b, c, d...} // records 0-N containing each data members {a, b, c, d...}

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -80,12 +80,15 @@ void Foam::ReactingParcel<ParcelType>::cellValueSourceCorrection
) )
{ {
scalar addedMass = 0.0; scalar addedMass = 0.0;
scalar maxMassI = 0.0;
forAll(td.cloud().rhoTrans(), i) forAll(td.cloud().rhoTrans(), i)
{ {
addedMass += td.cloud().rhoTrans(i)[cellI]; scalar dm = td.cloud().rhoTrans(i)[cellI];
maxMassI = max(maxMassI, mag(dm));
addedMass += dm;
} }
if (addedMass < ROOTVSMALL) if (maxMassI < ROOTVSMALL)
{ {
return; return;
} }
@ -95,16 +98,13 @@ void Foam::ReactingParcel<ParcelType>::cellValueSourceCorrection
this->rhoc_ += addedMass/td.cloud().pMesh().cellVolumes()[cellI]; this->rhoc_ += addedMass/td.cloud().pMesh().cellVolumes()[cellI];
const scalar massCellNew = massCell + addedMass; const scalar massCellNew = massCell + addedMass;
this->Uc_ += td.cloud().UTrans()[cellI]/massCellNew; this->Uc_ = (this->Uc_*massCell + td.cloud().UTrans()[cellI])/massCellNew;
scalar CpEff = 0.0; scalar CpEff = 0.0;
if (addedMass > ROOTVSMALL) forAll(td.cloud().rhoTrans(), i)
{ {
forAll(td.cloud().rhoTrans(), i) scalar Y = td.cloud().rhoTrans(i)[cellI]/addedMass;
{ CpEff += Y*td.cloud().composition().carrier().Cp(i, this->Tc_);
scalar Y = td.cloud().rhoTrans(i)[cellI]/addedMass;
CpEff += Y*td.cloud().composition().carrier().Cp(i, this->Tc_);
}
} }
const scalar Cpc = td.CpInterp().psi()[cellI]; const scalar Cpc = td.CpInterp().psi()[cellI];
@ -152,7 +152,7 @@ void Foam::ReactingParcel<ParcelType>::correctSurfaceValues
) )
{ {
// No correction if total concentration of emitted species is small // No correction if total concentration of emitted species is small
if (sum(Cs) < SMALL) if (!td.cloud().heatTransfer().BirdCorrection() || (sum(Cs) < SMALL))
{ {
return; return;
} }
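
Two fixes are visible in this hunk: the early return now keys off the largest individual species transfer (so equal and opposite transfers are not mistaken for "no change"), and the carrier velocity is corrected as a mass-weighted average of the old cell momentum and the accumulated momentum source, rather than by simply adding UTrans/massCellNew. A standalone sketch of the weighted update (illustrative only, not the parcel class itself):

    #include <array>
    #include <iostream>

    typedef std::array<double, 3> vector3;

    // Mass-weighted carrier velocity: old cell momentum plus the accumulated
    // momentum transfer, divided by the new cell mass.
    vector3 correctedUc
    (
        const vector3& Uc,      // carrier velocity before correction
        double massCell,        // cell mass before adding parcel sources
        const vector3& UTrans,  // accumulated momentum source [kg m/s]
        double addedMass        // total transferred mass [kg]
    )
    {
        const double massCellNew = massCell + addedMass;
        vector3 UcNew;
        for (int i = 0; i < 3; ++i)
        {
            UcNew[i] = (Uc[i]*massCell + UTrans[i])/massCellNew;
        }
        return UcNew;
    }

    int main()
    {
        const vector3 Uc = {{1.0, 0.0, 0.0}};
        const vector3 UTrans = {{1e-7, 0.0, 0.0}};
        const vector3 UcNew = correctedUc(Uc, 1e-6, UTrans, 1e-7);
        std::cout << UcNew[0] << "\n";   // stays near 1.0 for small transfers
    }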

View File

@ -661,10 +661,10 @@ void Foam::PairCollision<CloudType>::collide()
{ {
preInteraction(); preInteraction();
parcelInteraction();
wallInteraction(); wallInteraction();
parcelInteraction();
postInteraction(); postInteraction();
} }

View File

@ -238,7 +238,7 @@ void Foam::SurfaceFilmModel<CloudType>::cacheFilmFields
diameterParcelPatch_ = diameterParcelPatch_ =
filmModel.cloudDiameterTrans().boundaryField()[filmPatchI]; filmModel.cloudDiameterTrans().boundaryField()[filmPatchI];
filmModel.toPrimary(filmPatchI, diameterParcelPatch_, maxOp<scalar>()); filmModel.toPrimary(filmPatchI, diameterParcelPatch_, maxEqOp<scalar>());
UFilmPatch_ = filmModel.Us().boundaryField()[filmPatchI]; UFilmPatch_ = filmModel.Us().boundaryField()[filmPatchI];
filmModel.toPrimary(filmPatchI, UFilmPatch_); filmModel.toPrimary(filmPatchI, UFilmPatch_);

View File

@ -411,10 +411,10 @@ void Foam::ThermoSurfaceFilm<CloudType>::splashInteraction
const scalar EKIn = 0.5*m*magSqr(Urel); const scalar EKIn = 0.5*m*magSqr(Urel);
// incident surface energy [J] // incident surface energy [J]
const scalar ESigmaIn = sigma*p.areaS(d); const scalar ESigmaIn = np*sigma*p.areaS(d);
// dissipative energy // dissipative energy
const scalar Ed = max(0.8*EKIn, Wec/12*pi*sigma*sqr(d)); const scalar Ed = max(0.8*EKIn, np*Wec/12*pi*sigma*sqr(d));
// total energy [J] // total energy [J]
const scalar EKs = EKIn + ESigmaIn - ESigmaSec - Ed; const scalar EKs = EKIn + ESigmaIn - ESigmaSec - Ed;
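
Restating the three changed lines above as one balance, with A_s(d) standing for p.areaS(d) (the droplet surface area) and n_p taken to be the number of particles represented by the parcel (an assumption based on the variable name np):

    E_{K,s} = E_{K,in} + n_p\,\sigma\,A_s(d) - E_{\sigma,sec} - E_d,
    \qquad
    E_d = \max\!\left(0.8\,E_{K,in},\; n_p\,\tfrac{We_c}{12}\,\pi\,\sigma\,d^2\right)

so both the incident surface energy and the dissipative term now scale with the whole parcel rather than a single droplet.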

View File

@ -439,6 +439,86 @@ void Foam::autoLayerDriver::handleFeatureAngle
} }
//Foam::tmp<Foam::scalarField> Foam::autoLayerDriver::undistortedEdgeLength
//(
// const indirectPrimitivePatch& pp,
// const bool relativeSizes,
// const bool faceSize
//)
//{
// const fvMesh& mesh = meshRefiner_.mesh();
//
// tmp<scalarField> tfld(new scalarField());
// scalarField& fld = tfld();
//
// if (faceSize)
// {
// fld.setSize(pp.size());
// }
// else
// {
// fld.setSize(pp.nPoints());
// }
//
//
// if (relativeSizes)
// {
// const scalar edge0Len = meshRefiner_.meshCutter().level0EdgeLength();
// const labelList& cellLevel = meshRefiner_.meshCutter().cellLevel();
//
// if (faceSize)
// {
// forAll(pp, i)
// {
// label faceI = pp.addressing()[i];
// label ownLevel = cellLevel[mesh.faceOwner()[faceI]];
// fld[i] = edge0Len/(1<<ownLevel);
// }
// }
// else
// {
// // Determine per point the max cell level of connected cells
// // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//
// labelList maxPointLevel(pp.nPoints(), labelMin);
//
// forAll(pp, i)
// {
// label faceI = pp.addressing()[i];
// label ownLevel = cellLevel[mesh.faceOwner()[faceI]];
//
// const face& f = pp.localFaces()[i];
// forAll(f, fp)
// {
// maxPointLevel[f[fp]] =
// max(maxPointLevel[f[fp]], ownLevel);
// }
// }
//
// syncTools::syncPointList
// (
// mesh,
// pp.meshPoints(),
// maxPointLevel,
// maxEqOp<label>(),
// labelMin // null value
// );
//
//
// forAll(maxPointLevel, pointI)
// {
// // Find undistorted edge size for this level.
// fld[i] = edge0Len/(1<<maxPointLevel[pointI]);
// }
// }
// }
// else
// {
// // Use actual cell size
// }
//}
// No extrusion on cells with warped faces. Calculates the thickness of the // No extrusion on cells with warped faces. Calculates the thickness of the
// layer and compares it to the space the warped face takes up. Disables // layer and compares it to the space the warped face takes up. Disables
// extrusion if layer thickness is more than faceRatio of the thickness of // extrusion if layer thickness is more than faceRatio of the thickness of
@ -2383,22 +2463,27 @@ void Foam::autoLayerDriver::addLayers
// Disable extrusion on warped faces // Disable extrusion on warped faces
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// It is hard to calculate some length scale if not in relative
// mode so disable this check.
if (layerParams.relativeSizes())
{
// Undistorted edge length
const scalar edge0Len =
meshRefiner_.meshCutter().level0EdgeLength();
const labelList& cellLevel = meshRefiner_.meshCutter().cellLevel();
// Undistorted edge length handleWarpedFaces
const scalar edge0Len = meshRefiner_.meshCutter().level0EdgeLength(); (
const labelList& cellLevel = meshRefiner_.meshCutter().cellLevel(); pp,
layerParams.maxFaceThicknessRatio(),
edge0Len,
cellLevel,
handleWarpedFaces patchDisp,
( patchNLayers,
pp, extrudeStatus
layerParams.maxFaceThicknessRatio(), );
edge0Len, }
cellLevel,
patchDisp,
patchNLayers,
extrudeStatus
);
//// Disable extrusion on cells with multiple patch faces //// Disable extrusion on cells with multiple patch faces
//// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -2585,6 +2670,21 @@ void Foam::autoLayerDriver::addLayers
dimensionedScalar("medialRatio", dimless, 0.0) dimensionedScalar("medialRatio", dimless, 0.0)
); );
pointVectorField medialVec
(
IOobject
(
"medialVec",
meshRefiner_.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE,
false
),
meshMover().pMesh(),
dimensionedVector("medialVec", dimLength, vector::zero)
);
// Setup information for medial axis smoothing. Calculates medial axis // Setup information for medial axis smoothing. Calculates medial axis
// and a smoothed displacement direction. // and a smoothed displacement direction.
// - pointMedialDist : distance to medial axis // - pointMedialDist : distance to medial axis
@ -2600,7 +2700,8 @@ void Foam::autoLayerDriver::addLayers
dispVec, dispVec,
medialRatio, medialRatio,
pointMedialDist pointMedialDist,
medialVec
); );
@ -2692,6 +2793,7 @@ void Foam::autoLayerDriver::addLayers
dispVec, dispVec,
medialRatio, medialRatio,
pointMedialDist, pointMedialDist,
medialVec,
extrudeStatus, extrudeStatus,
patchDisp, patchDisp,
@ -2830,7 +2932,7 @@ void Foam::autoLayerDriver::addLayers
mesh.name(), mesh.name(),
static_cast<polyMesh&>(mesh).instance(), static_cast<polyMesh&>(mesh).instance(),
mesh.time(), // register with runTime mesh.time(), // register with runTime
static_cast<polyMesh&>(mesh).readOpt(), IOobject::NO_READ,
static_cast<polyMesh&>(mesh).writeOpt() static_cast<polyMesh&>(mesh).writeOpt()
), // io params from original mesh but new name ), // io params from original mesh but new name
mesh, // original mesh mesh, // original mesh

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -460,7 +460,8 @@ class autoLayerDriver
pointVectorField& dispVec, pointVectorField& dispVec,
pointScalarField& medialRatio, pointScalarField& medialRatio,
pointScalarField& medialDist pointScalarField& medialDist,
pointVectorField& medialVec
) const; ) const;
//- Main routine to shrink mesh //- Main routine to shrink mesh
@ -481,6 +482,7 @@ class autoLayerDriver
const pointVectorField& dispVec, const pointVectorField& dispVec,
const pointScalarField& medialRatio, const pointScalarField& medialRatio,
const pointScalarField& medialDist, const pointScalarField& medialDist,
const pointVectorField& medialVec,
List<extrudeMode>& extrudeStatus, List<extrudeMode>& extrudeStatus,
pointField& patchDisp, pointField& patchDisp,

View File

@ -689,7 +689,8 @@ void Foam::autoLayerDriver::medialAxisSmoothingInfo
pointVectorField& dispVec, pointVectorField& dispVec,
pointScalarField& medialRatio, pointScalarField& medialRatio,
pointScalarField& medialDist pointScalarField& medialDist,
pointVectorField& medialVec
) const ) const
{ {
@ -929,7 +930,7 @@ void Foam::autoLayerDriver::medialAxisSmoothingInfo
forAll(pointMedialDist, pointI) forAll(pointMedialDist, pointI)
{ {
medialDist[pointI] = Foam::sqrt(pointMedialDist[pointI].distSqr()); medialDist[pointI] = Foam::sqrt(pointMedialDist[pointI].distSqr());
//medialVec[pointI] = pointMedialDist[pointI].origin(); medialVec[pointI] = pointMedialDist[pointI].origin();
} }
} }
@ -966,14 +967,14 @@ void Foam::autoLayerDriver::medialAxisSmoothingInfo
<< " : normalised direction of nearest displacement" << nl << " : normalised direction of nearest displacement" << nl
<< " " << medialDist.name() << " " << medialDist.name()
<< " : distance to medial axis" << nl << " : distance to medial axis" << nl
//<< " " << medialVec.name() << " " << medialVec.name()
//<< " : nearest point on medial axis" << nl << " : nearest point on medial axis" << nl
<< " " << medialRatio.name() << " " << medialRatio.name()
<< " : ratio of medial distance to wall distance" << nl << " : ratio of medial distance to wall distance" << nl
<< endl; << endl;
dispVec.write(); dispVec.write();
medialDist.write(); medialDist.write();
//medialVec.write(); medialVec.write();
medialRatio.write(); medialRatio.write();
} }
} }
@ -996,7 +997,7 @@ void Foam::autoLayerDriver::shrinkMeshMedialDistance
const pointVectorField& dispVec, const pointVectorField& dispVec,
const pointScalarField& medialRatio, const pointScalarField& medialRatio,
const pointScalarField& medialDist, const pointScalarField& medialDist,
//const pointVectorField& medialVec, const pointVectorField& medialVec,
List<extrudeMode>& extrudeStatus, List<extrudeMode>& extrudeStatus,
pointField& patchDisp, pointField& patchDisp,
@ -1066,6 +1067,24 @@ void Foam::autoLayerDriver::shrinkMeshMedialDistance
<< str().name() << endl; << str().name() << endl;
} }
autoPtr<OFstream> medialVecStr;
label medialVertI = 0;
if (debug)
{
medialVecStr.reset
(
new OFstream
(
mesh.time().path()
/ "thicknessRatioExcludeMedialVec_"
+ meshRefiner_.timeName()
+ ".obj"
)
);
Info<< "Writing points with too large a extrusion distance to "
<< medialVecStr().name() << endl;
}
forAll(meshPoints, patchPointI) forAll(meshPoints, patchPointI)
{ {
if (extrudeStatus[patchPointI] != NOEXTRUDE) if (extrudeStatus[patchPointI] != NOEXTRUDE)
@ -1082,12 +1101,9 @@ void Foam::autoLayerDriver::shrinkMeshMedialDistance
vector n = vector n =
patchDisp[patchPointI] patchDisp[patchPointI]
/ (mag(patchDisp[patchPointI]) + VSMALL); / (mag(patchDisp[patchPointI]) + VSMALL);
//vector mVec = mesh.points()[pointI]-medialVec[pointI]; vector mVec = mesh.points()[pointI]-medialVec[pointI];
//scalar mDist = mag(mVec); mVec /= mag(mVec)+VSMALL;
//scalar thicknessRatio = thicknessRatio *= (n&mVec);
// (n&mVec)
// *thickness[patchPointI]
// /(mDist+VSMALL);
if (thicknessRatio > maxThicknessToMedialRatio) if (thicknessRatio > maxThicknessToMedialRatio)
{ {
@ -1103,8 +1119,9 @@ void Foam::autoLayerDriver::shrinkMeshMedialDistance
minThickness[patchPointI] minThickness[patchPointI]
+thickness[patchPointI] +thickness[patchPointI]
) )
//<< " since near medial at:" << medialVec[pointI] << " medial direction:" << mVec
//<< " with thicknessRatio:" << thicknessRatio << " extrusion direction:" << n
<< " with thicknessRatio:" << thicknessRatio
<< endl; << endl;
} }
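
The thickness-ratio test is now directional: the raw thickness/medial-distance ratio is multiplied by the cosine between the extrusion direction n and the unit vector from the nearest medial-axis point to the surface point (mVec), so the limit only triggers when the two are aligned. A standalone sketch of that projection (illustrative form of the logic above):

    #include <cmath>
    #include <iostream>

    struct Vec { double x, y, z; };

    double dot(const Vec& a, const Vec& b) { return a.x*b.x + a.y*b.y + a.z*b.z; }
    double mag(const Vec& a) { return std::sqrt(dot(a, a)); }

    bool limitExtrusion(const Vec& point, const Vec& medialPoint,
                        const Vec& n,            // unit extrusion direction
                        double thicknessRatio,   // thickness/medialDist
                        double maxRatio)
    {
        const double vsmall = 1e-300;
        Vec mVec = {point.x - medialPoint.x,
                    point.y - medialPoint.y,
                    point.z - medialPoint.z};
        const double m = mag(mVec) + vsmall;
        mVec = Vec{mVec.x/m, mVec.y/m, mVec.z/m};

        // scale by alignment with the medial-to-point direction
        thicknessRatio *= dot(n, mVec);

        return thicknessRatio > maxRatio;
    }

    int main()
    {
        Vec p = {0, 0, 0}, medial = {0, 0, 1}, n = {0, 0, -1};
        // extrusion direction aligned with the medial-to-point vector
        std::cout << limitExtrusion(p, medial, n, 3.0, 2.0) << "\n";   // 1
    }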
@ -1124,6 +1141,16 @@ void Foam::autoLayerDriver::shrinkMeshMedialDistance
vertI++; vertI++;
str()<< "l " << vertI-1 << ' ' << vertI << nl; str()<< "l " << vertI-1 << ' ' << vertI << nl;
} }
if (medialVecStr.valid())
{
const point& pt = mesh.points()[pointI];
meshTools::writeOBJ(medialVecStr(), pt);
medialVertI++;
meshTools::writeOBJ(medialVecStr(), medialVec[pointI]);
medialVertI++;
medialVecStr()<< "l " << medialVertI-1
<< ' ' << medialVertI << nl;
}
} }
} }
} }
@ -1228,6 +1255,15 @@ void Foam::autoLayerDriver::shrinkMeshMedialDistance
) )
)() )()
); );
// Above move will have changed the instance only on the points (which
// is correct).
// However the previous mesh written will be the mesh with layers
// (see autoLayerDriver.C) so we now have to force writing all files
// so we can easily step through time steps. Note that if you
// don't write the mesh with layers this is not necessary.
meshRefiner_.mesh().setInstance(meshRefiner_.timeName());
meshRefiner_.write meshRefiner_.write
( (
debug, debug,

View File

@ -59,16 +59,16 @@ namespace Foam
}; };
//- Combine operator for interpolateToSource/Target //- Combine operator for interpolateToSource/Target
template<class Type, class BinaryOp> template<class Type, class CombineOp>
class combineBinaryOp class combineBinaryOp
{ {
const BinaryOp& bop_; const CombineOp& cop_;
public: public:
combineBinaryOp(const BinaryOp& bop) combineBinaryOp(const CombineOp& cop)
: :
bop_(bop) cop_(cop)
{} {}
void operator() void operator()
@ -79,7 +79,7 @@ namespace Foam
const scalar weight const scalar weight
) const ) const
{ {
x = bop_(x, weight*y); cop_(x, weight*y);
} }
}; };
} }
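
The rename from BinaryOp to CombineOp reflects a semantic change: the supplied operator now combines in place (x op= weight*y) instead of returning a value, which is also why the default interpolation calls later in this file switch from sumOp to plusEqOp. A standalone sketch of the two flavours (illustrative, not the OpenFOAM functors):

    #include <iostream>

    struct sumOp      // old style: returns x + y
    {
        double operator()(double x, double y) const { return x + y; }
    };

    struct plusEqOp   // new style: modifies x in place
    {
        void operator()(double& x, double y) const { x += y; }
    };

    template<class CombineOp>
    void accumulateWeighted(double& x, double y, double weight, const CombineOp& cop)
    {
        cop(x, weight*y);   // matches the new combineBinaryOp wrapper
    }

    int main()
    {
        double a = sumOp()(1.0, 2.0);                    // old style: 3.0
        double b = 0.0;
        accumulateWeighted(b, 2.0, 0.25, plusEqOp());    // new style: b becomes 0.5
        std::cout << a << " " << b << "\n";
    }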
@ -142,8 +142,8 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::writeIntersectionOBJ
template<class SourcePatch, class TargetPatch> template<class SourcePatch, class TargetPatch>
void Foam::AMIInterpolation<SourcePatch, TargetPatch>::checkPatches void Foam::AMIInterpolation<SourcePatch, TargetPatch>::checkPatches
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
) const ) const
{ {
const scalar maxBoundsError = 0.05; const scalar maxBoundsError = 0.05;
@ -166,8 +166,8 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::checkPatches
( (
"AMIInterpolation<SourcePatch, TargetPatch>::checkPatches" "AMIInterpolation<SourcePatch, TargetPatch>::checkPatches"
"(" "("
"const primitivePatch&, " "const SourcePatch&, "
"const primitivePatch&" "const TargetPatch&"
")" ")"
) << "Source and target patch bounding boxes are not similar" << nl ) << "Source and target patch bounding boxes are not similar" << nl
<< " source box span : " << bbSrc.span() << nl << " source box span : " << bbSrc.span() << nl
@ -182,7 +182,7 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::checkPatches
template<class SourcePatch, class TargetPatch> template<class SourcePatch, class TargetPatch>
void Foam::AMIInterpolation<SourcePatch, TargetPatch>::resetTree void Foam::AMIInterpolation<SourcePatch, TargetPatch>::resetTree
( (
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
) )
{ {
// Clear the old octree // Clear the old octree
@ -212,8 +212,8 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::resetTree
template<class SourcePatch, class TargetPatch> template<class SourcePatch, class TargetPatch>
Foam::label Foam::AMIInterpolation<SourcePatch, TargetPatch>::calcDistribution Foam::label Foam::AMIInterpolation<SourcePatch, TargetPatch>::calcDistribution
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
) const ) const
{ {
label procI = 0; label procI = 0;
@ -297,7 +297,7 @@ template<class SourcePatch, class TargetPatch>
void Foam::AMIInterpolation<SourcePatch, TargetPatch>::distributePatches void Foam::AMIInterpolation<SourcePatch, TargetPatch>::distributePatches
( (
const mapDistribute& map, const mapDistribute& map,
const primitivePatch& pp, const TargetPatch& pp,
const globalIndex& gi, const globalIndex& gi,
List<faceList>& faces, List<faceList>& faces,
List<pointField>& points, List<pointField>& points,
@ -394,7 +394,7 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::
distributeAndMergePatches distributeAndMergePatches
( (
const mapDistribute& map, const mapDistribute& map,
const primitivePatch& tgtPatch, const TargetPatch& tgtPatch,
const globalIndex& gi, const globalIndex& gi,
faceList& tgtFaces, faceList& tgtFaces,
pointField& tgtPoints, pointField& tgtPoints,
@ -509,8 +509,8 @@ template<class SourcePatch, class TargetPatch>
Foam::autoPtr<Foam::mapDistribute> Foam::autoPtr<Foam::mapDistribute>
Foam::AMIInterpolation<SourcePatch, TargetPatch>::calcProcMap Foam::AMIInterpolation<SourcePatch, TargetPatch>::calcProcMap
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
) const ) const
{ {
// Get decomposition of patch // Get decomposition of patch
@ -711,7 +711,7 @@ template<class SourcePatch, class TargetPatch>
Foam::label Foam::AMIInterpolation<SourcePatch, TargetPatch>::findTargetFace Foam::label Foam::AMIInterpolation<SourcePatch, TargetPatch>::findTargetFace
( (
const label srcFaceI, const label srcFaceI,
const primitivePatch& srcPatch const SourcePatch& srcPatch
) const ) const
{ {
label targetFaceI = -1; label targetFaceI = -1;
@ -745,7 +745,7 @@ template<class SourcePatch, class TargetPatch>
void Foam::AMIInterpolation<SourcePatch, TargetPatch>::appendNbrFaces void Foam::AMIInterpolation<SourcePatch, TargetPatch>::appendNbrFaces
( (
const label faceI, const label faceI,
const primitivePatch& patch, const TargetPatch& patch,
const DynamicList<label>& visitedFaces, const DynamicList<label>& visitedFaces,
DynamicList<label>& faceIDs DynamicList<label>& faceIDs
) const ) const
@ -793,8 +793,8 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::setNextFaces
label& startSeedI, label& startSeedI,
label& srcFaceI, label& srcFaceI,
label& tgtFaceI, label& tgtFaceI,
const primitivePatch& srcPatch0, const SourcePatch& srcPatch0,
const primitivePatch& tgtPatch0, const TargetPatch& tgtPatch0,
const boolList& mapFlag, const boolList& mapFlag,
labelList& seedFaces, labelList& seedFaces,
const DynamicList<label>& visitedFaces const DynamicList<label>& visitedFaces
@ -897,8 +897,8 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::setNextFaces
"label&, " "label&, "
"label&, " "label&, "
"label&, " "label&, "
"const primitivePatch&, " "const SourcePatch&, "
"const primitivePatch&, " "const TargetPatch&, "
"const boolList&, " "const boolList&, "
"labelList&, " "labelList&, "
"const DynamicList<label>&" "const DynamicList<label>&"
@ -913,26 +913,27 @@ Foam::scalar Foam::AMIInterpolation<SourcePatch, TargetPatch>::interArea
( (
const label srcFaceI, const label srcFaceI,
const label tgtFaceI, const label tgtFaceI,
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
) const ) const
{ {
// quick reject if either face has zero area
if (srcMagSf_[srcFaceI] < ROOTVSMALL || tgtMagSf_[tgtFaceI] < ROOTVSMALL)
{
return 0.0;
}
const pointField& srcPoints = srcPatch.points(); const pointField& srcPoints = srcPatch.points();
const pointField& tgtPoints = tgtPatch.points(); const pointField& tgtPoints = tgtPatch.points();
// create intersection object
faceAreaIntersect inter(srcPoints, tgtPoints, reverseTarget_);
// references to candidate faces // references to candidate faces
const face& src = srcPatch[srcFaceI]; const face& src = srcPatch[srcFaceI];
const face& tgt = tgtPatch[tgtFaceI]; const face& tgt = tgtPatch[tgtFaceI];
// quick reject if either face has zero area
// Note: do not used stored face areas for target patch
if ((srcMagSf_[srcFaceI] < ROOTVSMALL) || (tgt.mag(tgtPoints) < ROOTVSMALL))
{
return 0.0;
}
// create intersection object
faceAreaIntersect inter(srcPoints, tgtPoints, reverseTarget_);
// crude resultant norm // crude resultant norm
vector n(-src.normal(srcPoints)); vector n(-src.normal(srcPoints));
if (reverseTarget_) if (reverseTarget_)
@ -963,20 +964,17 @@ Foam::scalar Foam::AMIInterpolation<SourcePatch, TargetPatch>::interArea
template<class SourcePatch, class TargetPatch> template<class SourcePatch, class TargetPatch>
void Foam::AMIInterpolation<SourcePatch, TargetPatch>::calcAddressing void Foam::AMIInterpolation<SourcePatch, TargetPatch>::calcAddressing
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch, const TargetPatch& tgtPatch,
label srcFaceI, label srcFaceI,
label tgtFaceI label tgtFaceI
) )
{ {
if (!srcPatch.size() || !tgtPatch.size()) if (debug && (!srcPatch.size() || !tgtPatch.size()))
{ {
if (debug) Pout<< "AMI: Patches not on processor: Source faces = "
{ << srcPatch.size() << ", target faces = " << tgtPatch.size()
Pout<< "AMI: Patches not on processor: Source faces = " << endl;
<< srcPatch.size() << ", target faces = " << tgtPatch.size()
<< endl;
}
} }
if (!srcPatch.size()) if (!srcPatch.size())
@ -990,8 +988,8 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::calcAddressing
"void Foam::AMIInterpolation<SourcePatch, TargetPatch>::" "void Foam::AMIInterpolation<SourcePatch, TargetPatch>::"
"calcAddressing" "calcAddressing"
"(" "("
"const primitivePatch&, " "const SourcePatch&, "
"const primitivePatch&, " "const TargetPatch&, "
"label, " "label, "
"label" "label"
")" ")"
@ -1035,8 +1033,8 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::calcAddressing
"void Foam::AMIInterpolation<SourcePatch, TargetPatch>::" "void Foam::AMIInterpolation<SourcePatch, TargetPatch>::"
"calcAddressing" "calcAddressing"
"(" "("
"const primitivePatch&, " "const SourcePatch&, "
"const primitivePatch&, " "const TargetPatch&, "
"label, " "label, "
"label" "label"
")" ")"
@ -1542,7 +1540,7 @@ Foam::AMIInterpolation<SourcePatch, TargetPatch>::AMIInterpolation
{ {
// create new patches for source and target // create new patches for source and target
pointField srcPoints = srcPatch.points(); pointField srcPoints = srcPatch.points();
primitivePatch srcPatch0 SourcePatch srcPatch0
( (
SubList<face> SubList<face>
( (
@ -1563,7 +1561,7 @@ Foam::AMIInterpolation<SourcePatch, TargetPatch>::AMIInterpolation
} }
pointField tgtPoints = tgtPatch.points(); pointField tgtPoints = tgtPatch.points();
primitivePatch tgtPatch0 TargetPatch tgtPatch0
( (
SubList<face> SubList<face>
( (
@ -1734,8 +1732,8 @@ Foam::AMIInterpolation<SourcePatch, TargetPatch>::~AMIInterpolation()
template<class SourcePatch, class TargetPatch> template<class SourcePatch, class TargetPatch>
void Foam::AMIInterpolation<SourcePatch, TargetPatch>::update void Foam::AMIInterpolation<SourcePatch, TargetPatch>::update
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
) )
{ {
// Calculate face areas // Calculate face areas
@ -1784,7 +1782,7 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::update
tgtFaceIDs tgtFaceIDs
); );
primitivePatch TargetPatch
newTgtPatch newTgtPatch
( (
SubList<face> SubList<face>
@ -1900,7 +1898,7 @@ template<class Type, class CombineOp>
void Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget void Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget
( (
const UList<Type>& fld, const UList<Type>& fld,
const CombineOp& bop, const CombineOp& cop,
List<Type>& result List<Type>& result
) const ) const
{ {
@ -1937,7 +1935,7 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget
forAll(faces, i) forAll(faces, i)
{ {
bop(result[faceI], faceI, work[faces[i]], weights[i]); cop(result[faceI], faceI, work[faces[i]], weights[i]);
} }
} }
} }
@ -1950,7 +1948,7 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget
forAll(faces, i) forAll(faces, i)
{ {
bop(result[faceI], faceI, fld[faces[i]], weights[i]); cop(result[faceI], faceI, fld[faces[i]], weights[i]);
} }
} }
} }
@ -1962,7 +1960,7 @@ template<class Type, class CombineOp>
void Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource void Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource
( (
const UList<Type>& fld, const UList<Type>& fld,
const CombineOp& bop, const CombineOp& cop,
List<Type>& result List<Type>& result
) const ) const
{ {
@ -1999,7 +1997,7 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource
forAll(faces, i) forAll(faces, i)
{ {
bop(result[faceI], faceI, work[faces[i]], weights[i]); cop(result[faceI], faceI, work[faces[i]], weights[i]);
} }
} }
} }
@ -2012,7 +2010,7 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource
forAll(faces, i) forAll(faces, i)
{ {
bop(result[faceI], faceI, fld[faces[i]], weights[i]); cop(result[faceI], faceI, fld[faces[i]], weights[i]);
} }
} }
} }
@ -2020,12 +2018,12 @@ void Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource
template<class SourcePatch, class TargetPatch> template<class SourcePatch, class TargetPatch>
template<class Type, class BinaryOp> template<class Type, class CombineOp>
Foam::tmp<Foam::Field<Type> > Foam::tmp<Foam::Field<Type> >
Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource
( (
const Field<Type>& fld, const Field<Type>& fld,
const BinaryOp& bop const CombineOp& cop
) const ) const
{ {
tmp<Field<Type> > tresult tmp<Field<Type> > tresult
@ -2037,32 +2035,32 @@ Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource
) )
); );
interpolateToSource(fld, combineBinaryOp<Type, BinaryOp>(bop), tresult()); interpolateToSource(fld, combineBinaryOp<Type, CombineOp>(cop), tresult());
return tresult; return tresult;
} }
template<class SourcePatch, class TargetPatch> template<class SourcePatch, class TargetPatch>
template<class Type, class BinaryOp> template<class Type, class CombineOp>
Foam::tmp<Foam::Field<Type> > Foam::tmp<Foam::Field<Type> >
Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource
( (
const tmp<Field<Type> >& tFld, const tmp<Field<Type> >& tFld,
const BinaryOp& bop const CombineOp& cop
) const ) const
{ {
return interpolateToSource(tFld(), bop); return interpolateToSource(tFld(), cop);
} }
template<class SourcePatch, class TargetPatch> template<class SourcePatch, class TargetPatch>
template<class Type, class BinaryOp> template<class Type, class CombineOp>
Foam::tmp<Foam::Field<Type> > Foam::tmp<Foam::Field<Type> >
Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget
( (
const Field<Type>& fld, const Field<Type>& fld,
const BinaryOp& bop const CombineOp& cop
) const ) const
{ {
tmp<Field<Type> > tresult tmp<Field<Type> > tresult
@ -2074,22 +2072,22 @@ Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget
) )
); );
interpolateToTarget(fld, combineBinaryOp<Type, BinaryOp>(bop), tresult()); interpolateToTarget(fld, combineBinaryOp<Type, CombineOp>(cop), tresult());
return tresult; return tresult;
} }
template<class SourcePatch, class TargetPatch> template<class SourcePatch, class TargetPatch>
template<class Type, class BinaryOp> template<class Type, class CombineOp>
Foam::tmp<Foam::Field<Type> > Foam::tmp<Foam::Field<Type> >
Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget
( (
const tmp<Field<Type> >& tFld, const tmp<Field<Type> >& tFld,
const BinaryOp& bop const CombineOp& cop
) const ) const
{ {
return interpolateToTarget(tFld(), bop); return interpolateToTarget(tFld(), cop);
} }
@ -2101,7 +2099,7 @@ Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource
const Field<Type>& fld const Field<Type>& fld
) const ) const
{ {
return interpolateToSource(fld, sumOp<Type>()); return interpolateToSource(fld, plusEqOp<Type>());
} }
@ -2113,7 +2111,7 @@ Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToSource
const tmp<Field<Type> >& tFld const tmp<Field<Type> >& tFld
) const ) const
{ {
return interpolateToSource(tFld(), sumOp<Type>()); return interpolateToSource(tFld(), plusEqOp<Type>());
} }
@ -2125,7 +2123,7 @@ Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget
const Field<Type>& fld const Field<Type>& fld
) const ) const
{ {
return interpolateToTarget(fld, sumOp<Type>()); return interpolateToTarget(fld, plusEqOp<Type>());
} }
@ -2137,15 +2135,15 @@ Foam::AMIInterpolation<SourcePatch, TargetPatch>::interpolateToTarget
const tmp<Field<Type> >& tFld const tmp<Field<Type> >& tFld
) const ) const
{ {
return interpolateToTarget(tFld(), sumOp<Type>()); return interpolateToTarget(tFld(), plusEqOp<Type>());
} }
template<class SourcePatch, class TargetPatch> template<class SourcePatch, class TargetPatch>
void Foam::AMIInterpolation<SourcePatch, TargetPatch>::writeFaceConnectivity void Foam::AMIInterpolation<SourcePatch, TargetPatch>::writeFaceConnectivity
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch, const TargetPatch& tgtPatch,
const labelListList& srcAddress const labelListList& srcAddress
) )
const const

View File

@ -160,12 +160,12 @@ class AMIInterpolation
//- Check that patches are valid //- Check that patches are valid
void checkPatches void checkPatches
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
) const; ) const;
//- Reset the octree for the target patch face search //- Reset the octree for the target patch face search
void resetTree(const primitivePatch& tgtPatch); void resetTree(const TargetPatch& tgtPatch);
@ -174,8 +174,8 @@ class AMIInterpolation
//- Calculate if patches are on multiple processors //- Calculate if patches are on multiple processors
label calcDistribution label calcDistribution
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
) const; ) const;
label calcOverlappingProcs label calcOverlappingProcs
@ -188,7 +188,7 @@ class AMIInterpolation
void distributePatches void distributePatches
( (
const mapDistribute& map, const mapDistribute& map,
const primitivePatch& pp, const TargetPatch& pp,
const globalIndex& gi, const globalIndex& gi,
List<faceList>& faces, List<faceList>& faces,
List<pointField>& points, List<pointField>& points,
@ -198,7 +198,7 @@ class AMIInterpolation
void distributeAndMergePatches void distributeAndMergePatches
( (
const mapDistribute& map, const mapDistribute& map,
const primitivePatch& tgtPatch, const TargetPatch& tgtPatch,
const globalIndex& gi, const globalIndex& gi,
faceList& tgtFaces, faceList& tgtFaces,
pointField& tgtPoints, pointField& tgtPoints,
@ -207,8 +207,8 @@ class AMIInterpolation
autoPtr<mapDistribute> calcProcMap autoPtr<mapDistribute> calcProcMap
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
) const; ) const;
@ -228,14 +228,14 @@ class AMIInterpolation
label findTargetFace label findTargetFace
( (
const label srcFaceI, const label srcFaceI,
const primitivePatch& srcPatch const SourcePatch& srcPatch
) const; ) const;
//- Add faces neighbouring faceI to the ID list //- Add faces neighbouring faceI to the ID list
void appendNbrFaces void appendNbrFaces
( (
const label faceI, const label faceI,
const primitivePatch& patch, const TargetPatch& patch,
const DynamicList<label>& visitedFaces, const DynamicList<label>& visitedFaces,
DynamicList<label>& faceIDs DynamicList<label>& faceIDs
) const; ) const;
@ -246,8 +246,8 @@ class AMIInterpolation
label& startSeedI, label& startSeedI,
label& srcFaceI, label& srcFaceI,
label& tgtFaceI, label& tgtFaceI,
const primitivePatch& srcPatch0, const SourcePatch& srcPatch0,
const primitivePatch& tgtPatch0, const TargetPatch& tgtPatch0,
const boolList& mapFlag, const boolList& mapFlag,
labelList& seedFaces, labelList& seedFaces,
const DynamicList<label>& visitedFaces const DynamicList<label>& visitedFaces
@ -261,15 +261,15 @@ class AMIInterpolation
( (
const label srcFaceI, const label srcFaceI,
const label tgtFaceI, const label tgtFaceI,
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
) const; ) const;
//- Calculate addressing //- Calculate addressing
void calcAddressing void calcAddressing
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch, const TargetPatch& tgtPatch,
label srcFaceI = -1, label srcFaceI = -1,
label tgtFaceI = -1 label tgtFaceI = -1
); );
@ -398,8 +398,8 @@ public:
//- Update addressing and weights //- Update addressing and weights
void update void update
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch const TargetPatch& tgtPatch
); );
@ -429,36 +429,36 @@ public:
//- Interpolate from target to source with supplied binary op //- Interpolate from target to source with supplied binary op
template<class Type, class BinaryOp> template<class Type, class CombineOp>
tmp<Field<Type> > interpolateToSource tmp<Field<Type> > interpolateToSource
( (
const Field<Type>& fld, const Field<Type>& fld,
const BinaryOp& bop const CombineOp& cop
) const; ) const;
//- Interpolate from target tmp field to source with supplied //- Interpolate from target tmp field to source with supplied
// binary op // binary op
template<class Type, class BinaryOp> template<class Type, class CombineOp>
tmp<Field<Type> > interpolateToSource tmp<Field<Type> > interpolateToSource
( (
const tmp<Field<Type> >& tFld, const tmp<Field<Type> >& tFld,
const BinaryOp& bop const CombineOp& cop
) const; ) const;
//- Interpolate from source to target with supplied op //- Interpolate from source to target with supplied op
template<class Type, class BinaryOp> template<class Type, class CombineOp>
tmp<Field<Type> > interpolateToTarget tmp<Field<Type> > interpolateToTarget
( (
const Field<Type>& fld, const Field<Type>& fld,
const BinaryOp& bop const CombineOp& cop
) const; ) const;
//- Interpolate from source tmp field to target with supplied op //- Interpolate from source tmp field to target with supplied op
template<class Type, class BinaryOp> template<class Type, class CombineOp>
tmp<Field<Type> > interpolateToTarget tmp<Field<Type> > interpolateToTarget
( (
const tmp<Field<Type> >& tFld, const tmp<Field<Type> >& tFld,
const BinaryOp& bop const CombineOp& cop
) const; ) const;
//- Interpolate from target to source //- Interpolate from target to source
@ -495,8 +495,8 @@ public:
//- Write face connectivity as OBJ file //- Write face connectivity as OBJ file
void writeFaceConnectivity void writeFaceConnectivity
( (
const primitivePatch& srcPatch, const SourcePatch& srcPatch,
const primitivePatch& tgtPatch, const TargetPatch& tgtPatch,
const labelListList& srcAddress const labelListList& srcAddress
) const; ) const;
}; };

View File

@ -376,16 +376,16 @@ public:
void distribute(List<Type>& lst) const; void distribute(List<Type>& lst) const;
//- Wrapper around map/interpolate data distribution with operation //- Wrapper around map/interpolate data distribution with operation
template<class Type, class BinaryOp> template<class Type, class CombineOp>
void distribute(List<Type>& lst, const BinaryOp& bop) const; void distribute(List<Type>& lst, const CombineOp& cop) const;
//- Wrapper around map/interpolate data distribution //- Wrapper around map/interpolate data distribution
template<class Type> template<class Type>
void reverseDistribute(List<Type>& lst) const; void reverseDistribute(List<Type>& lst) const;
//- Wrapper around map/interpolate data distribution with operation //- Wrapper around map/interpolate data distribution with operation
template<class Type, class BinaryOp> template<class Type, class CombineOp>
void reverseDistribute(List<Type>& lst, const BinaryOp& bop) const; void reverseDistribute(List<Type>& lst, const CombineOp& cop) const;
// I/O // I/O

View File

@ -41,11 +41,11 @@ void Foam::mappedPatchBase::distribute(List<Type>& lst) const
} }
template<class Type, class BinaryOp> template<class Type, class CombineOp>
void Foam::mappedPatchBase::distribute void Foam::mappedPatchBase::distribute
( (
List<Type>& lst, List<Type>& lst,
const BinaryOp& bop const CombineOp& cop
) const ) const
{ {
switch (mode_) switch (mode_)
@ -55,7 +55,7 @@ void Foam::mappedPatchBase::distribute
lst = AMI().interpolateToSource lst = AMI().interpolateToSource
( (
Field<Type>(lst.xfer()), Field<Type>(lst.xfer()),
bop cop
); );
break; break;
} }
@ -69,7 +69,7 @@ void Foam::mappedPatchBase::distribute
map().subMap(), map().subMap(),
map().constructMap(), map().constructMap(),
lst, lst,
bop, cop,
pTraits<Type>::zero pTraits<Type>::zero
); );
} }
@ -96,11 +96,11 @@ void Foam::mappedPatchBase::reverseDistribute(List<Type>& lst) const
} }
template<class Type, class BinaryOp> template<class Type, class CombineOp>
void Foam::mappedPatchBase::reverseDistribute void Foam::mappedPatchBase::reverseDistribute
( (
List<Type>& lst, List<Type>& lst,
const BinaryOp& bop const CombineOp& cop
) const ) const
{ {
switch (mode_) switch (mode_)
@ -110,7 +110,7 @@ void Foam::mappedPatchBase::reverseDistribute
lst = AMI().interpolateToTarget lst = AMI().interpolateToTarget
( (
Field<Type>(lst.xfer()), Field<Type>(lst.xfer()),
bop cop
); );
break; break;
} }
@ -125,7 +125,7 @@ void Foam::mappedPatchBase::reverseDistribute
map().constructMap(), map().constructMap(),
map().subMap(), map().subMap(),
lst, lst,
bop, cop,
pTraits<Type>::zero pTraits<Type>::zero
); );
break; break;

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -135,7 +135,7 @@ Foam::fileName Foam::topoSet::topoSet::localPath
const word& name const word& name
) )
{ {
return mesh.facesInstance()/polyMesh::meshSubDir/"sets"/name; return mesh.facesInstance()/mesh.dbDir()/polyMesh::meshSubDir/"sets"/name;
} }

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -217,6 +217,9 @@ void Foam::surfaceFeatures::classifyFeatureAngles
const vectorField& faceNormals = surf_.faceNormals(); const vectorField& faceNormals = surf_.faceNormals();
const pointField& points = surf_.points(); const pointField& points = surf_.points();
// Special case: minCos=1
bool selectAll = (mag(minCos-1.0) < SMALL);
forAll(edgeFaces, edgeI) forAll(edgeFaces, edgeI)
{ {
const labelList& eFaces = edgeFaces[edgeI]; const labelList& eFaces = edgeFaces[edgeI];
@ -235,25 +238,25 @@ void Foam::surfaceFeatures::classifyFeatureAngles
{ {
edgeStat[edgeI] = REGION; edgeStat[edgeI] = REGION;
} }
else else if
(
selectAll
|| ((faceNormals[face0] & faceNormals[face1]) < minCos)
)
{ {
if ((faceNormals[face0] & faceNormals[face1]) < minCos) // Check if convex or concave by looking at angle
// between face centres and normal
vector f0Tof1 =
surf_[face1].centre(points)
- surf_[face0].centre(points);
if ((f0Tof1 & faceNormals[face0]) >= 0.0)
{ {
edgeStat[edgeI] = INTERNAL;
// Check if convex or concave by looking at angle }
// between face centres and normal else
vector f0Tof1 = {
surf_[face1].centre(points) edgeStat[edgeI] = EXTERNAL;
- surf_[face0].centre(points);
if ((f0Tof1 & faceNormals[face0]) > 0.0)
{
edgeStat[edgeI] = INTERNAL;
}
else
{
edgeStat[edgeI] = EXTERNAL;
}
} }
} }
} }
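
The rewritten classification first handles minCos = 1 as "select every edge", then marks a feature edge from the sign of the dot product between the vector joining the two face centres and the first face normal; for outward-pointing normals this distinguishes concave (INTERNAL) from convex (EXTERNAL) folds. A standalone sketch of the test (illustrative form):

    #include <array>
    #include <cmath>
    #include <iostream>

    typedef std::array<double, 3> vec;

    double dot(const vec& a, const vec& b)
    {
        return a[0]*b[0] + a[1]*b[1] + a[2]*b[2];
    }

    enum EdgeStat { NONE, EXTERNAL, INTERNAL };

    EdgeStat classify
    (
        const vec& n0, const vec& n1,   // unit normals of the two faces
        const vec& c0, const vec& c1,   // their centres
        double minCos                   // feature-angle threshold
    )
    {
        const bool selectAll = std::fabs(minCos - 1.0) < 1e-6;  // minCos == 1 case
        if (!selectAll && (dot(n0, n1) >= minCos))
        {
            return NONE;                                        // too flat: not a feature
        }
        const vec f0Tof1 = {{c1[0]-c0[0], c1[1]-c0[1], c1[2]-c0[2]}};
        return (dot(f0Tof1, n0) >= 0.0) ? INTERNAL : EXTERNAL;
    }

    int main()
    {
        // two faces meeting at 90 degrees; threshold of cos(45 deg) ~ 0.707
        const vec n0 = {{0, 0, 1}}, n1 = {{1, 0, 0}};
        const vec c0 = {{0, 0, 0}}, c1 = {{1, 0, 1}};
        std::cout << classify(n0, n1, c0, c1, 0.707) << "\n";
    }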

View File

@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -103,9 +103,9 @@ License
Note: writes out .dgr files for debugging. Run with e.g. Note: writeGraph=true : writes out .dgr files for debugging. Run with e.g.
mpirun -np 4 dgpart 2 'processor%r.grf' mpirun -np 4 dgpart 2 'region0_%r.dgr'
- %r gets replaced by current processor rank - %r gets replaced by current processor rank
- decompose into 2 domains - decompose into 2 domains
@ -167,192 +167,192 @@ void Foam::ptscotchDecomp::check(const int retVal, const char* str)
} }
//- Does prevention of 0 cell domains and calls ptscotch. ////- Does prevention of 0 cell domains and calls ptscotch.
Foam::label Foam::ptscotchDecomp::decomposeZeroDomains //Foam::label Foam::ptscotchDecomp::decomposeZeroDomains
( //(
const fileName& meshPath, // const fileName& meshPath,
const List<int>& initadjncy, // const List<int>& initadjncy,
const List<int>& initxadj, // const List<int>& initxadj,
const scalarField& initcWeights, // const scalarField& initcWeights,
//
List<int>& finalDecomp // List<int>& finalDecomp
) const //) const
{ //{
globalIndex globalCells(initxadj.size()-1); // globalIndex globalCells(initxadj.size()-1);
//
bool hasZeroDomain = false; // bool hasZeroDomain = false;
for (label procI = 0; procI < Pstream::nProcs(); procI++) // for (label procI = 0; procI < Pstream::nProcs(); procI++)
{ // {
if (globalCells.localSize(procI) == 0) // if (globalCells.localSize(procI) == 0)
{ // {
hasZeroDomain = true; // hasZeroDomain = true;
break; // break;
} // }
} // }
//
if (!hasZeroDomain) // if (!hasZeroDomain)
{ // {
return decompose // return decompose
( // (
meshPath, // meshPath,
initadjncy, // initadjncy,
initxadj, // initxadj,
initcWeights, // initcWeights,
finalDecomp // finalDecomp
); // );
} // }
//
//
if (debug) // if (debug)
{ // {
Info<< "ptscotchDecomp : have graphs with locally 0 cells." // Info<< "ptscotchDecomp : have graphs with locally 0 cells."
<< " trickling down." << endl; // << " trickling down." << endl;
} // }
//
// Make sure every domain has at least one cell // // Make sure every domain has at least one cell
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// (scotch does not like zero sized domains) // // (scotch does not like zero sized domains)
// Trickle cells from processors that have them up to those that // // Trickle cells from processors that have them up to those that
// don't. // // don't.
//
//
// Number of cells to send to the next processor // // Number of cells to send to the next processor
// (is same as number of cells next processor has to receive) // // (is same as number of cells next processor has to receive)
List<int> nSendCells(Pstream::nProcs(), 0); // List<int> nSendCells(Pstream::nProcs(), 0);
//
for (label procI = nSendCells.size()-1; procI >=1; procI--) // for (label procI = nSendCells.size()-1; procI >=1; procI--)
{ // {
label nLocalCells = globalCells.localSize(procI); // label nLocalCells = globalCells.localSize(procI);
if (nLocalCells-nSendCells[procI] < 1) // if (nLocalCells-nSendCells[procI] < 1)
{ // {
nSendCells[procI-1] = nSendCells[procI]-nLocalCells+1; // nSendCells[procI-1] = nSendCells[procI]-nLocalCells+1;
} // }
} // }
//
// First receive (so increasing the sizes of all arrays) // // First receive (so increasing the sizes of all arrays)
//
Field<int> xadj(initxadj); // Field<int> xadj(initxadj);
Field<int> adjncy(initadjncy); // Field<int> adjncy(initadjncy);
scalarField cWeights(initcWeights); // scalarField cWeights(initcWeights);
//
if (Pstream::myProcNo() >= 1 && nSendCells[Pstream::myProcNo()-1] > 0) // if (Pstream::myProcNo() >= 1 && nSendCells[Pstream::myProcNo()-1] > 0)
{ // {
// Receive cells from previous processor // // Receive cells from previous processor
IPstream fromPrevProc(Pstream::blocking, Pstream::myProcNo()-1); // IPstream fromPrevProc(Pstream::blocking, Pstream::myProcNo()-1);
//
Field<int> prevXadj(fromPrevProc); // Field<int> prevXadj(fromPrevProc);
Field<int> prevAdjncy(fromPrevProc); // Field<int> prevAdjncy(fromPrevProc);
scalarField prevCellWeights(fromPrevProc); // scalarField prevCellWeights(fromPrevProc);
//
if (prevXadj.size() != nSendCells[Pstream::myProcNo()-1]) // if (prevXadj.size() != nSendCells[Pstream::myProcNo()-1])
{ // {
FatalErrorIn("ptscotchDecomp::decompose(..)") // FatalErrorIn("ptscotchDecomp::decompose(..)")
<< "Expected from processor " << Pstream::myProcNo()-1 // << "Expected from processor " << Pstream::myProcNo()-1
<< " connectivity for " << nSendCells[Pstream::myProcNo()-1] // << " connectivity for " << nSendCells[Pstream::myProcNo()-1]
<< " nCells but only received " << prevXadj.size() // << " nCells but only received " << prevXadj.size()
<< abort(FatalError); // << abort(FatalError);
} // }
//
// Insert adjncy // // Insert adjncy
prepend(prevAdjncy, adjncy); // prepend(prevAdjncy, adjncy);
// Adapt offsets and prepend xadj // // Adapt offsets and prepend xadj
xadj += prevAdjncy.size(); // xadj += prevAdjncy.size();
prepend(prevXadj, xadj); // prepend(prevXadj, xadj);
// Weights // // Weights
prepend(prevCellWeights, cWeights); // prepend(prevCellWeights, cWeights);
} // }
//
//
// Send to my next processor // // Send to my next processor
//
if (nSendCells[Pstream::myProcNo()] > 0) // if (nSendCells[Pstream::myProcNo()] > 0)
{ // {
// Send cells to next processor // // Send cells to next processor
OPstream toNextProc(Pstream::blocking, Pstream::myProcNo()+1); // OPstream toNextProc(Pstream::blocking, Pstream::myProcNo()+1);
//
int nCells = nSendCells[Pstream::myProcNo()]; // int nCells = nSendCells[Pstream::myProcNo()];
int startCell = xadj.size()-1 - nCells; // int startCell = xadj.size()-1 - nCells;
int startFace = xadj[startCell]; // int startFace = xadj[startCell];
int nFaces = adjncy.size()-startFace; // int nFaces = adjncy.size()-startFace;
//
// Send for all cell data: last nCells elements // // Send for all cell data: last nCells elements
// Send for all face data: last nFaces elements // // Send for all face data: last nFaces elements
toNextProc // toNextProc
<< Field<int>::subField(xadj, nCells, startCell)-startFace // << Field<int>::subField(xadj, nCells, startCell)-startFace
<< Field<int>::subField(adjncy, nFaces, startFace) // << Field<int>::subField(adjncy, nFaces, startFace)
<< // <<
( // (
cWeights.size() // cWeights.size()
? static_cast<const scalarField&> // ? static_cast<const scalarField&>
( // (
scalarField::subField(cWeights, nCells, startCell) // scalarField::subField(cWeights, nCells, startCell)
) // )
: scalarField(0) // : scalarField(0)
); // );
//
// Remove data that has been sent // // Remove data that has been sent
if (cWeights.size()) // if (cWeights.size())
{ // {
cWeights.setSize(cWeights.size()-nCells); // cWeights.setSize(cWeights.size()-nCells);
} // }
adjncy.setSize(adjncy.size()-nFaces); // adjncy.setSize(adjncy.size()-nFaces);
xadj.setSize(xadj.size() - nCells); // xadj.setSize(xadj.size() - nCells);
} // }
//
//
// Do decomposition as normal. Sets finalDecomp. // // Do decomposition as normal. Sets finalDecomp.
label result = decompose(meshPath, adjncy, xadj, cWeights, finalDecomp); // label result = decompose(meshPath, adjncy, xadj, cWeights, finalDecomp);
//
//
if (debug) // if (debug)
{ // {
Info<< "ptscotchDecomp : have graphs with locally 0 cells." // Info<< "ptscotchDecomp : have graphs with locally 0 cells."
<< " trickling up." << endl; // << " trickling up." << endl;
} // }
//
//
// If we sent cells across make sure we undo it // // If we sent cells across make sure we undo it
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//
// Receive back from next processor if I sent something // // Receive back from next processor if I sent something
if (nSendCells[Pstream::myProcNo()] > 0) // if (nSendCells[Pstream::myProcNo()] > 0)
{ // {
IPstream fromNextProc(Pstream::blocking, Pstream::myProcNo()+1); // IPstream fromNextProc(Pstream::blocking, Pstream::myProcNo()+1);
//
List<int> nextFinalDecomp(fromNextProc); // List<int> nextFinalDecomp(fromNextProc);
//
if (nextFinalDecomp.size() != nSendCells[Pstream::myProcNo()]) // if (nextFinalDecomp.size() != nSendCells[Pstream::myProcNo()])
{ // {
FatalErrorIn("parMetisDecomp::decompose(..)") // FatalErrorIn("parMetisDecomp::decompose(..)")
<< "Expected from processor " << Pstream::myProcNo()+1 // << "Expected from processor " << Pstream::myProcNo()+1
<< " decomposition for " << nSendCells[Pstream::myProcNo()] // << " decomposition for " << nSendCells[Pstream::myProcNo()]
<< " nCells but only received " << nextFinalDecomp.size() // << " nCells but only received " << nextFinalDecomp.size()
<< abort(FatalError); // << abort(FatalError);
} // }
//
append(nextFinalDecomp, finalDecomp); // append(nextFinalDecomp, finalDecomp);
} // }
//
// Send back to previous processor. // // Send back to previous processor.
if (Pstream::myProcNo() >= 1 && nSendCells[Pstream::myProcNo()-1] > 0) // if (Pstream::myProcNo() >= 1 && nSendCells[Pstream::myProcNo()-1] > 0)
{ // {
OPstream toPrevProc(Pstream::blocking, Pstream::myProcNo()-1); // OPstream toPrevProc(Pstream::blocking, Pstream::myProcNo()-1);
//
int nToPrevious = nSendCells[Pstream::myProcNo()-1]; // int nToPrevious = nSendCells[Pstream::myProcNo()-1];
//
toPrevProc << // toPrevProc <<
SubList<int> // SubList<int>
( // (
finalDecomp, // finalDecomp,
nToPrevious, // nToPrevious,
finalDecomp.size()-nToPrevious // finalDecomp.size()-nToPrevious
); // );
//
// Remove locally what has been sent // // Remove locally what has been sent
finalDecomp.setSize(finalDecomp.size()-nToPrevious); // finalDecomp.setSize(finalDecomp.size()-nToPrevious);
} // }
return result; // return result;
} //}
// Call scotch with options from dictionary. // Call scotch with options from dictionary.
@ -362,13 +362,42 @@ Foam::label Foam::ptscotchDecomp::decompose
const List<int>& adjncy, const List<int>& adjncy,
const List<int>& xadj, const List<int>& xadj,
const scalarField& cWeights, const scalarField& cWeights,
List<int>& finalDecomp
) const
{
List<int> dummyAdjncy(1);
List<int> dummyXadj(1);
dummyXadj[0] = 0;
return decompose
(
meshPath,
adjncy.size(),
(adjncy.size() ? adjncy.begin() : dummyAdjncy.begin()),
xadj.size(),
(xadj.size() ? xadj.begin() : dummyXadj.begin()),
cWeights,
finalDecomp
);
}
// Call scotch with options from dictionary.
Foam::label Foam::ptscotchDecomp::decompose
(
const fileName& meshPath,
const int adjncySize,
const int adjncy[],
const int xadjSize,
const int xadj[],
const scalarField& cWeights,
List<int>& finalDecomp List<int>& finalDecomp
) const ) const
{ {
if (debug) if (debug)
{ {
Pout<< "ptscotchDecomp : entering with xadj:" << xadj.size() << endl; Pout<< "ptscotchDecomp : entering with xadj:" << xadjSize << endl;
} }
// Dump graph // Dump graph
@ -387,7 +416,7 @@ Foam::label Foam::ptscotchDecomp::decompose
Pout<< "Dumping Scotch graph file to " << str.name() << endl Pout<< "Dumping Scotch graph file to " << str.name() << endl
<< "Use this in combination with dgpart." << endl; << "Use this in combination with dgpart." << endl;
globalIndex globalCells(xadj.size()-1); globalIndex globalCells(xadjSize-1);
// Distributed graph file (.grf) // Distributed graph file (.grf)
label version = 2; label version = 2;
@ -400,17 +429,17 @@ Foam::label Foam::ptscotchDecomp::decompose
// Total number of vertices (vertglbnbr) // Total number of vertices (vertglbnbr)
str << globalCells.size(); str << globalCells.size();
// Total number of connections (edgeglbnbr) // Total number of connections (edgeglbnbr)
str << ' ' << returnReduce(xadj[xadj.size()-1], sumOp<label>()) str << ' ' << returnReduce(xadj[xadjSize-1], sumOp<label>())
<< nl; << nl;
// Local number of vertices (vertlocnbr) // Local number of vertices (vertlocnbr)
str << xadj.size()-1; str << xadjSize-1;
// Local number of connections (edgelocnbr) // Local number of connections (edgelocnbr)
str << ' ' << xadj[xadj.size()-1] << nl; str << ' ' << xadj[xadjSize-1] << nl;
// Numbering starts from 0 // Numbering starts from 0
label baseval = 0; label baseval = 0;
// 100*hasVertlabels+10*hasEdgeWeights+1*hasVertWeighs // 100*hasVertlabels+10*hasEdgeWeights+1*hasVertWeighs
str << baseval << ' ' << "000" << nl; str << baseval << ' ' << "000" << nl;
for (label cellI = 0; cellI < xadj.size()-1; cellI++) for (label cellI = 0; cellI < xadjSize-1; cellI++)
{ {
label start = xadj[cellI]; label start = xadj[cellI];
label end = xadj[cellI+1]; label end = xadj[cellI+1];
@ -462,8 +491,9 @@ Foam::label Foam::ptscotchDecomp::decompose
// Check for externally provided cellweights and if so initialise weights // Check for externally provided cellweights and if so initialise weights
scalar minWeights = gMin(cWeights); scalar minWeights = gMin(cWeights);
scalar maxWeights = gMax(cWeights);
if (!cWeights.empty()) if (maxWeights > minWeights)
{ {
if (minWeights <= 0) if (minWeights <= 0)
{ {
@ -474,13 +504,13 @@ Foam::label Foam::ptscotchDecomp::decompose
<< endl; << endl;
} }
if (cWeights.size() != xadj.size()-1) if (cWeights.size() != xadjSize-1)
{ {
FatalErrorIn FatalErrorIn
( (
"ptscotchDecomp::decompose(..)" "ptscotchDecomp::decompose(..)"
) << "Number of cell weights " << cWeights.size() ) << "Number of cell weights " << cWeights.size()
<< " does not equal number of cells " << xadj.size()-1 << " does not equal number of cells " << xadjSize-1
<< exit(FatalError); << exit(FatalError);
} }
} }
@ -508,7 +538,7 @@ Foam::label Foam::ptscotchDecomp::decompose
Pstream::scatter(rangeScale); Pstream::scatter(rangeScale);
if (!cWeights.empty()) if (maxWeights > minWeights)
{ {
// Convert to integers. // Convert to integers.
velotab.setSize(cWeights.size()); velotab.setSize(cWeights.size());
@ -532,11 +562,11 @@ Foam::label Foam::ptscotchDecomp::decompose
if (debug) if (debug)
{ {
Pout<< "SCOTCH_dgraphBuild with:" << nl Pout<< "SCOTCH_dgraphBuild with:" << nl
<< "xadj.size()-1 : " << xadj.size()-1 << nl << "xadjSize-1 : " << xadjSize-1 << nl
<< "xadj : " << long(xadj.begin()) << nl << "xadj : " << long(xadj) << nl
<< "velotab : " << long(velotab.begin()) << nl << "velotab : " << long(velotab.begin()) << nl
<< "adjncy.size() : " << adjncy.size() << nl << "adjncySize : " << adjncySize << nl
<< "adjncy : " << long(adjncy.begin()) << nl << "adjncy : " << long(adjncy) << nl
<< endl; << endl;
} }
@ -546,19 +576,19 @@ Foam::label Foam::ptscotchDecomp::decompose
( (
&grafdat, // grafdat &grafdat, // grafdat
0, // baseval, c-style numbering 0, // baseval, c-style numbering
xadj.size()-1, // vertlocnbr, nCells xadjSize-1, // vertlocnbr, nCells
xadj.size()-1, // vertlocmax xadjSize-1, // vertlocmax
const_cast<SCOTCH_Num*>(xadj.begin()), const_cast<SCOTCH_Num*>(xadj),
// vertloctab, start index per cell into // vertloctab, start index per cell into
// adjncy // adjncy
const_cast<SCOTCH_Num*>(&xadj[1]),// vendloctab, end index ,, const_cast<SCOTCH_Num*>(xadj+1),// vendloctab, end index ,,
const_cast<SCOTCH_Num*>(velotab.begin()),// veloloctab, vtx weights const_cast<SCOTCH_Num*>(velotab.begin()),// veloloctab, vtx weights
NULL, // vlblloctab NULL, // vlblloctab
adjncy.size(), // edgelocnbr, number of arcs adjncySize, // edgelocnbr, number of arcs
adjncy.size(), // edgelocsiz adjncySize, // edgelocsiz
const_cast<SCOTCH_Num*>(adjncy.begin()), // edgeloctab const_cast<SCOTCH_Num*>(adjncy), // edgeloctab
NULL, // edgegsttab NULL, // edgegsttab
NULL // edlotab, edge weights NULL // edlotab, edge weights
), ),
@ -620,12 +650,6 @@ Foam::label Foam::ptscotchDecomp::decompose
} }
//SCOTCH_Mapping mapdat;
//SCOTCH_dgraphMapInit(&grafdat, &mapdat, &archdat, NULL);
//SCOTCH_dgraphMapCompute(&grafdat, &mapdat, &stradat); /*Perform mapping*/
//SCOTCHdgraphMapExit(&grafdat, &mapdat);
// Hack:switch off fpu error trapping // Hack:switch off fpu error trapping
# ifdef LINUX_GNUC # ifdef LINUX_GNUC
int oldExcepts = fedisableexcept int oldExcepts = fedisableexcept
@ -636,12 +660,15 @@ Foam::label Foam::ptscotchDecomp::decompose
); );
# endif # endif
// Note: always provide allocated storage even if local size 0
finalDecomp.setSize(max(1, xadjSize-1));
finalDecomp = 0;
if (debug) if (debug)
{ {
Pout<< "SCOTCH_dgraphMap" << endl; Pout<< "SCOTCH_dgraphMap" << endl;
} }
finalDecomp.setSize(xadj.size()-1);
finalDecomp = 0;
check check
( (
SCOTCH_dgraphMap SCOTCH_dgraphMap
@ -660,7 +687,7 @@ Foam::label Foam::ptscotchDecomp::decompose
//finalDecomp.setSize(xadj.size()-1); //finalDecomp.setSize(xadjSize-1);
//check //check
//( //(
// SCOTCH_dgraphPart // SCOTCH_dgraphPart
@ -719,12 +746,6 @@ Foam::labelList Foam::ptscotchDecomp::decompose
<< exit(FatalError); << exit(FatalError);
} }
// // For running sequential ...
// if (Pstream::nProcs() <= 1)
// {
// return scotchDecomp(decompositionDict_, mesh_)
// .decompose(points, pointWeights);
// }
// Make Metis CSR (Compressed Storage Format) storage // Make Metis CSR (Compressed Storage Format) storage
// adjncy : contains neighbours (= edges in graph) // adjncy : contains neighbours (= edges in graph)
@ -743,7 +764,7 @@ Foam::labelList Foam::ptscotchDecomp::decompose
// Decompose using default weights // Decompose using default weights
List<int> finalDecomp; List<int> finalDecomp;
decomposeZeroDomains decompose
( (
mesh.time().path()/mesh.name(), mesh.time().path()/mesh.name(),
cellCells.m(), cellCells.m(),
@ -753,7 +774,7 @@ Foam::labelList Foam::ptscotchDecomp::decompose
); );
// Copy back to labelList // Copy back to labelList
labelList decomp(finalDecomp.size()); labelList decomp(points.size());
forAll(decomp, i) forAll(decomp, i)
{ {
decomp[i] = finalDecomp[i]; decomp[i] = finalDecomp[i];
@ -780,12 +801,6 @@ Foam::labelList Foam::ptscotchDecomp::decompose
<< exit(FatalError); << exit(FatalError);
} }
// // For running sequential ...
// if (Pstream::nProcs() <= 1)
// {
// return scotchDecomp(decompositionDict_, mesh)
// .decompose(agglom, agglomPoints, pointWeights);
// }
// Make Metis CSR (Compressed Storage Format) storage // Make Metis CSR (Compressed Storage Format) storage
// adjncy : contains neighbours (= edges in graph) // adjncy : contains neighbours (= edges in graph)
@ -802,7 +817,7 @@ Foam::labelList Foam::ptscotchDecomp::decompose
// Decompose using weights // Decompose using weights
List<int> finalDecomp; List<int> finalDecomp;
decomposeZeroDomains decompose
( (
mesh.time().path()/mesh.name(), mesh.time().path()/mesh.name(),
cellCells.m(), cellCells.m(),
@ -840,13 +855,6 @@ Foam::labelList Foam::ptscotchDecomp::decompose
<< ")." << exit(FatalError); << ")." << exit(FatalError);
} }
// // For running sequential ...
// if (Pstream::nProcs() <= 1)
// {
// return scotchDecomp(decompositionDict_, mesh)
// .decompose(globalCellCells, cellCentres, cWeights);
// }
// Make Metis CSR (Compressed Storage Format) storage // Make Metis CSR (Compressed Storage Format) storage
// adjncy : contains neighbours (= edges in graph) // adjncy : contains neighbours (= edges in graph)
@ -856,7 +864,7 @@ Foam::labelList Foam::ptscotchDecomp::decompose
// Decompose using weights // Decompose using weights
List<int> finalDecomp; List<int> finalDecomp;
decomposeZeroDomains decompose
( (
"ptscotch", "ptscotch",
cellCells.m(), cellCells.m(),
@ -866,7 +874,7 @@ Foam::labelList Foam::ptscotchDecomp::decompose
); );
// Copy back to labelList // Copy back to labelList
labelList decomp(finalDecomp.size()); labelList decomp(cellCentres.size());
forAll(decomp, i) forAll(decomp, i)
{ {
decomp[i] = finalDecomp[i]; decomp[i] = finalDecomp[i];
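The hunks above fold the old decomposeZeroDomains helper into a single decompose wrapper that forwards to a new raw-pointer overload; the wrapper substitutes one-element dummy arrays when a processor owns no cells, and the low-level overload always allocates finalDecomp with at least one entry so PT-Scotch never sees a null buffer. A minimal sketch of that forwarding pattern, written as plain C++ with hypothetical names rather than the OpenFOAM API:

    #include <algorithm>
    #include <vector>

    // Low-level routine: takes raw arrays so callers can substitute dummy
    // storage when a rank has nothing to decompose (hypothetical names).
    int decomposeRaw(int adjncySize, const int adjncy[],
                     int xadjSize, const int xadj[],
                     std::vector<int>& finalDecomp)
    {
        // Always hand back allocated storage, even for an empty local graph.
        finalDecomp.assign(std::max(1, xadjSize - 1), 0);
        // ... call the distributed graph partitioner here ...
        (void)adjncySize; (void)adjncy; (void)xadj;
        return 0;
    }

    // Convenience wrapper: forwards container data, falling back to
    // one-element dummies so the pointers handed down are never invalid.
    int decompose(const std::vector<int>& adjncy,
                  const std::vector<int>& xadj,
                  std::vector<int>& finalDecomp)
    {
        std::vector<int> dummyAdjncy(1, 0);
        std::vector<int> dummyXadj(1, 0);

        return decomposeRaw
        (
            int(adjncy.size()),
            adjncy.empty() ? dummyAdjncy.data() : adjncy.data(),
            int(xadj.size()),
            xadj.empty() ? dummyXadj.data() : xadj.data(),
            finalDecomp
        );
    }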
View File
@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -60,17 +60,7 @@ class ptscotchDecomp
//- Check and print error message //- Check and print error message
static void check(const int, const char*); static void check(const int, const char*);
//- Decompose with optionally zero sized domains //- Decompose. Handles size 0 arrays
label decomposeZeroDomains
(
const fileName& meshPath,
const List<int>& initadjncy,
const List<int>& initxadj,
const scalarField& initcWeights,
List<int>& finalDecomp
) const;
//- Decompose
label decompose label decompose
( (
const fileName& meshPath, const fileName& meshPath,
@ -80,6 +70,18 @@ class ptscotchDecomp
List<int>& finalDecomp List<int>& finalDecomp
) const; ) const;
//- Low level decompose
label decompose
(
const fileName& meshPath,
const int adjncySize,
const int adjncy[],
const int xadjSize,
const int xadj[],
const scalarField& cWeights,
List<int>& finalDecomp
) const;
//- Disallow default bitwise copy construct and assignment //- Disallow default bitwise copy construct and assignment
void operator=(const ptscotchDecomp&); void operator=(const ptscotchDecomp&);
ptscotchDecomp(const ptscotchDecomp&); ptscotchDecomp(const ptscotchDecomp&);
View File
@ -157,9 +157,22 @@ void Foam::fieldMinMax::writeFileHeader()
{ {
fieldMinMaxFilePtr_() fieldMinMaxFilePtr_()
<< "# Time" << token::TAB << "field" << token::TAB << "# Time" << token::TAB << "field" << token::TAB
<< "min" << token::TAB << "position(min)" << token::TAB << "min" << token::TAB << "position(min)";
<< "max" << token::TAB << "position(max)" << token::TAB
<< endl; if (Pstream::parRun())
{
fieldMinMaxFilePtr_() << token::TAB << "proc";
}
fieldMinMaxFilePtr_()
<< token::TAB << "max" << token::TAB << "position(max)";
if (Pstream::parRun())
{
fieldMinMaxFilePtr_() << token::TAB << "proc";
}
fieldMinMaxFilePtr_() << endl;
} }
} }
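In a serial run the header row assembled above therefore reads

    # Time  field  min  position(min)  max  position(max)

while a parallel run inserts a proc column after each position:

    # Time  field  min  position(min)  proc  max  position(max)  proc

(all columns tab-separated, as written by the code).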
View File
@ -88,16 +88,43 @@ void Foam::fieldMinMax::calcMinMaxFields
fieldMinMaxFilePtr_() fieldMinMaxFilePtr_()
<< obr_.time().value() << token::TAB << obr_.time().value() << token::TAB
<< fieldName << token::TAB << fieldName << token::TAB
<< minValue << token::TAB << minC << token::TAB << minValue << token::TAB << minC;
<< maxValue << token::TAB << maxC << endl;
if (Pstream::parRun())
{
fieldMinMaxFilePtr_() << token::TAB << minI;
}
fieldMinMaxFilePtr_()
<< token::TAB << maxValue << token::TAB << maxC;
if (Pstream::parRun())
{
fieldMinMaxFilePtr_() << token::TAB << maxI;
}
fieldMinMaxFilePtr_() << endl;
} }
if (log_) if (log_)
{ {
Info<< " min(mag(" << fieldName << ")) = " Info<< " min(mag(" << fieldName << ")) = "
<< minValue << " at position " << minC << nl << minValue << " at position " << minC;
<< " max(mag(" << fieldName << ")) = "
<< maxValue << " at position " << maxC << nl; if (Pstream::parRun())
{
Info<< " on processor " << minI;
}
Info<< nl << " max(mag(" << fieldName << ")) = "
<< maxValue << " at position " << maxC;
if (Pstream::parRun())
{
Info<< " on processor " << maxI;
}
Info<< endl;
} }
} }
break; break;
@ -142,16 +169,43 @@ void Foam::fieldMinMax::calcMinMaxFields
fieldMinMaxFilePtr_() fieldMinMaxFilePtr_()
<< obr_.time().value() << token::TAB << obr_.time().value() << token::TAB
<< fieldName << token::TAB << fieldName << token::TAB
<< minValue << token::TAB << minC << token::TAB << minValue << token::TAB << minC;
<< maxValue << token::TAB << maxC << endl;
if (Pstream::parRun())
{
fieldMinMaxFilePtr_() << token::TAB << minI;
}
fieldMinMaxFilePtr_()
<< token::TAB << maxValue << token::TAB << maxC;
if (Pstream::parRun())
{
fieldMinMaxFilePtr_() << token::TAB << maxI;
}
fieldMinMaxFilePtr_() << endl;
} }
if (log_) if (log_)
{ {
Info<< " min(" << fieldName << ") = " Info<< " min(" << fieldName << ") = "
<< minValue << " at position " << minC << nl << minValue << " at position " << minC;
<< " max(" << fieldName << ") = "
<< maxValue << " at position " << maxC << nl; if (Pstream::parRun())
{
Info<< " on processor " << minI;
}
Info<< nl << " max(" << fieldName << ") = "
<< maxValue << " at position " << maxC;
if (Pstream::parRun())
{
Info<< " on processor " << maxI;
}
Info<< endl;
} }
} }
break; break;
View File
@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -117,6 +117,7 @@ protected:
//- List of patch IDs internally coupled with the primary region //- List of patch IDs internally coupled with the primary region
labelList intCoupledPatchIDs_; labelList intCoupledPatchIDs_;
//- Region name //- Region name
word regionName_; word regionName_;
@ -235,21 +236,21 @@ public:
) const; ) const;
//- Convert a local region field to the primary region with op //- Convert a local region field to the primary region with op
template<class Type, class BinaryOp> template<class Type, class CombineOp>
void toPrimary void toPrimary
( (
const label regionPatchI, const label regionPatchI,
List<Type>& regionField, List<Type>& regionField,
const BinaryOp& bop const CombineOp& cop
) const; ) const;
//- Convert a primary region field to the local region with op //- Convert a primary region field to the local region with op
template<class Type, class BinaryOp> template<class Type, class CombineOp>
void toRegion void toRegion
( (
const label regionPatchI, const label regionPatchI,
List<Type>& primaryFieldField, List<Type>& primaryFieldField,
const BinaryOp& bop const CombineOp& cop
) const; ) const;
View File
@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -77,12 +77,12 @@ void Foam::regionModels::regionModel::toRegion
} }
template<class Type, class BinaryOp> template<class Type, class CombineOp>
void Foam::regionModels::regionModel::toPrimary void Foam::regionModels::regionModel::toPrimary
( (
const label regionPatchI, const label regionPatchI,
List<Type>& regionField, List<Type>& regionField,
const BinaryOp& bop const CombineOp& cop
) const ) const
{ {
forAll(intCoupledPatchIDs_, i) forAll(intCoupledPatchIDs_, i)
@ -94,7 +94,7 @@ void Foam::regionModels::regionModel::toPrimary
( (
regionMesh().boundaryMesh()[regionPatchI] regionMesh().boundaryMesh()[regionPatchI]
); );
mpb.reverseDistribute(regionField, bop); mpb.reverseDistribute(regionField, cop);
return; return;
} }
} }
@ -105,19 +105,19 @@ void Foam::regionModels::regionModel::toPrimary
"(" "("
"const label, " "const label, "
"List<Type>&, " "List<Type>&, "
"const BinaryOp&" "const CombineOp&"
") const" ") const"
) << "Region patch ID " << regionPatchI << " not found in region mesh" ) << "Region patch ID " << regionPatchI << " not found in region mesh"
<< abort(FatalError); << abort(FatalError);
} }
template<class Type, class BinaryOp> template<class Type, class CombineOp>
void Foam::regionModels::regionModel::toRegion void Foam::regionModels::regionModel::toRegion
( (
const label regionPatchI, const label regionPatchI,
List<Type>& primaryField, List<Type>& primaryField,
const BinaryOp& bop const CombineOp& cop
) const ) const
{ {
forAll(intCoupledPatchIDs_, i) forAll(intCoupledPatchIDs_, i)
@ -129,14 +129,14 @@ void Foam::regionModels::regionModel::toRegion
( (
regionMesh().boundaryMesh()[regionPatchI] regionMesh().boundaryMesh()[regionPatchI]
); );
mpb.distribute(primaryField, bop); mpb.distribute(primaryField, cop);
return; return;
} }
} }
FatalErrorIn FatalErrorIn
( (
"const void toRegion(const label, List<Type>&, const BinaryOp&) const" "const void toRegion(const label, List<Type>&, const CombineOp&) const"
) << "Region patch ID " << regionPatchI << " not found in region mesh" ) << "Region patch ID " << regionPatchI << " not found in region mesh"
<< abort(FatalError); << abort(FatalError);
} }
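Renaming the template parameter to CombineOp matches how these transfers are driven: the op is an assign-style functor applied per mapped element. A hypothetical call from inside a regionModel-derived class, assuming a per-face scalar field qDot on a coupled region patch (names not taken from the sources above):

    // Accumulate the region-side values onto the primary region,
    // summing contributions that map to the same primary face.
    scalarField qDot(regionMesh().boundaryMesh()[regionPatchI].size(), 0.0);
    // ... fill qDot from the film/pyrolysis solution ...
    toPrimary(regionPatchI, qDot, plusEqOp<scalar>());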
View File
@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -879,22 +879,19 @@ scalar kinematicSingleLayer::CourantNumber() const
if (regionMesh().nInternalFaces() > 0) if (regionMesh().nInternalFaces() > 0)
{ {
const scalar deltaT = time_.deltaTValue(); const scalarField sumPhi(fvc::surfaceSum(mag(phi_)));
const surfaceScalarField SfUfbyDelta const scalarField& V = regionMesh().V();
(
regionMesh().surfaceInterpolation::deltaCoeffs()*mag(phi_)
);
const surfaceScalarField rhoDelta(fvc::interpolate(rho_*delta_));
const surfaceScalarField& magSf = regionMesh().magSf();
forAll(rhoDelta, i) forAll(deltaRho_, i)
{ {
if (rhoDelta[i] > ROOTVSMALL) if (deltaRho_[i] > SMALL)
{ {
CoNum = max(CoNum, SfUfbyDelta[i]/rhoDelta[i]/magSf[i]*deltaT); CoNum = max(CoNum, sumPhi[i]/deltaRho_[i]/V[i]);
} }
} }
CoNum *= 0.5*time_.deltaTValue();
} }
reduce(CoNum, maxOp<scalar>()); reduce(CoNum, maxOp<scalar>());
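Written out, the rewritten film Courant number is

    Co = 0.5*deltaT * max_i( sum_f |phi_f| / (deltaRho_i * V_i) )

i.e. the per-cell sum of face flux magnitudes divided by the cell's delta*rho and volume, reduced over cells and processors and scaled by half the time step.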
View File
@ -229,15 +229,14 @@ Foam::MeshedSurface<Face>::MeshedSurface
ParentType(List<Face>(), surf.points()) ParentType(List<Face>(), surf.points())
{ {
labelList faceMap; labelList faceMap;
this->storedZones().transfer(surf.sortedZones(faceMap)); this->storedZones() = surf.sortedZones(faceMap);
const List<Face>& origFaces = surf.faces(); const List<Face>& origFaces = surf.faces();
List<Face> newFaces(origFaces.size()); List<Face> newFaces(origFaces.size());
// this is somewhat like ListOps reorder and/or IndirectList
forAll(newFaces, faceI) forAll(newFaces, faceI)
{ {
newFaces[faceI] = origFaces[faceMap[faceI]]; newFaces[faceMap[faceI]] = origFaces[faceI];
} }
this->storedFaces().transfer(newFaces); this->storedFaces().transfer(newFaces);
@ -1084,7 +1083,7 @@ void Foam::MeshedSurface<Face>::transfer
forAll(faceMap, faceI) forAll(faceMap, faceI)
{ {
newFaces[faceI].transfer(oldFaces[faceMap[faceI]]); newFaces[faceMap[faceI]].transfer(oldFaces[faceI]);
} }
reset reset
View File
@ -2,7 +2,7 @@
========= | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011-2012 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation |
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
@ -280,6 +280,7 @@ public:
//- Sort faces according to zoneIds //- Sort faces according to zoneIds
// Returns a surfZoneList and sets faceMap to index within faces() // Returns a surfZoneList and sets faceMap to index within faces()
// (i.e. map from original,unsorted to sorted)
surfZoneList sortedZones(labelList& faceMap) const; surfZoneList sortedZones(labelList& faceMap) const;
//- Set zones to 0 and set a single zone //- Set zones to 0 and set a single zone
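The clarified comment pins down the direction of faceMap (original index to sorted position), which is why the constructor and transfer loops above now scatter rather than gather. A minimal sketch with hypothetical container names:

    // faceMap[i] is the position of original face i in the sorted list,
    // so the sorted list is filled by scattering the originals.
    List<face> sorted(orig.size());
    forAll(orig, i)
    {
        sorted[faceMap[i]] = orig[i];
    }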
View File
@ -1,6 +1,8 @@
EXE_INC = \ EXE_INC = \
-I$(LIB_SRC)/finiteVolume/lnInclude \ -I$(LIB_SRC)/finiteVolume/lnInclude \
-I$(LIB_SRC)/thermophysicalModels/specie/lnInclude -I$(LIB_SRC)/thermophysicalModels/specie/lnInclude \
-I/home/tom3/sergio/development/chtMultiRegionCoupledFoam/lnInclude
LIB_LIBS = \ LIB_LIBS = \
-lfiniteVolume -lfiniteVolume \
-L$(FOAM_USER_LIBBIN)/FvPatchScalarFieldEnthalpyJump
View File
@ -61,10 +61,14 @@ Foam::wordList Foam::basicThermo::heBoundaryTypes()
{ {
hbt[patchi] = gradientEnergyFvPatchScalarField::typeName; hbt[patchi] = gradientEnergyFvPatchScalarField::typeName;
} }
else if (isA<mixedFvPatchScalarField>(tbf[patchi])) else if(isA<mixedFvPatchScalarField>(tbf[patchi]))
{ {
hbt[patchi] = mixedEnergyFvPatchScalarField::typeName; hbt[patchi] = mixedEnergyFvPatchScalarField::typeName;
} }
else if (isA<temperatureJumpFvPatchScalarField>(tbf[patchi]))
{
hbt[patchi] = enthalpyJumpFvPatchScalarField::typeName;
}
} }
return hbt; return hbt;
View File
@ -0,0 +1,165 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
#include "addToRunTimeSelectionTable.H"
#include "enthalpyJumpFvPatchScalarField.H"
#include "temperatureJumpFvPatchScalarField.H"
#include "basicThermo.H"
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::enthalpyJumpFvPatchScalarField::enthalpyJumpFvPatchScalarField
(
const fvPatch& p,
const DimensionedField<scalar, volMesh>& iF
)
:
fixedJumpFvPatchField<scalar>(p, iF)
{}
Foam::enthalpyJumpFvPatchScalarField::enthalpyJumpFvPatchScalarField
(
const enthalpyJumpFvPatchScalarField& ptf,
const fvPatch& p,
const DimensionedField<scalar, volMesh>& iF,
const fvPatchFieldMapper& mapper
)
:
fixedJumpFvPatchField<scalar>(ptf, p, iF, mapper)
{}
Foam::enthalpyJumpFvPatchScalarField::enthalpyJumpFvPatchScalarField
(
const fvPatch& p,
const DimensionedField<scalar, volMesh>& iF,
const dictionary& dict
)
:
fixedJumpFvPatchField<scalar>(p, iF)
{
if (dict.found("value"))
{
fvPatchScalarField::operator=
(
scalarField("value", dict, p.size())
);
}
}
Foam::enthalpyJumpFvPatchScalarField::enthalpyJumpFvPatchScalarField
(
const enthalpyJumpFvPatchScalarField& ptf
)
:
cyclicLduInterfaceField(),
fixedJumpFvPatchField<scalar>(ptf)
{}
Foam::enthalpyJumpFvPatchScalarField::enthalpyJumpFvPatchScalarField
(
const enthalpyJumpFvPatchScalarField& ptf,
const DimensionedField<scalar, volMesh>& iF
)
:
fixedJumpFvPatchField<scalar>(ptf, iF)
{}
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
void Foam::enthalpyJumpFvPatchScalarField::updateCoeffs()
{
if (this->updated())
{
return;
}
if (this->cyclicPatch().owner())
{
const basicThermo& thermo = db().lookupObject<basicThermo>
(
"thermophysicalProperties"
);
label patchID = patch().index();
const temperatureJumpFvPatchScalarField& TbPatch =
refCast<const temperatureJumpFvPatchScalarField>
(
thermo.T().boundaryField()[patchID]
);
const scalar time = this->db().time().value();
const scalarField jumpTb
(
patch().size(), TbPatch.jumpTable().value(time)
);
const labelUList& faceCells = this->patch().faceCells();
if (db().foundObject<volScalarField>("h"))
{
jump_ = thermo.h(jumpTb, faceCells);
}
else if (db().foundObject<volScalarField>("hs"))
{
jump_ = thermo.hs(jumpTb, faceCells);
}
else
{
FatalErrorIn("enthalpyJumpFvPatchScalarField::updateCoeffs()")
<< " hs or h are not found in db()"
<< exit(FatalError);
}
}
fixedJumpFvPatchField<scalar>::updateCoeffs();
}
void Foam::enthalpyJumpFvPatchScalarField::write(Ostream& os) const
{
fixedJumpFvPatchField<scalar>::write(os);
this->writeEntry("value", os);
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
makePatchTypeField
(
fvPatchScalarField,
enthalpyJumpFvPatchScalarField
);
}
// ************************************************************************* //
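For completeness, a hypothetical boundary entry for this condition (keyword names assumed, not taken from the sources above): the jump itself is computed in updateCoeffs() from the temperatureJump condition on T, so the enthalpy-side entry needs little more than a starting value.

    <patchName>
    {
        type            enthalpyJump;   // assumed entry; pairs with a
        patchType       cyclic;         // temperatureJump condition on T
        value           uniform 0;
    }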
View File
@ -0,0 +1,142 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2012 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
Class
Foam::enthalpyJumpFvPatchScalarField
Description
SourceFiles
enthalpyJumpFvPatchScalarField.C
\*---------------------------------------------------------------------------*/
#ifndef enthalpyJumpFvPatchScalarField_H
#define enthalpyJumpFvPatchScalarField_H
#include "fixedJumpFvPatchField.H"
#include "DataEntry.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
/*---------------------------------------------------------------------------*\
Class enthalpyJumpFvPatchScalarField Declaration
\*---------------------------------------------------------------------------*/
class enthalpyJumpFvPatchScalarField
:
public fixedJumpFvPatchField<scalar>
{
public:
//- Runtime type information
TypeName("enthalpyJump");
// Constructors
//- Construct from patch and internal field
enthalpyJumpFvPatchScalarField
(
const fvPatch&,
const DimensionedField<scalar, volMesh>&
);
//- Construct from patch, internal field and dictionary
enthalpyJumpFvPatchScalarField
(
const fvPatch&,
const DimensionedField<scalar, volMesh>&,
const dictionary&
);
//- Construct by mapping given enthalpyJumpFvPatchScalarField onto a
// new patch
enthalpyJumpFvPatchScalarField
(
const enthalpyJumpFvPatchScalarField&,
const fvPatch&,
const DimensionedField<scalar, volMesh>&,
const fvPatchFieldMapper&
);
//- Construct as copy
enthalpyJumpFvPatchScalarField
(
const enthalpyJumpFvPatchScalarField&
);
//- Construct and return a clone
virtual tmp<fvPatchField<scalar> > clone() const
{
return tmp<fvPatchField<scalar> >
(
new enthalpyJumpFvPatchScalarField(*this)
);
}
//- Construct as copy setting internal field reference
enthalpyJumpFvPatchScalarField
(
const enthalpyJumpFvPatchScalarField&,
const DimensionedField<scalar, volMesh>&
);
//- Construct and return a clone setting internal field reference
virtual tmp<fvPatchField<scalar> > clone
(
const DimensionedField<scalar, volMesh>& iF
) const
{
return tmp<fvPatchField<scalar> >
(
new enthalpyJumpFvPatchScalarField(*this, iF)
);
}
// Member functions
// Evaluation functions
//- Update the coefficients
virtual void updateCoeffs();
//- Write
virtual void write(Ostream&) const;
};
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace Foam
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#endif
// ************************************************************************* //