Merge branch 'develop' of develop.openfoam.com:Development/OpenFOAM-plus into develop

Author: sergio
Date:   2018-10-17 10:29:34 -07:00
736 changed files with 10759 additions and 8147 deletions

View File

@ -49,7 +49,7 @@ Foam::XiGModels::basicSubGrid::basicSubGrid
) )
: :
XiGModel(XiGProperties, thermo, turbulence, Su), XiGModel(XiGProperties, thermo, turbulence, Su),
k1(readScalar(XiGModelCoeffs_.lookup("k1"))), k1(XiGModelCoeffs_.get<scalar>("k1")),
XiGModel_(XiGModel::New(XiGModelCoeffs_, thermo, turbulence, Su)) XiGModel_(XiGModel::New(XiGModelCoeffs_, thermo, turbulence, Su))
{} {}
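
Most hunks in this commit follow the same pattern: the stream-based readScalar(dict.lookup("key")) idiom is replaced by the typed accessor dict.get<scalar>("key"), which reads and type-checks the entry in one call. A minimal sketch of the two idioms, assuming a hypothetical "coeffs" dictionary whose key names are chosen only for illustration:

    // Sketch only: old vs new dictionary access (hypothetical keys).
    #include "dictionary.H"
    #include "IOstreams.H"

    using namespace Foam;

    void readCoeffs(const dictionary& coeffs)
    {
        // Old idiom: lookup() returns a token stream and readScalar()/
        // readLabel() parse it; a type mismatch only surfaces during parsing.
        const scalar k1Old = readScalar(coeffs.lookup("k1"));
        const label nOld = readLabel(coeffs.lookup("nCorr"));

        // New idiom: get<Type>() reads and checks the entry in one call.
        const scalar k1 = coeffs.get<scalar>("k1");
        const label n = coeffs.get<label>("nCorr");

        // Optional entries keep using lookupOrDefault (unchanged by the commit).
        const scalar relax = coeffs.lookupOrDefault<scalar>("relax", 1.0);

        Info<< k1Old << " " << nOld << " " << k1 << " " << n
            << " " << relax << nl;
    }

The same substitution appears below for scalar, label, bool, word and fileName entries.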

View File

@ -49,13 +49,10 @@ Foam::XiEqModels::Gulder::Gulder
) )
: :
XiEqModel(XiEqProperties, thermo, turbulence, Su), XiEqModel(XiEqProperties, thermo, turbulence, Su),
XiEqCoef_(readScalar(XiEqModelCoeffs_.lookup("XiEqCoef"))), XiEqCoef_(XiEqModelCoeffs_.get<scalar>("XiEqCoef")),
SuMin_(0.01*Su.average()), SuMin_(0.01*Su.average()),
uPrimeCoef_(readScalar(XiEqModelCoeffs_.lookup("uPrimeCoef"))), uPrimeCoef_(XiEqModelCoeffs_.get<scalar>("uPrimeCoef")),
subGridSchelkin_ subGridSchelkin_(XiEqModelCoeffs_.get<bool>("subGridSchelkin"))
(
readBool(XiEqModelCoeffs_.lookup("subGridSchelkin"))
)
{} {}

View File

@ -49,15 +49,12 @@ Foam::XiEqModels::SCOPEXiEq::SCOPEXiEq
) )
: :
XiEqModel(XiEqProperties, thermo, turbulence, Su), XiEqModel(XiEqProperties, thermo, turbulence, Su),
XiEqCoef_(readScalar(XiEqModelCoeffs_.lookup("XiEqCoef"))), XiEqCoef_(XiEqModelCoeffs_.get<scalar>("XiEqCoef")),
XiEqExp_(readScalar(XiEqModelCoeffs_.lookup("XiEqExp"))), XiEqExp_(XiEqModelCoeffs_.get<scalar>("XiEqExp")),
lCoef_(readScalar(XiEqModelCoeffs_.lookup("lCoef"))), lCoef_(XiEqModelCoeffs_.get<scalar>("lCoef")),
SuMin_(0.01*Su.average()), SuMin_(0.01*Su.average()),
uPrimeCoef_(readScalar(XiEqModelCoeffs_.lookup("uPrimeCoef"))), uPrimeCoef_(XiEqModelCoeffs_.get<scalar>("uPrimeCoef")),
subGridSchelkin_ subGridSchelkin_(XiEqModelCoeffs_.get<bool>("subGridSchelkin")),
(
readBool(XiEqModelCoeffs_.lookup("subGridSchelkin"))
),
MaModel MaModel
( (
Su.mesh().lookupObject<IOdictionary>("combustionProperties"), Su.mesh().lookupObject<IOdictionary>("combustionProperties"),

View File

@ -49,7 +49,7 @@ Foam::XiEqModels::instability::instability
) )
: :
XiEqModel(XiEqProperties, thermo, turbulence, Su), XiEqModel(XiEqProperties, thermo, turbulence, Su),
XiEqIn(readScalar(XiEqModelCoeffs_.lookup("XiEqIn"))), XiEqIn(XiEqModelCoeffs_.get<scalar>("XiEqIn")),
XiEqModel_(XiEqModel::New(XiEqModelCoeffs_, thermo, turbulence, Su)) XiEqModel_(XiEqModel::New(XiEqModelCoeffs_, thermo, turbulence, Su))
{} {}

View File

@ -49,7 +49,7 @@ Foam::XiGModels::KTS::KTS
) )
: :
XiGModel(XiGProperties, thermo, turbulence, Su), XiGModel(XiGProperties, thermo, turbulence, Su),
GEtaCoef_(readScalar(XiGModelCoeffs_.lookup("GEtaCoef"))) GEtaCoef_(XiGModelCoeffs_.get<scalar>("GEtaCoef"))
{} {}

View File

@ -52,7 +52,7 @@ Foam::XiModels::algebraic::algebraic
) )
: :
XiModel(XiProperties, thermo, turbulence, Su, rho, b, phi), XiModel(XiProperties, thermo, turbulence, Su, rho, b, phi),
XiShapeCoef(readScalar(XiModelCoeffs_.lookup("XiShapeCoef"))), XiShapeCoef(XiModelCoeffs_.get<scalar>("XiShapeCoef")),
XiEqModel_(XiEqModel::New(XiProperties, thermo, turbulence, Su)), XiEqModel_(XiEqModel::New(XiProperties, thermo, turbulence, Su)),
XiGModel_(XiGModel::New(XiProperties, thermo, turbulence, Su)) XiGModel_(XiGModel::New(XiProperties, thermo, turbulence, Su))
{} {}

View File

@ -57,7 +57,7 @@ Foam::XiModels::transport::transport
) )
: :
XiModel(XiProperties, thermo, turbulence, Su, rho, b, phi), XiModel(XiProperties, thermo, turbulence, Su, rho, b, phi),
XiShapeCoef(readScalar(XiModelCoeffs_.lookup("XiShapeCoef"))), XiShapeCoef(XiModelCoeffs_.get<scalar>("XiShapeCoef")),
XiEqModel_(XiEqModel::New(XiProperties, thermo, turbulence, Su)), XiEqModel_(XiEqModel::New(XiProperties, thermo, turbulence, Su)),
XiGModel_(XiGModel::New(XiProperties, thermo, turbulence, Su)) XiGModel_(XiGModel::New(XiProperties, thermo, turbulence, Su))
{} {}

View File

@ -53,8 +53,8 @@ Foam::laminarFlameSpeedModels::SCOPE::polynomial::polynomial
) )
: :
FixedList<scalar, 7>(polyDict.lookup("coefficients")), FixedList<scalar, 7>(polyDict.lookup("coefficients")),
ll(readScalar(polyDict.lookup("lowerLimit"))), ll(polyDict.get<scalar>("lowerLimit")),
ul(readScalar(polyDict.lookup("upperLimit"))), ul(polyDict.get<scalar>("upperLimit")),
llv(polyPhi(ll, *this)), llv(polyPhi(ll, *this)),
ulv(polyPhi(ul, *this)), ulv(polyPhi(ul, *this)),
lu(0) lu(0)
@ -75,39 +75,30 @@ Foam::laminarFlameSpeedModels::SCOPE::SCOPE
( (
IFstream IFstream
( (
fileName dict.get<fileName>("fuelFile")
(
dict.lookup("fuelFile")
)
)() )()
).optionalSubDict(typeName + "Coeffs") ).optionalSubDict(typeName + "Coeffs")
), ),
LFL_ LFL_
( (
readScalar coeffsDict_.getCompat<scalar>
(
coeffsDict_.lookupCompat
( (
"lowerFlammabilityLimit", "lowerFlammabilityLimit",
{{"lowerFlamabilityLimit", 1712}} {{"lowerFlamabilityLimit", 1712}}
) )
)
), ),
UFL_ UFL_
( (
readScalar coeffsDict_.getCompat<scalar>
(
coeffsDict_.lookupCompat
( (
"upperFlammabilityLimit", "upperFlammabilityLimit",
{{"upperFlamabilityLimit", 1712}} {{"upperFlamabilityLimit", 1712}}
) )
)
), ),
SuPolyL_(coeffsDict_.subDict("lowerSuPolynomial")), SuPolyL_(coeffsDict_.subDict("lowerSuPolynomial")),
SuPolyU_(coeffsDict_.subDict("upperSuPolynomial")), SuPolyU_(coeffsDict_.subDict("upperSuPolynomial")),
Texp_(readScalar(coeffsDict_.lookup("Texp"))), Texp_(coeffsDict_.get<scalar>("Texp")),
pexp_(readScalar(coeffsDict_.lookup("pexp"))), pexp_(coeffsDict_.get<scalar>("pexp")),
MaPolyL_(coeffsDict_.subDict("lowerMaPolynomial")), MaPolyL_(coeffsDict_.subDict("lowerMaPolynomial")),
MaPolyU_(coeffsDict_.subDict("upperMaPolynomial")) MaPolyU_(coeffsDict_.subDict("upperMaPolynomial"))
{ {
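
The flammability-limit entries above also gain a compatibility lookup: getCompat<scalar> reads the current keyword but still accepts an older spelling, paired with a version number (here 1712). A small sketch of the same mechanism, wrapped in an illustrative helper function:

    // Sketch only: typed read with a compatibility alternative for a renamed
    // keyword, mirroring the "lowerFlammabilityLimit" entry above.
    #include "dictionary.H"

    using namespace Foam;

    scalar readLowerLimit(const dictionary& coeffsDict)
    {
        return coeffsDict.getCompat<scalar>
        (
            "lowerFlammabilityLimit",            // current keyword
            {{"lowerFlamabilityLimit", 1712}}    // old spelling + version tag
        );
    }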

View File

@ -18,8 +18,8 @@
) )
); );
scalar p0 = readScalar(initialConditions.lookup("p")); scalar p0 = initialConditions.get<scalar>("p");
scalar T0 = readScalar(initialConditions.lookup("T")); scalar T0 = initialConditions.get<scalar>("T");
#include "createBaseFields.H" #include "createBaseFields.H"

View File

@ -40,7 +40,7 @@
const word& name = Y[i].name(); const word& name = Y[i].name();
if (fractions.found(name)) if (fractions.found(name))
{ {
X0[i] = readScalar(fractions.lookup(name)); X0[i] = fractions.get<scalar>(name);
} }
} }
@ -64,7 +64,7 @@
const word& name = Y[i].name(); const word& name = Y[i].name();
if (fractions.found(name)) if (fractions.found(name))
{ {
Y0[i] = readScalar(fractions.lookup(name)); Y0[i] = fractions.get<scalar>(name);
} }
} }

View File

@ -29,7 +29,7 @@ License
const dictionary& pimpleDict = pimple.dict(); const dictionary& pimpleDict = pimple.dict();
// Maximum flow Courant number // Maximum flow Courant number
scalar maxCo(readScalar(pimpleDict.lookup("maxCo"))); scalar maxCo(pimpleDict.get<scalar>("maxCo"));
// Maximum time scale // Maximum time scale
scalar maxDeltaT(pimpleDict.lookupOrDefault<scalar>("maxDeltaT", GREAT)); scalar maxDeltaT(pimpleDict.lookupOrDefault<scalar>("maxDeltaT", GREAT));
@ -118,7 +118,7 @@ License
if (Yref.found(Yi.name())) if (Yref.found(Yi.name()))
{ {
foundY = true; foundY = true;
scalar Yrefi = readScalar(Yref.lookup(Yi.name())); const scalar Yrefi = Yref.get<scalar>(Yi.name());
rDeltaTY.field() = max rDeltaTY.field() = max
( (

View File

@ -84,7 +84,7 @@ Foam::smoluchowskiJumpTFvPatchScalarField::smoluchowskiJumpTFvPatchScalarField
rhoName_(dict.lookupOrDefault<word>("rho", "rho")), rhoName_(dict.lookupOrDefault<word>("rho", "rho")),
psiName_(dict.lookupOrDefault<word>("psi", "thermo:psi")), psiName_(dict.lookupOrDefault<word>("psi", "thermo:psi")),
muName_(dict.lookupOrDefault<word>("mu", "thermo:mu")), muName_(dict.lookupOrDefault<word>("mu", "thermo:mu")),
accommodationCoeff_(readScalar(dict.lookup("accommodationCoeff"))), accommodationCoeff_(dict.get<scalar>("accommodationCoeff")),
Twall_("Twall", dict, p.size()), Twall_("Twall", dict, p.size()),
gamma_(dict.lookupOrDefault<scalar>("gamma", 1.4)) gamma_(dict.lookupOrDefault<scalar>("gamma", 1.4))
{ {

View File

@ -85,7 +85,7 @@ Foam::maxwellSlipUFvPatchVectorField::maxwellSlipUFvPatchVectorField
psiName_(dict.lookupOrDefault<word>("psi", "thermo:psi")), psiName_(dict.lookupOrDefault<word>("psi", "thermo:psi")),
muName_(dict.lookupOrDefault<word>("mu", "thermo:mu")), muName_(dict.lookupOrDefault<word>("mu", "thermo:mu")),
tauMCName_(dict.lookupOrDefault<word>("tauMC", "tauMC")), tauMCName_(dict.lookupOrDefault<word>("tauMC", "tauMC")),
accommodationCoeff_(readScalar(dict.lookup("accommodationCoeff"))), accommodationCoeff_(dict.get<scalar>("accommodationCoeff")),
Uwall_("Uwall", dict, p.size()), Uwall_("Uwall", dict, p.size()),
thermalCreep_(dict.lookupOrDefault("thermalCreep", true)), thermalCreep_(dict.lookupOrDefault("thermalCreep", true)),
curvature_(dict.lookupOrDefault("curvature", true)) curvature_(dict.lookupOrDefault("curvature", true))

View File

@ -25,8 +25,8 @@
if (!local) if (!local)
{ {
const scalar T0 = readScalar(eosDict.lookup("T0")); const scalar T0 = eosDict.get<scalar>("T0");
const scalar p0 = readScalar(eosDict.lookup("p0")); const scalar p0 = eosDict.get<scalar>("p0");
he = thermo.he(p, pow(p/p0, (gamma - scalar(1))/gamma)*T0); he = thermo.he(p, pow(p/p0, (gamma - scalar(1))/gamma)*T0);
} }

View File

@ -12,7 +12,7 @@ IOdictionary mdEquilibrationDict
) )
); );
scalar targetTemperature = readScalar scalar targetTemperature
( (
mdEquilibrationDict.lookup("targetTemperature") mdEquilibrationDict.get<scalar>("targetTemperature")
); );

View File

@ -31,7 +31,12 @@
coordinates.set coordinates.set
( (
i, i,
coordinateSystem::New(solidRegions[i], thermos[i]) coordinateSystem::New
(
solidRegions[i],
thermos[i],
coordinateSystem::typeName_()
)
); );
tmp<volVectorField> tkappaByCp = tmp<volVectorField> tkappaByCp =
@ -57,7 +62,11 @@
); );
aniAlphas[i].primitiveFieldRef() = aniAlphas[i].primitiveFieldRef() =
coordinates[i].R().transformVector(tkappaByCp()); coordinates[i].transformPrincipal
(
solidRegions[i].cellCentres(),
tkappaByCp()
);
aniAlphas[i].correctBoundaryConditions(); aniAlphas[i].correctBoundaryConditions();
} }
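
The conjugate heat transfer hunks change two things: the coordinate system is built through coordinateSystem::New with an explicit default type, and the anisotropic diffusivity is now transformed with transformPrincipal, which takes the cell centres so that position-dependent systems (cylindrical ones, for example) apply the correct local rotation rather than a single R() tensor. A hedged sketch of the second part, with an illustrative wrapper function and argument types inferred from the hunk above:

    // Sketch only: per-cell transformation of principal (diagonal) values
    // into the global frame, as in the aniAlphas assignment above.
    #include "coordinateSystem.H"
    #include "fvMesh.H"
    #include "volFields.H"

    using namespace Foam;

    void setAnisotropicAlpha
    (
        const coordinateSystem& csys,
        const fvMesh& mesh,
        const volVectorField& kappaByCp,   // principal values per cell
        volSymmTensorField& aniAlpha
    )
    {
        aniAlpha.primitiveFieldRef() =
            csys.transformPrincipal(mesh.cellCentres(), kappaByCp);

        aniAlpha.correctBoundaryConditions();
    }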

View File

@ -15,7 +15,12 @@ if (!thermo.isotropic())
const coordinateSystem& coodSys = coordinates[i]; const coordinateSystem& coodSys = coordinates[i];
aniAlpha.primitiveFieldRef() = aniAlpha.primitiveFieldRef() =
coodSys.R().transformVector(tkappaByCp()); coodSys.transformPrincipal
(
mesh.cellCentres(),
tkappaByCp()
);
aniAlpha.correctBoundaryConditions(); aniAlpha.correctBoundaryConditions();
taniAlpha = tmp<volSymmTensorField> taniAlpha = tmp<volSymmTensorField>

View File

@ -139,10 +139,9 @@ basicKinematicTypeCloud kinematicCloud
scalar alphacMin scalar alphacMin
( (
1.0 1.0
- readScalar - (
(
kinematicCloud.particleProperties().subDict("constantProperties") kinematicCloud.particleProperties().subDict("constantProperties")
.lookup("alphaMax") .get<scalar>("alphaMax")
) )
); );

View File

@ -29,7 +29,7 @@ License
const dictionary& pimpleDict = pimple.dict(); const dictionary& pimpleDict = pimple.dict();
// Maximum flow Courant number // Maximum flow Courant number
scalar maxCo(readScalar(pimpleDict.lookup("maxCo"))); scalar maxCo(pimpleDict.get<scalar>("maxCo"));
// Maximum time scale // Maximum time scale
scalar maxDeltaT(pimpleDict.lookupOrDefault<scalar>("maxDeltaT", GREAT)); scalar maxDeltaT(pimpleDict.lookupOrDefault<scalar>("maxDeltaT", GREAT));

View File

@ -29,7 +29,7 @@ License
const dictionary& pimpleDict = pimple.dict(); const dictionary& pimpleDict = pimple.dict();
// Maximum flow Courant number // Maximum flow Courant number
scalar maxCo(readScalar(pimpleDict.lookup("maxCo"))); scalar maxCo(pimpleDict.get<scalar>("maxCo"));
// Maximum time scale // Maximum time scale
scalar maxDeltaT(pimpleDict.lookupOrDefault<scalar>("maxDeltaT", GREAT)); scalar maxDeltaT(pimpleDict.lookupOrDefault<scalar>("maxDeltaT", GREAT));
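
Required controls such as maxCo now use get<scalar>, while optional ones keep lookupOrDefault with a fallback. A short sketch of that split, using the key names from the hunk above:

    // Sketch only: required vs optional entries in a PIMPLE controls dictionary.
    #include "dictionary.H"
    #include "IOstreams.H"

    using namespace Foam;

    void readTimeControls(const dictionary& pimpleDict)
    {
        // Required: a clear error if "maxCo" is missing or not a scalar
        const scalar maxCo = pimpleDict.get<scalar>("maxCo");

        // Optional: falls back to GREAT when "maxDeltaT" is absent
        const scalar maxDeltaT =
            pimpleDict.lookupOrDefault<scalar>("maxDeltaT", GREAT);

        Info<< "maxCo = " << maxCo << ", maxDeltaT = " << maxDeltaT << nl;
    }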

View File

@ -160,10 +160,9 @@ basicKinematicMPPICCloud kinematicCloud
scalar alphacMin scalar alphacMin
( (
1.0 1.0
- readScalar - (
(
kinematicCloud.particleProperties().subDict("constantProperties") kinematicCloud.particleProperties().subDict("constantProperties")
.lookup("alphaMax") .get<scalar>("alphaMax")
) )
); );

View File

@ -31,7 +31,7 @@ Description
scalar maxAlphaCo scalar maxAlphaCo
( (
readScalar(runTime.controlDict().lookup("maxAlphaCo")) runTime.controlDict().get<scalar>("maxAlphaCo")
); );
scalar alphaCoNum = 0.0; scalar alphaCoNum = 0.0;

View File

@ -2,5 +2,5 @@
scalar maxAcousticCo scalar maxAcousticCo
( (
readScalar(runTime.controlDict().lookup("maxAcousticCo")) runTime.controlDict().get<scalar>("maxAcousticCo")
); );

View File

@ -70,10 +70,10 @@
// Remove the swirl component of velocity for "wedge" cases // Remove the swirl component of velocity for "wedge" cases
if (pimple.dict().found("removeSwirl")) if (pimple.dict().found("removeSwirl"))
{ {
label swirlCmpt(readLabel(pimple.dict().lookup("removeSwirl"))); label swirlCmpt(pimple.dict().get<label>("removeSwirl"));
Info<< "Removing swirl component-" << swirlCmpt << " of U" << endl; Info<< "Removing swirl component-" << swirlCmpt << " of U" << endl;
U.field().replace(swirlCmpt, 0.0); U.field().replace(swirlCmpt, Zero);
} }
U.correctBoundaryConditions(); U.correctBoundaryConditions();
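
Besides the readLabel change, this hunk replaces the literal 0.0 with Zero when blanking the swirl component; Zero is a type-agnostic zero that converts to the right value for any component type. A minimal sketch (the wrapper function is illustrative):

    // Sketch only: zeroing one component of a vector field with Foam::Zero.
    #include "volFields.H"

    using namespace Foam;

    void removeComponent(volVectorField& U, const label cmpt)
    {
        // replace() overwrites the given component across the internal field
        U.field().replace(cmpt, Zero);
        U.correctBoundaryConditions();
    }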

View File

@ -2,5 +2,5 @@
scalar maxAcousticCo scalar maxAcousticCo
( (
readScalar(runTime.controlDict().lookup("maxAcousticCo")) runTime.controlDict().get<scalar>("maxAcousticCo")
); );

View File

@ -73,10 +73,10 @@
// Remove the swirl component of velocity for "wedge" cases // Remove the swirl component of velocity for "wedge" cases
if (pimple.dict().found("removeSwirl")) if (pimple.dict().found("removeSwirl"))
{ {
label swirlCmpt(readLabel(pimple.dict().lookup("removeSwirl"))); label swirlCmpt(pimple.dict().get<label>("removeSwirl"));
Info<< "Removing swirl component-" << swirlCmpt << " of U" << endl; Info<< "Removing swirl component-" << swirlCmpt << " of U" << endl;
U.field().replace(swirlCmpt, 0.0); U.field().replace(swirlCmpt, Zero);
} }
U.correctBoundaryConditions(); U.correctBoundaryConditions();

View File

@ -763,8 +763,8 @@ void Foam::multiphaseMixtureThermo::solve()
const Time& runTime = mesh_.time(); const Time& runTime = mesh_.time();
const dictionary& alphaControls = mesh_.solverDict("alpha"); const dictionary& alphaControls = mesh_.solverDict("alpha");
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
scalar cAlpha(readScalar(alphaControls.lookup("cAlpha"))); scalar cAlpha(alphaControls.get<scalar>("cAlpha"));
volScalarField& alpha = phases_.first(); volScalarField& alpha = phases_.first();

View File

@ -39,8 +39,6 @@ SourceFiles
#include "rhoThermo.H" #include "rhoThermo.H"
#include "volFields.H" #include "volFields.H"
#include "dictionaryEntry.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

View File

@ -1,8 +1,8 @@
const dictionary& alphaControls = mesh.solverDict(alpha1.name()); const dictionary& alphaControls = mesh.solverDict(alpha1.name());
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr"))); label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
bool MULESCorr(alphaControls.lookupOrDefault("MULESCorr", false)); bool MULESCorr(alphaControls.lookupOrDefault("MULESCorr", false));

View File

@ -31,7 +31,7 @@ Description
scalar maxAlphaCo scalar maxAlphaCo
( (
readScalar(runTime.controlDict().lookup("maxAlphaCo")) runTime.controlDict().get<scalar>("maxAlphaCo")
); );
scalar maxAlphaDdt scalar maxAlphaDdt

View File

@ -198,7 +198,7 @@ void Foam::radiation::laserDTRM::initialise()
{ {
case pdGaussian: case pdGaussian:
{ {
sigma_ = readScalar(lookup("sigma")); sigma_ = get<scalar>("sigma");
break; break;
} }
case pdManual: case pdManual:
@ -325,8 +325,8 @@ Foam::radiation::laserDTRM::laserDTRM(const volScalarField& T)
mode_(powerDistNames_.lookup("mode", *this)), mode_(powerDistNames_.lookup("mode", *this)),
DTRMCloud_(mesh_, "DTRMCloud", IDLList<DTRMParticle>()), DTRMCloud_(mesh_, "DTRMCloud", IDLList<DTRMParticle>()),
nParticles_(0), nParticles_(0),
ndTheta_(readLabel(lookup("nTheta"))), ndTheta_(get<label>("nTheta")),
ndr_(readLabel(lookup("nr"))), ndr_(get<label>("nr")),
maxTrackLength_(mesh_.bounds().mag()), maxTrackLength_(mesh_.bounds().mag()),
focalLaserPosition_ focalLaserPosition_
@ -339,7 +339,7 @@ Foam::radiation::laserDTRM::laserDTRM(const volScalarField& T)
Function1<vector>::New("laserDirection", *this) Function1<vector>::New("laserDirection", *this)
), ),
focalLaserRadius_(readScalar(lookup("focalLaserRadius"))), focalLaserRadius_(get<scalar>("focalLaserRadius")),
qualityBeamLaser_ qualityBeamLaser_
( (
lookupOrDefault<scalar>("qualityBeamLaser", 0.0) lookupOrDefault<scalar>("qualityBeamLaser", 0.0)
@ -435,8 +435,8 @@ Foam::radiation::laserDTRM::laserDTRM
mode_(powerDistNames_.lookup("mode", *this)), mode_(powerDistNames_.lookup("mode", *this)),
DTRMCloud_(mesh_, "DTRMCloud", IDLList<DTRMParticle>()), DTRMCloud_(mesh_, "DTRMCloud", IDLList<DTRMParticle>()),
nParticles_(0), nParticles_(0),
ndTheta_(readLabel(lookup("nTheta"))), ndTheta_(get<label>("nTheta")),
ndr_(readLabel(lookup("nr"))), ndr_(get<label>("nr")),
maxTrackLength_(mesh_.bounds().mag()), maxTrackLength_(mesh_.bounds().mag()),
focalLaserPosition_ focalLaserPosition_
@ -448,7 +448,7 @@ Foam::radiation::laserDTRM::laserDTRM
Function1<vector>::New("laserDirection", *this) Function1<vector>::New("laserDirection", *this)
), ),
focalLaserRadius_(readScalar(lookup("focalLaserRadius"))), focalLaserRadius_(get<scalar>("focalLaserRadius")),
qualityBeamLaser_ qualityBeamLaser_
( (
lookupOrDefault<scalar>("qualityBeamLaser", 0.0) lookupOrDefault<scalar>("qualityBeamLaser", 0.0)

View File

@ -53,7 +53,7 @@ Foam::radiation::FresnelLaser::FresnelLaser
) )
: :
reflectionModel(dict, mesh), reflectionModel(dict, mesh),
epsilon_(readScalar(dict.lookup("epsilon"))) epsilon_(dict.get<scalar>("epsilon"))
{} {}

View File

@ -53,8 +53,8 @@ Foam::porousModels::VollerPrakash::VollerPrakash
) )
: :
porousModel(dict, mesh), porousModel(dict, mesh),
Cu_(readScalar(dict.lookup("Cu"))), Cu_(dict.get<scalar>("Cu")),
solidPhase_(dict.lookup("solidPhase")) solidPhase_(dict.get<word>("solidPhase"))
{} {}

View File

@ -191,7 +191,7 @@ void Foam::MultiComponentPhaseModel<BasePhaseModel, phaseThermo>::solveYi
const dictionary& MULEScontrols = mesh.solverDict(alpha1.name()); const dictionary& MULEScontrols = mesh.solverDict(alpha1.name());
scalar cAlpha(readScalar(MULEScontrols.lookup("cYi"))); scalar cAlpha(MULEScontrols.get<scalar>("cYi"));
PtrList<surfaceScalarField> phiYiCorrs(species_.size()); PtrList<surfaceScalarField> phiYiCorrs(species_.size());
const surfaceScalarField& phi = this->fluid().phi(); const surfaceScalarField& phi = this->fluid().phi();

View File

@ -269,8 +269,8 @@ void Foam::multiphaseSystem::solve()
const fvMesh& mesh = this->mesh(); const fvMesh& mesh = this->mesh();
const dictionary& alphaControls = mesh.solverDict("alpha"); const dictionary& alphaControls = mesh.solverDict("alpha");
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr"))); label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
mesh.solverDict("alpha").readEntry("cAlphas", cAlphas_); mesh.solverDict("alpha").readEntry("cAlphas", cAlphas_);
// Reset ddtAlphaMax // Reset ddtAlphaMax

View File

@ -260,13 +260,7 @@ public:
virtual volScalarField& he() virtual volScalarField& he()
{ {
NotImplemented; NotImplemented;
return return const_cast<volScalarField&>(volScalarField::null());
(
const_cast<volScalarField&>
(
volScalarField::null()
)
);
} }
//- Return access to the inernal energy field [J/Kg] //- Return access to the inernal energy field [J/Kg]
@ -274,10 +268,7 @@ public:
virtual const volScalarField& he() const virtual const volScalarField& he() const
{ {
NotImplemented; NotImplemented;
return return volScalarField::null();
(
volScalarField::null()
);
} }
//- Enthalpy/Internal energy //- Enthalpy/Internal energy

View File

@ -50,10 +50,7 @@ Foam::temperaturePhaseChangeTwoPhaseMixture::New
const word modelType const word modelType
( (
phaseChangePropertiesDict.lookup phaseChangePropertiesDict.get<word>("phaseChangeTwoPhaseModel")
(
"phaseChangeTwoPhaseModel"
)
); );
Info<< "Selecting phaseChange model " << modelType << endl; Info<< "Selecting phaseChange model " << modelType << endl;

View File

@ -1,5 +1,5 @@
const dictionary& alphaControls = mesh.solverDict(alpha1.name()); const dictionary& alphaControls = mesh.solverDict(alpha1.name());
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr"))); label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));

View File

@ -154,14 +154,11 @@ Foam::threePhaseInterfaceProperties::threePhaseInterfaceProperties
: :
mixture_(mixture), mixture_(mixture),
cAlpha_ cAlpha_
(
readScalar
( (
mixture.U().mesh().solverDict mixture.U().mesh().solverDict
( (
mixture_.alpha1().name() mixture_.alpha1().name()
).lookup("cAlpha") ).get<scalar>("cAlpha")
)
), ),
sigma12_("sigma12", dimensionSet(1, 0, -2, 0, 0), mixture), sigma12_("sigma12", dimensionSet(1, 0, -2, 0, 0), mixture),
sigma13_("sigma13", dimensionSet(1, 0, -2, 0, 0), mixture), sigma13_("sigma13", dimensionSet(1, 0, -2, 0, 0), mixture),

View File

@ -31,7 +31,7 @@ Description
scalar maxAlphaCo scalar maxAlphaCo
( (
readScalar(runTime.controlDict().lookup("maxAlphaCo")) runTime.controlDict().get<scalar>("maxAlphaCo")
); );
scalar alphaCoNum = 0.0; scalar alphaCoNum = 0.0;

View File

@ -1,3 +1,3 @@
const dictionary& alphaControls = mesh.solverDict(alpha1.name()); const dictionary& alphaControls = mesh.solverDict(alpha1.name());
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));

View File

@ -31,7 +31,7 @@ Description
scalar maxAlphaCo scalar maxAlphaCo
( (
readScalar(runTime.controlDict().lookup("maxAlphaCo")) runTime.controlDict().get<scalar>("maxAlphaCo")
); );
scalar alphaCoNum = 0.0; scalar alphaCoNum = 0.0;

View File

@ -1,8 +1,8 @@
const dictionary& alphaControls = mesh.solverDict(alpha1.name()); const dictionary& alphaControls = mesh.solverDict(alpha1.name());
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr"))); label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
bool MULESCorr(alphaControls.lookupOrDefault("MULESCorr", false)); bool MULESCorr(alphaControls.lookupOrDefault("MULESCorr", false));

View File

@ -844,7 +844,7 @@ void Foam::multiphaseSystem::solve()
const Time& runTime = mesh_.time(); const Time& runTime = mesh_.time();
const dictionary& alphaControls = mesh_.solverDict("alpha"); const dictionary& alphaControls = mesh_.solverDict("alpha");
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
if (nAlphaSubCycles > 1) if (nAlphaSubCycles > 1)
{ {

View File

@ -311,8 +311,8 @@ void Foam::multiphaseMixture::solve()
volScalarField& alpha = phases_.first(); volScalarField& alpha = phases_.first();
const dictionary& alphaControls = mesh_.solverDict("alpha"); const dictionary& alphaControls = mesh_.solverDict("alpha");
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
scalar cAlpha(readScalar(alphaControls.lookup("cAlpha"))); scalar cAlpha(alphaControls.get<scalar>("cAlpha"));
if (nAlphaSubCycles > 1) if (nAlphaSubCycles > 1)
{ {

View File

@ -55,7 +55,7 @@ Foam::wallLubricationModels::Frank::Frank
wallLubricationModel(dict, pair), wallLubricationModel(dict, pair),
Cwd_("Cwd", dimless, dict), Cwd_("Cwd", dimless, dict),
Cwc_("Cwc", dimless, dict), Cwc_("Cwc", dimless, dict),
p_(readScalar(dict.lookup("p"))) p_(dict.get<scalar>("p"))
{} {}

View File

@ -131,20 +131,16 @@ Foam::ThermalPhaseChangePhaseSystem<BasePhaseSystem>::heatTransfer() const
dimensionedScalar(dimensionSet(1,-1,-3,0,0), Zero) dimensionedScalar(dimensionSet(1,-1,-3,0,0), Zero)
); );
if const volScalarField* alphatPtr =
( otherPhase.mesh().findObject<volScalarField>
otherPhase.mesh().foundObject<volScalarField>
(
"alphat." + otherPhase.name()
)
)
{
const volScalarField& alphat =
otherPhase.mesh().lookupObject<volScalarField>
( (
"alphat." + otherPhase.name() "alphat." + otherPhase.name()
); );
if (alphatPtr)
{
const volScalarField& alphat = *alphatPtr;
const fvPatchList& patches = this->mesh().boundary(); const fvPatchList& patches = this->mesh().boundary();
forAll(patches, patchi) forAll(patches, patchi)
{ {
@ -427,20 +423,16 @@ void Foam::ThermalPhaseChangePhaseSystem<BasePhaseSystem>::correctThermo()
dimensionedScalar(dimDensity/dimTime, Zero) dimensionedScalar(dimDensity/dimTime, Zero)
); );
if const volScalarField* alphatPtr =
( phase2.mesh().findObject<volScalarField>
phase2.mesh().foundObject<volScalarField>
(
"alphat." + phase2.name()
)
)
{
const volScalarField& alphat =
phase2.mesh().lookupObject<volScalarField>
( (
"alphat." + phase2.name() "alphat." + phase2.name()
); );
if (alphatPtr)
{
const volScalarField& alphat = *alphatPtr;
const fvPatchList& patches = this->mesh().boundary(); const fvPatchList& patches = this->mesh().boundary();
forAll(patches, patchi) forAll(patches, patchi)
{ {
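
The heat-transfer hunks replace the foundObject/lookupObject pair (one registry search to test, a second to fetch) with a single findObject call, which returns a pointer and nullptr when the field is not registered. A small sketch of the pattern; the wrapper function and its use of the result are illustrative:

    // Sketch only: one registry lookup returning a pointer instead of
    // foundObject() followed by lookupObject().
    #include "objectRegistry.H"
    #include "volFields.H"

    using namespace Foam;

    void reportAlphat(const objectRegistry& obr, const word& phaseName)
    {
        const volScalarField* alphatPtr =
            obr.findObject<volScalarField>("alphat." + phaseName);

        if (alphatPtr)
        {
            const volScalarField& alphat = *alphatPtr;
            Info<< "Found " << alphat.name() << nl;
        }
        else
        {
            Info<< "No alphat field for phase " << phaseName << nl;
        }
    }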

View File

@ -606,7 +606,7 @@ void Foam::multiphaseSystem::solve()
const Time& runTime = mesh_.time(); const Time& runTime = mesh_.time();
const dictionary& alphaControls = mesh_.solverDict("alpha"); const dictionary& alphaControls = mesh_.solverDict("alpha");
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
bool LTS = fv::localEulerDdt::enabled(mesh_); bool LTS = fv::localEulerDdt::enabled(mesh_);

View File

@ -60,7 +60,7 @@ KocamustafaogullariIshii::KocamustafaogullariIshii
) )
: :
departureDiameterModel(), departureDiameterModel(),
phi_(readScalar(dict.lookup("phi"))) phi_(dict.get<scalar>("phi"))
{} {}

View File

@ -52,7 +52,7 @@ Foam::wallBoilingModels::partitioningModels::
Lavieville::Lavieville(const dictionary& dict) Lavieville::Lavieville(const dictionary& dict)
: :
partitioningModel(), partitioningModel(),
alphaCrit_(readScalar(dict.lookup("alphaCrit"))) alphaCrit_(dict.get<scalar>("alphaCrit"))
{} {}

View File

@ -52,8 +52,8 @@ Foam::wallBoilingModels::partitioningModels::
cosine::cosine(const dictionary& dict) cosine::cosine(const dictionary& dict)
: :
partitioningModel(), partitioningModel(),
alphaLiquid1_(readScalar(dict.lookup("alphaLiquid1"))), alphaLiquid1_(dict.get<scalar>("alphaLiquid1")),
alphaLiquid0_(readScalar(dict.lookup("alphaLiquid0"))) alphaLiquid0_(dict.get<scalar>("alphaLiquid0"))
{} {}

View File

@ -52,8 +52,8 @@ Foam::wallBoilingModels::partitioningModels::
linear::linear(const dictionary& dict) linear::linear(const dictionary& dict)
: :
partitioningModel(), partitioningModel(),
alphaLiquid1_(readScalar(dict.lookup("alphaLiquid1"))), alphaLiquid1_(dict.get<scalar>("alphaLiquid1")),
alphaLiquid0_(readScalar(dict.lookup("alphaLiquid0"))) alphaLiquid0_(dict.get<scalar>("alphaLiquid0"))
{} {}

View File

@ -57,9 +57,9 @@ Foam::RASModels::phasePressureModel::phasePressureModel
phase_(phase), phase_(phase),
alphaMax_(readScalar(coeffDict_.lookup("alphaMax"))), alphaMax_(coeffDict_.get<scalar>("alphaMax")),
preAlphaExp_(readScalar(coeffDict_.lookup("preAlphaExp"))), preAlphaExp_(coeffDict_.get<scalar>("preAlphaExp")),
expMax_(readScalar(coeffDict_.lookup("expMax"))), expMax_(coeffDict_.get<scalar>("expMax")),
g0_ g0_
( (
"g0", "g0",

View File

@ -193,8 +193,8 @@ void Foam::twoPhaseSystem::solve()
const dictionary& alphaControls = mesh_.solverDict(alpha1.name()); const dictionary& alphaControls = mesh_.solverDict(alpha1.name());
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr"))); label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
bool LTS = fv::localEulerDdt::enabled(mesh_); bool LTS = fv::localEulerDdt::enabled(mesh_);

View File

@ -1,3 +1,3 @@
const dictionary& alphaControls = mesh.solverDict(alpha1.name()); const dictionary& alphaControls = mesh.solverDict(alpha1.name());
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));

View File

@ -31,7 +31,7 @@ Description
scalar maxAlphaCo scalar maxAlphaCo
( (
readScalar(runTime.controlDict().lookup("maxAlphaCo")) runTime.controlDict().get<scalar>("maxAlphaCo")
); );
scalar alphaCoNum = 0.0; scalar alphaCoNum = 0.0;

View File

@ -55,7 +55,7 @@ Foam::wallLubricationModels::Frank::Frank
wallLubricationModel(dict, pair), wallLubricationModel(dict, pair),
Cwd_("Cwd", dimless, dict), Cwd_("Cwd", dimless, dict),
Cwc_("Cwc", dimless, dict), Cwc_("Cwc", dimless, dict),
p_(readScalar(dict.lookup("p"))) p_(dict.get<scalar>("p"))
{} {}

View File

@ -60,9 +60,9 @@ Foam::RASModels::phasePressureModel::phasePressureModel
phase_(phase), phase_(phase),
alphaMax_(readScalar(coeffDict_.lookup("alphaMax"))), alphaMax_(coeffDict_.get<scalar>("alphaMax")),
preAlphaExp_(readScalar(coeffDict_.lookup("preAlphaExp"))), preAlphaExp_(coeffDict_.get<scalar>("preAlphaExp")),
expMax_(readScalar(coeffDict_.lookup("expMax"))), expMax_(coeffDict_.get<scalar>("expMax")),
g0_ g0_
( (
"g0", "g0",

View File

@ -360,8 +360,8 @@ void Foam::twoPhaseSystem::solve()
alpha1.name() alpha1.name()
); );
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles"))); label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr"))); label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
word alphaScheme("div(phi," + alpha1.name() + ')'); word alphaScheme("div(phi," + alpha1.name() + ')');
word alpharScheme("div(phir," + alpha1.name() + ')'); word alpharScheme("div(phir," + alpha1.name() + ')');

View File

@ -13,7 +13,7 @@
); );
const dictionary& rhoDict(mechanicalProperties.subDict("rho")); const dictionary& rhoDict(mechanicalProperties.subDict("rho"));
word rhoType(rhoDict.lookup("type")); word rhoType(rhoDict.get<word>("type"));
autoPtr<volScalarField> rhoPtr; autoPtr<volScalarField> rhoPtr;
@ -28,7 +28,7 @@
if (rhoType == "uniform") if (rhoType == "uniform")
{ {
scalar rhoValue(readScalar(rhoDict.lookup("value"))); scalar rhoValue(rhoDict.get<scalar>("value"));
rhoPtr.reset rhoPtr.reset
( (
@ -68,7 +68,7 @@
volScalarField& rho = rhoPtr(); volScalarField& rho = rhoPtr();
const dictionary& EDict(mechanicalProperties.subDict("E")); const dictionary& EDict(mechanicalProperties.subDict("E"));
word EType(EDict.lookup("type")); word EType(EDict.get<word>("type"));
autoPtr<volScalarField> EPtr; autoPtr<volScalarField> EPtr;
@ -83,7 +83,7 @@
if (EType == "uniform") if (EType == "uniform")
{ {
scalar rhoEValue(readScalar(EDict.lookup("value"))); scalar rhoEValue(EDict.get<scalar>("value"));
EPtr.reset EPtr.reset
( (
@ -134,11 +134,11 @@
); );
const dictionary& nuDict(mechanicalProperties.subDict("nu")); const dictionary& nuDict(mechanicalProperties.subDict("nu"));
word nuType(nuDict.lookup("type")); word nuType(nuDict.get<word>("type"));
if (nuType == "uniform") if (nuType == "uniform")
{ {
scalar nuValue(readScalar(nuDict.lookup("value"))); scalar nuValue(nuDict.get<scalar>("value"));
nuPtr.reset nuPtr.reset
( (
new volScalarField new volScalarField

View File

@ -58,10 +58,10 @@ if (thermalStress)
); );
const dictionary& CDict(thermalProperties.subDict("C")); const dictionary& CDict(thermalProperties.subDict("C"));
word CType(CDict.lookup("type")); word CType(CDict.get<word>("type"));
if (CType == "uniform") if (CType == "uniform")
{ {
scalar CValue(readScalar(CDict.lookup("value"))); scalar CValue(CDict.get<scalar>("value"));
CPtr.reset CPtr.reset
( (
@ -113,10 +113,10 @@ if (thermalStress)
); );
const dictionary& kDict(thermalProperties.subDict("k")); const dictionary& kDict(thermalProperties.subDict("k"));
word kType(kDict.lookup("type")); word kType(kDict.get<word>("type"));
if (kType == "uniform") if (kType == "uniform")
{ {
scalar rhoKValue(readScalar(kDict.lookup("value"))); scalar rhoKValue(kDict.get<scalar>("value"));
rhoKPtr.reset rhoKPtr.reset
( (
@ -169,11 +169,11 @@ if (thermalStress)
const dictionary& alphaDict(thermalProperties.subDict("alpha")); const dictionary& alphaDict(thermalProperties.subDict("alpha"));
word alphaType(alphaDict.lookup("type")); word alphaType(alphaDict.get<word>("type"));
if (alphaType == "uniform") if (alphaType == "uniform")
{ {
scalar alphaValue(readScalar(alphaDict.lookup("value"))); scalar alphaValue(alphaDict.get<scalar>("value"));
alphaPtr.reset alphaPtr.reset
( (
new volScalarField new volScalarField

View File

@ -1 +1 @@
scalar accFac(readScalar(stressControl.lookup("accelerationFactor"))); scalar accFac(stressControl.get<scalar>("accelerationFactor"));

View File

@ -120,8 +120,8 @@ int main(int argc, char *argv[])
) )
); );
scalar x0 = readScalar(function1Properties.lookup("x0")); scalar x0 = function1Properties.get<scalar>("x0");
scalar x1 = readScalar(function1Properties.lookup("x1")); scalar x1 = function1Properties.get<scalar>("x1");
Info<< "Data entry type: " << function1().type() << nl << endl; Info<< "Data entry type: " << function1().type() << nl << endl;

View File

@ -3,7 +3,7 @@
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | \\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation \\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\/ M anipulation | \\/ M anipulation | Copyright (C) 2018 OpenCFD Ltd.
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
License License
This file is part of OpenFOAM. This file is part of OpenFOAM.
@ -37,6 +37,10 @@ using namespace Foam;
int main(int argc, char *argv[]) int main(int argc, char *argv[])
{ {
Info<<"cwd() " << cwd() << nl;
Info<<"cwd(-P) " << cwd(false) << nl;
Info<<"cwd(-L) " << cwd(true) << nl;
Info<<"rmDir" << nl; Info<<"rmDir" << nl;
rmDir("hmm"); rmDir("hmm");

View File

@ -328,6 +328,17 @@ int main(int argc, char *argv[])
<<"addresses:" << nl; <<"addresses:" << nl;
printAddr(Info, list1); printAddr(Info, list1);
printAddr(Info, list1a); printAddr(Info, list1a);
Info<<"values:" << nl;
print(Info, list1a);
// This should not cause problems (ie, no deletion)
{
auto* ptr = &(list1a.first());
list1a.set(0, ptr);
Info<<"values:" << nl;
print(Info, list1a);
}
PtrList<Scalar> list1b(list1a, true); PtrList<Scalar> list1b(list1a, true);

View File

@ -32,9 +32,39 @@ Description
#include "label.H" #include "label.H"
#include "scalar.H" #include "scalar.H"
#include "List.H" #include "List.H"
#include "ops.H"
#include <functional>
using namespace Foam; using namespace Foam;
// Test for special comparison operation using compareOp
// Normal sort on label, reverse sort on scalar
struct special1
{
typedef Tuple2<label, scalar> type;
bool operator()(const type& a, const type& b) const
{
int val = compareOp<label>()(a.first(), b.first());
return (val == 0) ? (b.second() < a.second()) : (val < 0);
}
};
// Test for special comparison operation using compareOp
// Normal sort on scalar, reverse sort on label
struct special2
{
typedef Tuple2<label, scalar> type;
bool operator()(const type& a, const type& b) const
{
scalar val = compareOp<scalar>()(a.second(), b.second());
return (val == 0) ? (b.first() < a.first()) : (val < 0);
}
};
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Main program: // Main program:
@ -46,19 +76,39 @@ int main()
Info<< "tuple: " Info<< "tuple: "
<< t2 << " " << t2 << " "
<< t2.first() << " " << t2.second() << endl; << t2.first() << " " << t2.second() << nl;
List<indexedScalar> list1(10); // As list. Generated so that we have duplicate indices
forAll(list1, i) List<indexedScalar> list1(3*4);
for (label i = 0; i < 4; ++i)
{ {
list1[i] = indexedScalar(-i, i*i); const label j = (i+1);
const label idx = ((i % 2) ? -1 : 1) * (j);
list1[i] = indexedScalar(idx, (j*j));
list1[i+4] = indexedScalar(idx, 2*j); // duplicate index
list1[i+8] = indexedScalar(idx+12, 2*j); // duplicate value
} }
sort(list1); Info<< "Unsorted tuples:" << nl << list1 << nl;
Info<< "tuples:" << nl Foam::sort(list1, std::less<indexedScalar>());
<< list1
<< endl; Info<< "sorted tuples:" << nl << list1 << nl;
Foam::sort(list1, std::greater<indexedScalar>());
Info<< "reverse sorted tuples:" << nl << list1 << nl;
Foam::sort(list1, special1());
Info<< "special sorted tuples - sort on index, reverse on value:"
<< nl << list1 << nl;
Foam::sort(list1, special2());
Info<< "special sorted tuples - sort on value, reverse on index:"
<< nl << list1 << nl;
Info<< "End\n" << endl; Info<< "End\n" << endl;

View File

@ -40,7 +40,6 @@ int main(int argc, char *argv[])
argList::noBanner(); argList::noBanner();
argList::noParallel(); argList::noParallel();
// argList::noFunctionObjects(); // argList::noFunctionObjects();
argList::removeOption("case");
argList::addOption("label", "value", "Test parsing of label"); argList::addOption("label", "value", "Test parsing of label");
argList::addOption("scalar", "value", "Test parsing of scalar"); argList::addOption("scalar", "value", "Test parsing of scalar");
argList::addOption("string", "value", "Test string lookup"); argList::addOption("string", "value", "Test string lookup");
@ -73,6 +72,15 @@ int main(int argc, char *argv[])
argList args(argc, argv, false, true); argList args(argc, argv, false, true);
Info<< "command-line ("
<< args.options().size() << " options, "
<< args.args().size() << " args)" << nl
<< " " << args.commandLine().c_str() << nl << nl;
Info<< "rootPath: " << args.rootPath() << nl
<< "globalCase: " << args.globalCaseName() << nl
<< "globalPath: " << args.globalPath() << nl
<< nl;
Info<<"have: " Info<<"have: "
<<args.count({"label", "scalar"}) << " options" << nl; <<args.count({"label", "scalar"}) << " options" << nl;

View File

@ -30,12 +30,68 @@ Description
\*---------------------------------------------------------------------------*/ \*---------------------------------------------------------------------------*/
#include "argList.H" #include "argList.H"
#include "coordinateSystem.H" #include "Time.H"
#include "coordinateSystems.H"
#include "identityRotation.H"
#include "indirectCS.H"
#include "Fstream.H" #include "Fstream.H"
#include "IOstreams.H" #include "IOstreams.H"
#include "transform.H"
using namespace Foam; using namespace Foam;
template<class T>
void testTransform(const coordinateSystem& cs, const point& p, const T& val)
{
Info<< " " << pTraits<T>::typeName << ": " << val
<< " transform: " << cs.transform(p, val)
<< " invTransform: " << cs.invTransform(p, val) << nl;
// Info<< " both: " << cs.invTransform(p, cs.transform(p, val)) << nl;
}
void basicTests(const coordinateSystem& cs)
{
cs.writeEntry(cs.name(), Info);
if (isA<coordSystem::indirect>(cs))
{
Info<< "indirect from:" << nl;
dynamicCast<const coordSystem::indirect>(cs).cs()
.writeEntry(cs.name(), Info);
}
Info<< "rotation: " << cs.R() << nl;
List<point> testPoints
({
{1,0,0}, {0,1,0}, {0,0,1}, {1,1,1},
});
for (const point& p : testPoints)
{
Info<< nl
<< " test point: " << p
<< " = local point " << cs.transformPoint(p)
<< " = local coord " << cs.localPosition(p) << nl;
const vector v1(1, 1, 1);
const tensor t1(tensor::I);
const tensor t2(1, 2, 3, 4, 5, 6, 7, 8, 9);
testTransform(cs, p, v1);
testTransform(cs, p, t1);
testTransform(cs, p, t2);
}
Info<< nl;
}
void doTest(const dictionary& dict) void doTest(const dictionary& dict)
{ {
Info<< dict.dictName() << dict << nl; Info<< dict.dictName() << dict << nl;
@ -43,18 +99,42 @@ void doTest(const dictionary& dict)
// Could fail? // Could fail?
const bool throwingIOError = FatalIOError.throwExceptions(); const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions(); const bool throwingError = FatalError.throwExceptions();
try try
{ {
coordinateSystem cs1(dict.dictName(), dict); auto cs1ptr = coordinateSystem::New(dict, "");
coordinateSystem& cs1 = *cs1ptr;
cs1.rename(dict.dictName());
coordinateSystem cs2; basicTests(cs1);
}
catch (Foam::IOerror& err)
{
Info<< "Caught FatalIOError " << err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "Caught FatalError " << err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
}
// Move assign
cs2 = std::move(cs1);
// Info<<cs2 << nl; void doTest(const objectRegistry& obr, const dictionary& dict)
cs2.writeDict(Info, true); {
Info<< nl; Info<< dict.dictName() << dict << nl;
// Could fail?
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try
{
auto cs1ptr = coordinateSystem::New(obr, dict, word::null);
coordinateSystem& cs1 = *cs1ptr;
basicTests(cs1);
} }
catch (Foam::IOerror& err) catch (Foam::IOerror& err)
{ {
@ -78,7 +158,40 @@ int main(int argc, char *argv[])
argList::addArgument("dict .. dictN"); argList::addArgument("dict .. dictN");
argList args(argc, argv, false, true); argList args(argc, argv, false, true);
if (args.size() <= 1) if (args.found("case"))
{
Info<<"using case for tests" << nl;
#include "createTime.H"
const coordinateSystems& systems = coordinateSystems::New(runTime);
Info<< systems.size() << " global systems" << nl;
for (const coordinateSystem& cs : systems)
{
basicTests(cs);
}
// systems.write();
for (label argi=1; argi < args.size(); ++argi)
{
const string& dictFile = args[argi];
IFstream is(dictFile);
dictionary inputDict(is);
forAllConstIters(inputDict, iter)
{
if (iter().isDict())
{
doTest(runTime, iter().dict());
}
}
}
}
else if (args.size() <= 1)
{ {
Info<<"no coordinateSystem dictionaries to expand" << nl; Info<<"no coordinateSystem dictionaries to expand" << nl;
} }

View File

@ -1,7 +1,7 @@
/*--------------------------------*- C++ -*----------------------------------*\ /*--------------------------------*- C++ -*----------------------------------*\
| ========= | | | ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox | | \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: plus | | \\ / O peration | Version: v1806 |
| \\ / A nd | Web: www.OpenFOAM.com | | \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | | | \\/ M anipulation | |
\*---------------------------------------------------------------------------*/ \*---------------------------------------------------------------------------*/
@ -9,13 +9,24 @@ FoamFile
{ {
version 2.0; version 2.0;
format ascii; format ascii;
class dictionary; class IOPtrList<coordinateSystem>; //<-- Older name
object meshQualityDict; object coordinateSystems;
} }
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Include defaults parameters from master dictionary (
#includeEtc "caseDicts/meshQualityDict" cs1
{
type cartesian;
origin (1 2 3);
coordinateRotation
{
type axes;
e1 (0 0 1);
e2 (0 1 0);
}
}
)
// ************************************************************************* // // ************************************************************************* //

View File

@ -1,7 +1,7 @@
/*--------------------------------*- C++ -*----------------------------------*\ /*--------------------------------*- C++ -*----------------------------------*\
| ========= | | | ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox | | \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: plus s | | \\ / O peration | Version: v1806 |
| \\ / A nd | Web: www.OpenFOAM.com | | \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | | | \\/ M anipulation | |
\*---------------------------------------------------------------------------*/ \*---------------------------------------------------------------------------*/
@ -10,20 +10,19 @@ FoamFile
version 2.0; version 2.0;
format ascii; format ascii;
class dictionary; class dictionary;
location "system";
object controlDict; object controlDict;
} }
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
application snappyHexMesh; application simpleFoam;
startFrom startTime; startFrom latestTime;
startTime 0; startTime 0;
stopAt endTime; stopAt endTime;
endTime 2000; endTime 4;
deltaT 1; deltaT 1;

View File

@ -0,0 +1,86 @@
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: v1806 |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
//OLD class IOPtrList<coordinateSystem>;
class coordinateSystems;
object coordinateSystems;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
(
cs1
{
type cartesian;
origin (1 2 3);
rotation
{
type axes;
e1 (0 0 1);
e2 (0 1 0);
}
}
cs2
{
type cartesian;
origin (0 3 5);
e1 (1 2 0);
e2 (2 0 2);
}
cs3
{
type cartesian;
origin (0 3 5);
coordinateRotation // older name
{
type euler;
angles (90 0 0);
}
}
cs4
{
type cylindrical;
origin (0 3 5);
rotation
{
type euler;
angles (90 0 0);
}
}
cyl
{
type cylindrical;
origin (0 0 0);
degrees false;
rotation
{
type axisAngle;
axis (0 0 1);
angle 90;
}
}
ident
{
origin (0 0 0);
rotation
{
type none;
}
}
)
// ************************************************************************* //

View File

@ -1,7 +1,7 @@
/*--------------------------------*- C++ -*----------------------------------*\ /*--------------------------------*- C++ -*----------------------------------*\
| ========= | | | ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox | | \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: plus | | \\ / O peration | Version: v1806 |
| \\ / A nd | Web: www.OpenFOAM.com | | \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | | | \\/ M anipulation | |
\*---------------------------------------------------------------------------*/ \*---------------------------------------------------------------------------*/
@ -10,14 +10,39 @@ FoamFile
version 2.0; version 2.0;
format ascii; format ascii;
class dictionary; class dictionary;
location "system"; object controlDict;
object decomposeParDict;
} }
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
numberOfSubdomains 8; application simpleFoam;
method scotch; startFrom latestTime;
startTime 0;
stopAt endTime;
endTime 4;
deltaT 1;
writeControl timeStep;
writeInterval 100;
purgeWrite 0;
writeFormat binary;
writePrecision 6;
writeCompression off;
timeFormat general;
timePrecision 6;
runTimeModifiable true;
// ************************************************************************* // // ************************************************************************* //

View File

@ -10,12 +10,19 @@ FoamFile
version 2.0; version 2.0;
format ascii; format ascii;
class dictionary; class dictionary;
object testDict; object testCsys1;
} }
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Rotate 90 deg around x: y -> z, z -> -y // Rotate 90 deg around x: y -> z, z -> -y
rot_x90
{
origin (0 0 0);
e1 (1 0 0);
e3 (0 -1 0);
}
rot_x90_axesRotation rot_x90_axesRotation
{ {
origin (0 0 0); origin (0 0 0);
@ -27,13 +34,24 @@ rot_x90_axesRotation
} }
} }
rot_x90_axisAngle
{
origin (0 0 0);
coordinateRotation
{
type axisAngle;
axis (1 0 0); // non-unit also OK
angle 90;
}
}
rot_x90_euler rot_x90_euler
{ {
origin (0 0 0); origin (0 0 0);
coordinateRotation coordinateRotation
{ {
type EulerRotation; type euler;
rotation (0 90 0); // z-x'-z'' angles (0 90 0); // z-x'-z''
} }
} }
@ -51,18 +69,40 @@ rot_z45_axesRotation
} }
} }
rot_z45_axisAngle
{
origin (0 0 0);
coordinateRotation
{
type axisAngle;
axis (0 0 10); // non-unit also OK
angle 45;
}
}
rot_z45_euler rot_z45_euler
{ {
origin (0 0 0); origin (0 0 0);
coordinateRotation coordinateRotation
{ {
type EulerRotation; type euler;
rotation (45 0 0); // z-x'-z'' angles (45 0 0); // z-x'-z''
}
}
rot_z45_starcd
{
origin (0 0 0);
coordinateRotation
{
type starcd;
angles (45 0 0); // z-x'-y''
} }
} }
// Rotate -45 deg around z: x -> (1 -1 0), y = (1 1 0) // Rotate -45 deg around z: x -> (1 -1 0), y = (1 1 0)
rot_zm45_axesRotation rot_zm45_axesRotation
{ {
origin (0 0 0); origin (0 0 0);
@ -74,13 +114,24 @@ rot_zm45_axesRotation
} }
} }
rot_zm45_axisAngle
{
origin (0 0 0);
coordinateRotation
{
type axisAngle;
axis (0 0 10); // non-unit also OK
angle -45;
}
}
rot_zm45_euler rot_zm45_euler
{ {
origin (0 0 0); origin (0 0 0);
coordinateRotation coordinateRotation
{ {
type EulerRotation; type euler;
rotation (-45 0 0); // z-x'-z'' angles (-45 0 0); // z-x'-z''
} }
} }
@ -98,13 +149,35 @@ null_axesRotation
} }
} }
null_axisAngle0
{
origin (0 0 0);
coordinateRotation
{
type axisAngle;
axis (0 0 0); // non-unit also OK
angle 0;
}
}
null_axisAngle1
{
origin (0 0 0);
coordinateRotation
{
type axisAngle;
axis (1 1 1); // non-unit also OK
angle 0;
}
}
null_euler null_euler
{ {
origin (0 0 0); origin (0 0 0);
coordinateRotation coordinateRotation
{ {
type EulerRotation; type euler;
rotation (0 0 0); // z-x'-z'' angles (0 0 0); // z-x'-z''
} }
} }

View File

@ -0,0 +1,59 @@
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: v1806 |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
class dictionary;
object testCsys1;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// This dictionary only works in combination with constant/coordinateSystems
mycs1
{
type indirect;
name cs1;
}
mycs2
{
type indirect;
name cs2;
}
mycs3
{
type indirect;
name cs3;
}
mycyl
{
type indirect;
name cyl;
}
mycy2
{
coordinateSystem
{
type indirect;
name cyl;
}
}
mycy3
{
coordinateSystem cyl;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

View File

@ -82,8 +82,8 @@ int main(int argc, char *argv[])
Info<< "dict1.toc(): " << dict1.name() << " " << dict1.toc() Info<< "dict1.toc(): " << dict1.name() << " " << dict1.toc()
<< endl; << endl;
dictionary dict3(dict2.subDictPtr("boundaryField")); dictionary dict3(dict2.findDict("boundaryField"));
dictionary dict4(dict2.subDictPtr("NONEXISTENT")); dictionary dict4(dict2.findDict("NONEXISTENT"));
Info<< "dictionary construct from pointer" << nl Info<< "dictionary construct from pointer" << nl
<< "ok = " << dict3.name() << " " << dict3.toc() << nl << "ok = " << dict3.name() << " " << dict3.toc() << nl
@ -105,23 +105,17 @@ int main(int argc, char *argv[])
Info<< "Pattern find \"abc\" in top directory : " Info<< "Pattern find \"abc\" in top directory : "
<< dict.lookup("abc") << endl; << dict.lookup("abc") << endl;
Info<< "Pattern find \"abc\" in sub directory : " Info<< "Pattern find \"abc\" in sub directory : "
<< dict.subDict("someDict").lookup("abc") << dict.subDict("someDict").lookup("abc") << nl;
<< endl;
Info<< "Recursive pattern find \"def\" in sub directory : " Info<< "Recursive pattern find \"def\" in sub directory : "
<< dict.subDict("someDict").lookup("def", true) << dict.subDict("someDict").lookup("def", true) << nl;
<< endl;
Info<< "Recursive pattern find \"foo\" in sub directory : " Info<< "Recursive pattern find \"foo\" in sub directory : "
<< dict.subDict("someDict").lookup("foo", true) << dict.subDict("someDict").lookup("foo", true) << nl;
<< endl;
Info<< "Recursive pattern find \"fooz\" in sub directory : " Info<< "Recursive pattern find \"fooz\" in sub directory : "
<< dict.subDict("someDict").lookup("fooz", true) << dict.subDict("someDict").lookup("fooz", true) << nl;
<< endl;
Info<< "Recursive pattern find \"bar\" in sub directory : " Info<< "Recursive pattern find \"bar\" in sub directory : "
<< dict.subDict("someDict").lookup("bar", true) << dict.subDict("someDict").lookup("bar", true) << nl;
<< endl;
Info<< "Recursive pattern find \"xxx\" in sub directory : " Info<< "Recursive pattern find \"xxx\" in sub directory : "
<< dict.subDict("someDict").lookup("xxx", true) << dict.subDict("someDict").lookup("xxx", true) << nl;
<< endl;
} }
} }
else else

View File

@ -48,6 +48,102 @@ void entryInfo(entry* e)
} }
// Try with readScalar
scalar try_readScalar(const dictionary& dict, const word& k)
{
scalar val(-GREAT);
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try
{
val = readScalar(dict.lookup(k));
Info<< "readScalar(" << k << ") = " << val << nl;
}
catch (Foam::IOerror& err)
{
Info<< "readScalar(" << k << ") Caught FatalIOError "
<< err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "readScalar(" << k << ") Caught FatalError "
<< err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
return val;
}
// Try with get<scalar>
scalar try_getScalar(const dictionary& dict, const word& k)
{
scalar val(-GREAT);
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try
{
val = dict.get<scalar>(k);
Info<< "get<scalar>(" << k << ") = " << val << nl;
}
catch (Foam::IOerror& err)
{
Info<< "get<scalar>(" << k << ") Caught FatalIOError "
<< err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "get<scalar>(" << k << ") Caught FatalError "
<< err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
return val;
}
// Try with *entry (from findEntry) and get<scalar>
scalar try_getScalar(const entry* eptr, const word& k)
{
scalar val(-GREAT);
if (!eptr)
{
Info<< "No entry" << k << nl;
return val;
}
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try
{
val = eptr->get<scalar>();
Info<< "entry get<scalar>(" << k << ") = " << val << nl;
}
catch (Foam::IOerror& err)
{
Info<< "entry get<scalar>(" << k << ") Caught FatalIOError "
<< err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "entry get<scalar>(" << k << ") Caught FatalError "
<< err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
return val;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Main program: // Main program:
@ -227,29 +323,9 @@ int main(int argc, char *argv[])
{ {
Info<< nl << "Test some bad input with readScalar()" << nl; Info<< nl << "Test some bad input with readScalar()" << nl;
const bool throwingIOError = FatalIOError.throwExceptions(); try_readScalar(dict2, "good");
const bool throwingError = FatalError.throwExceptions(); // try_readScalar(dict2, "bad");
try_readScalar(dict2, "empty");
try
{
scalar val1 = readScalar(dict2.lookup("good"));
// scalar val2 = readScalar(dict2.lookup("bad"));
scalar val2 = -1;
scalar val3 = readScalar(dict2.lookup("empty"));
Info<< "got good=" << val1 << " bad=" << val2
<< " empty=" << val3 << nl;
}
catch (Foam::IOerror& err)
{
Info<< "Caught FatalIOError " << err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "Caught FatalError " << err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
} }
@ -257,29 +333,19 @@ int main(int argc, char *argv[])
{ {
Info<< nl << "Test some bad input with get<scalar>()" << nl; Info<< nl << "Test some bad input with get<scalar>()" << nl;
const bool throwingIOError = FatalIOError.throwExceptions(); try_getScalar(dict2, "good");
const bool throwingError = FatalError.throwExceptions(); // try_getScalar(dict2, "bad");
try_getScalar(dict2, "empty");
}
try // With findEntry and get<scalar>
{ {
scalar val1 = dict2.get<scalar>("good"); Info<< nl
// scalar val2 = dict2.get<scalar>("bad"); << "Test some bad input with findEntry + get<scalar>()" << nl;
scalar val2 = -1;
scalar val3 = dict2.get<scalar>("empty");
Info<< "got good=" << val1 << " bad=" << val2 try_getScalar(dict2.findEntry("good"), "good");
<< " empty=" << val3 << nl; // try_getScalar(dict2.findEntry("bad"), "bad");
} try_getScalar(dict2.findEntry("empty"), "empty");
catch (Foam::IOerror& err)
{
Info<< "Caught FatalIOError " << err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "Caught FatalError " << err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
} }
} }
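For reference, the error-trapping idiom factored into the new try_readScalar()/try_getScalar() helpers above condenses to the sketch below; the function name and messages are illustrative only, everything else uses calls already present in this commit.

scalar readOrWarn(const dictionary& dict, const word& key)
{
    scalar val(-GREAT);

    // Trap the errors instead of aborting (illustrative helper, not in the commit)
    const bool throwingIOError = FatalIOError.throwExceptions();
    const bool throwingError = FatalError.throwExceptions();

    try
    {
        val = dict.get<scalar>(key);   // FatalIOError on missing or bad input
    }
    catch (Foam::IOerror& err)
    {
        Info<< "caught FatalIOError " << err << nl;
    }
    catch (Foam::error& err)
    {
        Info<< "caught FatalError " << err << nl;
    }

    // Restore the previous behaviour
    FatalError.throwExceptions(throwingError);
    FatalIOError.throwExceptions(throwingIOError);

    return val;
}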


@ -47,6 +47,38 @@ using namespace Foam;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
unsigned testClean(std::initializer_list<Pair<std::string>> tests)
{
unsigned nFail = 0;
for (const Pair<std::string>& test : tests)
{
const std::string& input = test.first();
const std::string& expected = test.second();
fileName cleaned(test.first());
cleaned.clean();
if (cleaned == expected)
{
Info<< "(pass)"
<< " clean " << input << " -> " << cleaned << nl;
}
else
{
Info<< "(fail)"
<< " clean " << input << " -> " << cleaned
<< " expected=" << expected
<< nl;
++nFail;
}
}
return nFail;
}
unsigned testStrip unsigned testStrip
( (
const bool doClean, const bool doClean,
@ -184,6 +216,16 @@ unsigned testRelative(std::initializer_list<Pair<std::string>> tests)
} }
void testDirname(const fileName& input)
{
Info<< "input:" << input
<< " path:" << input.path()
<< " name:\"" << input.name() << '"'
<< " ext:\"" << input.ext() << '"'
<< " components: " << flatOutput(input.components()) << nl;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Main program: // Main program:
@ -196,6 +238,8 @@ int main(int argc, char *argv[])
argList::addBoolOption("relative", "test relative operations"); argList::addBoolOption("relative", "test relative operations");
argList::addBoolOption("system", "test filesystem operations"); argList::addBoolOption("system", "test filesystem operations");
argList::addBoolOption("default", "reinstate default tests"); argList::addBoolOption("default", "reinstate default tests");
argList::addBoolOption("clean", "clean()");
argList::addBoolOption("dirname", "basename/dirname tables");
argList::addNote("runs default tests or specified ones only"); argList::addNote("runs default tests or specified ones only");
#include "setRootCase.H" #include "setRootCase.H"
@ -254,6 +298,20 @@ int main(int argc, char *argv[])
Info<< "All ==> " << file4 << nl; Info<< "All ==> " << file4 << nl;
} }
if (args.found("dirname"))
{
testDirname("");
testDirname(".");
testDirname("abc");
testDirname("/");
testDirname("/abc");
testDirname("abc/def");
testDirname("/abc/def");
testDirname("/abc/def/");
testDirname("/abc///def///");
testDirname("/abc/../def");
}
// Test various ext() methods // Test various ext() methods
if (args.found("ext")) if (args.found("ext"))
@ -381,6 +439,35 @@ int main(int argc, char *argv[])
} }
if (args.found("clean"))
{
Info<< nl << "Test fileName::clean()" << nl << nl;
unsigned nFail = testClean
({
{ "/", "/" },
{ "/abc/", "/abc" },
{ "/abc////def", "/abc/def" },
{ "/abc/def/./ghi/.", "/abc/def/ghi" },
{ "abc/def/./", "abc/def" },
{ "./abc/", "./abc" },
{ "/abc/def/../ghi/jkl/nmo/..", "/abc/ghi/jkl" },
{ "abc/../def/ghi/../jkl", "abc/../def/jkl" },
});
Info<< nl;
if (nFail)
{
Info<< "failed " << nFail;
}
else
{
Info<< "passed all";
}
Info<< " fileName::clean tests" << nl;
}
if (args.found("validate")) if (args.found("validate"))
{ {
unsigned nFail = 0; unsigned nFail = 0;
@ -677,9 +764,27 @@ int main(int argc, char *argv[])
<< " controlDict => " << findEtcFile("controlDict") << nl << " controlDict => " << findEtcFile("controlDict") << nl
<< " badName => " << findEtcFile("badName") << endl; << " badName => " << findEtcFile("badName") << endl;
Info<< "This should emit a fatal error:" << endl; {
Info<< " badName(die) => " << findEtcFile("badName", true) << nl
Info<< nl << "Expect a FatalError for findEtcFile() with a bad name:"
<< nl;
const bool throwingError = FatalError.throwExceptions();
try
{
Info<< " badName(die) => " << flush
<< findEtcFile("<very-badName>", true) << nl
<< endl; << endl;
}
catch (Foam::error& err)
{
Info<< nl << "findEtcFile() Caught FatalError "
<< err << nl << endl;
}
FatalError.throwExceptions(throwingError);
}
Info<< "\nEnd\n" << endl; Info<< "\nEnd\n" << endl;
return 0; return 0;
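For reference, the fileName::clean() behaviour exercised by the new test table above amounts to in-place path normalisation; a minimal sketch using one of the table's own input/expected pairs:

fileName f("/abc/def/./ghi/.");     // input taken from the test table above
f.clean();                          // collapses "//" and "/./", strips trailing "/." in place
Info<< f << nl;                     // prints "/abc/def/ghi"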


@ -50,22 +50,17 @@ bool checkDictionaryContent(const dictionary& dict1, const dictionary& dict2)
} }
forAllConstIter(dictionary, dict1, iter1) for (const entry& entry1 : dict1)
{ {
const entry* entryPtr = dict2.lookupEntryPtr const entry* eptr =
( dict2.findEntry(entry1.keyword(), keyType::LITERAL);
iter1().keyword(),
false,
false
);
if (!entryPtr) if (!eptr)
{ {
return false; return false;
} }
const entry& entry1 = iter1(); const entry& entry2 = *eptr;
const entry& entry2 = *entryPtr;
bool ok = false; bool ok = false;
if (entry1.isDict()) if (entry1.isDict())
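The lookupEntryPtr(key, false, false) calls replaced above become a null-checked findEntry() with an explicit match option; a minimal sketch (the key name is illustrative):

const entry* eptr = dict.findEntry("someKeyword", keyType::LITERAL);
if (eptr)
{
    Info<< "found " << eptr->keyword() << nl;
}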


@ -48,12 +48,6 @@ using namespace Foam;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
bool notEqual(const scalar s1, const scalar s2, const scalar tol)
{
return mag(s1-s2) > tol;
}
// Main program: // Main program:
int main(int argc, char *argv[]) int main(int argc, char *argv[])
{ {
@ -180,6 +174,8 @@ int main(int argc, char *argv[])
// Construct refiner. Read initial cell and point levels. // Construct refiner. Read initial cell and point levels.
hexRef8 meshCutter(mesh); hexRef8 meshCutter(mesh);
// Comparison for inequality
const auto isNotEqual = notEqualOp<scalar>(1e-10);
while (runTime.loop()) while (runTime.loop())
{ {
@ -345,7 +341,7 @@ int main(int argc, char *argv[])
Info<< "Uniform one field min = " << min Info<< "Uniform one field min = " << min
<< " max = " << max << endl; << " max = " << max << endl;
if (notEqual(max, 1.0, 1e-10) || notEqual(min, 1.0, 1e-10)) if (isNotEqual(min, 1) || isNotEqual(max, 1))
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Uniform volVectorField not preserved." << "Uniform volVectorField not preserved."
@ -369,7 +365,7 @@ int main(int argc, char *argv[])
Info<< "Linear profile field min = " << min Info<< "Linear profile field min = " << min
<< " max = " << max << endl; << " max = " << max << endl;
if (notEqual(max, 0.0, 1e-10) || notEqual(min, 0.0, 1e-10)) if (isNotEqual(min, 0) || isNotEqual(max, 0))
{ {
Info<< "Linear profile not preserved." Info<< "Linear profile not preserved."
<< " Min and max should both be 0.0. min:" << min << " Min and max should both be 0.0. min:" << min
@ -390,7 +386,7 @@ int main(int argc, char *argv[])
Info<< "Uniform surface field min = " << min Info<< "Uniform surface field min = " << min
<< " max = " << max << endl; << " max = " << max << endl;
if (notEqual(max, 1.0, 1e-10) || notEqual(min, 1.0, 1e-10)) if (isNotEqual(min, 1) || isNotEqual(max, 1))
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Uniform surfaceScalarField not preserved." << "Uniform surfaceScalarField not preserved."


@ -55,7 +55,7 @@ int main(int argc, char *argv[])
// Create the pendulumAndSpring model from dictionary // Create the pendulumAndSpring model from dictionary
rigidBodyMotion pendulumAndSpring(runTime, pendulumAndSpringDict); rigidBodyMotion pendulumAndSpring(runTime, pendulumAndSpringDict);
label nIter(readLabel(pendulumAndSpringDict.lookup("nIter"))); label nIter(pendulumAndSpringDict.get<label>("nIter"));
Info<< pendulumAndSpring << endl; Info<< pendulumAndSpring << endl;
Info<< "// Joint state " << endl; Info<< "// Joint state " << endl;


@ -54,7 +54,7 @@ int main(int argc, char *argv[])
// Create the sphericalJoint model from dictionary // Create the sphericalJoint model from dictionary
rigidBodyMotion sphericalJoint(runTime, sphericalJointDict); rigidBodyMotion sphericalJoint(runTime, sphericalJointDict);
label nIter(readLabel(sphericalJointDict.lookup("nIter"))); label nIter(sphericalJointDict.get<label>("nIter"));
Info<< sphericalJoint << endl; Info<< sphericalJoint << endl;


@ -49,7 +49,7 @@ int main(int argc, char *argv[])
// Create the spring model from dictionary // Create the spring model from dictionary
rigidBodyMotion spring(springDict); rigidBodyMotion spring(springDict);
label nIter(readLabel(springDict.lookup("nIter"))); label nIter(springDict.get<label>("nIter"));
Info<< spring << endl; Info<< spring << endl;


@ -33,6 +33,7 @@ Description
#include "nil.H" #include "nil.H"
#include "IOstreams.H" #include "IOstreams.H"
#include "PstreamBuffers.H" #include "PstreamBuffers.H"
#include "argList.H"
#include "Time.H" #include "Time.H"
namespace Foam namespace Foam
@ -63,6 +64,13 @@ int main(int argc, char *argv[])
nil x; nil x;
cout<<"nil:" << sizeof(x) << nl; cout<<"nil:" << sizeof(x) << nl;
} }
{
argList x(argc, argv);
cout<<"argList:" << sizeof(x) << nl;
TimePaths y(x);
cout<<"TimePaths:" << sizeof(y) << nl;
}
{ {
zero x; zero x;
cout<<"zero:" << sizeof(x) << nl; cout<<"zero:" << sizeof(x) << nl;


@ -358,8 +358,7 @@ int main(int argc, char *argv[])
if (Pstream::parRun()) if (Pstream::parRun())
{ {
sourceCaseDir = sourceCaseDir =
sourceCaseDir sourceCaseDir/("processor" + Foam::name(Pstream::myProcNo()));
/"processor" + Foam::name(Pstream::myProcNo());
} }
wordList sourcePatches; wordList sourcePatches;
dict.readEntry("sourcePatches", sourcePatches); dict.readEntry("sourcePatches", sourcePatches);
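The per-processor case path simplification seen here recurs throughout the decomposition and reconstruction tools below; a minimal sketch (case name and processor index are illustrative):

const fileName caseDir("someCase");     // illustrative
const label proci = 2;                  // illustrative

// "someCase/processor2" - no intermediate fileName(word("processor") + ...) wrapping needed
const fileName procCase(caseDir/("processor" + Foam::name(proci)));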


@ -63,9 +63,13 @@ Foam::cellSizeFunction::cellSizeFunction
defaultCellSize_(defaultCellSize), defaultCellSize_(defaultCellSize),
regionIndices_(regionIndices), regionIndices_(regionIndices),
sideMode_(), sideMode_(),
priority_(cellSizeFunctionDict.get<label>("priority", true)) priority_
(
cellSizeFunctionDict.get<label>("priority", keyType::REGEX_RECURSIVE)
)
{ {
const word mode = cellSizeFunctionDict.get<word>("mode", true); const word mode =
cellSizeFunctionDict.get<word>("mode", keyType::REGEX_RECURSIVE);
if (surface_.hasVolumeType()) if (surface_.hasVolumeType())
{ {


@ -837,7 +837,7 @@ void Foam::conformalVoronoiMesh::checkCellSizing()
= dict.subDict("meshQualityControls"); = dict.subDict("meshQualityControls");
const scalar maxNonOrtho = const scalar maxNonOrtho =
meshQualityDict.get<scalar>("maxNonOrtho", true); meshQualityDict.get<scalar>("maxNonOrtho", keyType::REGEX_RECURSIVE);
label nWrongFaces = 0; label nWrongFaces = 0;
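Throughout this commit the old (recursive, patternMatch) bool arguments become explicit keyType match options; a condensed sketch of the three variants used here (key names illustrative; the recursive form, as read from its use above, also searches the enclosing dictionary scope):

// Exact keyword only, this dictionary only
const entry* e1 = dict.findEntry("key1", keyType::LITERAL);

// Allow wildcard/regex keywords to match
const entry* e2 = dict.findEntry("key2", keyType::REGEX);

// Regex matching, also searching the enclosing dictionary scope (assumed semantics)
const scalar maxNonOrtho =
    dict.get<scalar>("maxNonOrtho", keyType::REGEX_RECURSIVE);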


@ -339,7 +339,7 @@ Foam::conformationSurfaces::conformationSurfaces
{ {
const word& geomName = allGeometry_.names()[geomI]; const word& geomName = allGeometry_.names()[geomI];
const entry* ePtr = surfacesDict.lookupEntryPtr(geomName, false, true); const entry* ePtr = surfacesDict.findEntry(geomName, keyType::REGEX);
if (ePtr) if (ePtr)
{ {


@ -125,7 +125,7 @@ autoPtr<refinementSurfaces> createRefinementSurfaces
{ {
const word& geomName = allGeometry.names()[geomi]; const word& geomName = allGeometry.names()[geomi];
const entry* ePtr = surfacesDict.lookupEntryPtr(geomName, false, true); const entry* ePtr = surfacesDict.findEntry(geomName, keyType::REGEX);
if (ePtr) if (ePtr)
{ {


@ -170,9 +170,10 @@ class dictAndKeyword
word key_; word key_;
public: public:
dictAndKeyword(const word& scopedName) dictAndKeyword(const word& scopedName)
{ {
string::size_type i = scopedName.rfind('/'); auto i = scopedName.rfind('/');
if (i == string::npos) if (i == string::npos)
{ {
i = scopedName.rfind('.'); i = scopedName.rfind('.');
@ -212,7 +213,7 @@ const dictionary& lookupScopedDict
return dict; return dict;
} }
const entry* eptr = dict.lookupScopedEntryPtr(subDictName, false, false); const entry* eptr = dict.findScoped(subDictName, keyType::LITERAL);
if (!eptr || !eptr->isDict()) if (!eptr || !eptr->isDict())
{ {
@ -231,7 +232,7 @@ void removeDict(dictionary& dict, const dictionary& dictToRemove)
{ {
for (const entry& refEntry : dictToRemove) for (const entry& refEntry : dictToRemove)
{ {
auto finder = dict.search(refEntry.keyword(), false, false); auto finder = dict.search(refEntry.keyword(), keyType::LITERAL);
bool purge = false; bool purge = false;
@ -357,8 +358,7 @@ int main(int argc, char *argv[])
bool changed = false; bool changed = false;
// Read but preserve headers // Read but preserve headers
dictionary dict; dictionary dict(dictFile(), true);
dict.read(dictFile(), true);
if (listIncludes) if (listIncludes)
{ {
@ -455,12 +455,7 @@ int main(int argc, char *argv[])
changed = true; changed = true;
// Print the changed entry // Print the changed entry
const auto finder = dict.csearchScoped const auto finder = dict.csearchScoped(scopedName, keyType::REGEX);
(
scopedName,
false,
true // Support wildcards
);
if (finder.found()) if (finder.found())
{ {
@ -489,8 +484,8 @@ int main(int argc, char *argv[])
const dictionary& d1(lookupScopedDict(dict, dAk.dict())); const dictionary& d1(lookupScopedDict(dict, dAk.dict()));
const dictionary& d2(lookupScopedDict(diffDict, dAk.dict())); const dictionary& d2(lookupScopedDict(diffDict, dAk.dict()));
const entry* e1Ptr = d1.lookupEntryPtr(dAk.key(), false, true); const entry* e1Ptr = d1.findEntry(dAk.key(), keyType::REGEX);
const entry* e2Ptr = d2.lookupEntryPtr(dAk.key(), false, true); const entry* e2Ptr = d2.findEntry(dAk.key(), keyType::REGEX);
if (e1Ptr && e2Ptr) if (e1Ptr && e2Ptr)
{ {
@ -509,12 +504,7 @@ int main(int argc, char *argv[])
} }
} }
const auto finder = dict.csearchScoped const auto finder = dict.csearchScoped(scopedName, keyType::REGEX);
(
scopedName,
false,
true // Support wildcards
);
if (!finder.found()) if (!finder.found())
{ {
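The scoped dictionary access in foamDictionary follows the same pattern via findScoped() and csearchScoped(); a minimal sketch (the scoped name is illustrative):

const entry* eptr = dict.findScoped("subDict/key", keyType::LITERAL);

const auto finder = dict.csearchScoped("subDict/key", keyType::REGEX);
if (finder.found())
{
    Info<< "found subDict/key" << nl;
}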


@ -589,7 +589,7 @@ int main(int argc, char *argv[])
( (
Time::controlDictName, Time::controlDictName,
args.rootPath(), args.rootPath(),
args.caseName()/fileName(word("processor") + name(proci)) args.caseName()/("processor" + Foam::name(proci))
); );
processorDb.setTime(runTime); processorDb.setTime(runTime);
@ -1016,7 +1016,7 @@ int main(int argc, char *argv[])
Time::controlDictName, Time::controlDictName,
args.rootPath(), args.rootPath(),
args.caseName() args.caseName()
/fileName(word("processor") + name(proci)) / ("processor" + Foam::name(proci))
) )
); );
} }
@ -1374,8 +1374,8 @@ int main(int argc, char *argv[])
( (
Time::controlDictName, Time::controlDictName,
args.rootPath(), args.rootPath(),
args.caseName()/ args.caseName()
fileName(word("processor") + name(procI)) / ("processor" + Foam::name(procI))
); );
processorDb.setTime(runTime); processorDb.setTime(runTime);


@ -305,7 +305,7 @@ bool Foam::domainDecomposition::writeDecomposition(const bool decomposeSets)
fileName processorCasePath fileName processorCasePath
( (
time().caseName()/fileName(word("processor") + Foam::name(proci)) time().caseName()/("processor" + Foam::name(proci))
); );
// create a database // create a database


@ -50,7 +50,7 @@ void Foam::faMeshDecomposition::distributeFaces()
( (
Time::controlDictName, Time::controlDictName,
time().rootPath(), time().rootPath(),
time().caseName()/fileName(word("processor") + Foam::name(procI)) time().caseName()/("processor" + Foam::name(procI))
); );
fvMesh procMesh fvMesh procMesh
@ -250,7 +250,7 @@ void Foam::faMeshDecomposition::decomposeMesh()
( (
Time::controlDictName, Time::controlDictName,
time().rootPath(), time().rootPath(),
time().caseName()/fileName(word("processor") + Foam::name(procI)) time().caseName()/("processor" + Foam::name(procI))
); );
fvMesh procFvMesh fvMesh procFvMesh
@ -1059,8 +1059,7 @@ void Foam::faMeshDecomposition::decomposeMesh()
{ {
fileName processorCasePath fileName processorCasePath
( (
time().caseName()/fileName(word("processor") time().caseName()/("processor" + Foam::name(procI))
+ Foam::name(procI))
); );
// create a database // create a database
@ -1179,7 +1178,7 @@ bool Foam::faMeshDecomposition::writeDecomposition()
fileName processorCasePath fileName processorCasePath
( (
time().caseName()/fileName(word("processor") + Foam::name(procI)) time().caseName()/("processor" + Foam::name(procI))
); );
// create a database // create a database


@ -223,7 +223,7 @@ int main(int argc, char *argv[])
( (
Time::controlDictName, Time::controlDictName,
args.rootPath(), args.rootPath(),
args.caseName()/fileName(word("processor") + name(proci)) args.caseName()/("processor" + Foam::name(proci))
) )
); );
} }


@ -550,7 +550,7 @@ int main(int argc, char *argv[])
forAll(databases, proci) forAll(databases, proci)
{ {
Info<< "Reading database " Info<< "Reading database "
<< args.caseName()/fileName(word("processor") + name(proci)) << args.caseName()/("processor" + Foam::name(proci))
<< endl; << endl;
databases.set databases.set
@ -560,7 +560,7 @@ int main(int argc, char *argv[])
( (
Time::controlDictName, Time::controlDictName,
args.rootPath(), args.rootPath(),
args.caseName()/fileName(word("processor") + name(proci)) args.caseName()/("processor" + Foam::name(proci))
) )
); );
} }


@ -336,8 +336,7 @@ void determineDecomposition
{ {
Info<< "Setting caseName to " << baseRunTime.caseName() Info<< "Setting caseName to " << baseRunTime.caseName()
<< " to read decomposeParDict" << endl; << " to read decomposeParDict" << endl;
const_cast<Time&>(mesh.time()).TimePaths::caseName() = const_cast<Time&>(mesh.time()).caseName() = baseRunTime.caseName();
baseRunTime.caseName();
} }
scalarField cellWeights; scalarField cellWeights;
@ -366,8 +365,7 @@ void determineDecomposition
if (Pstream::master() && decompose) if (Pstream::master() && decompose)
{ {
Info<< "Restoring caseName to " << proc0CaseName << endl; Info<< "Restoring caseName to " << proc0CaseName << endl;
const_cast<Time&>(mesh.time()).TimePaths::caseName() = const_cast<Time&>(mesh.time()).caseName() = proc0CaseName;
proc0CaseName;
} }
// Dump decomposition to volScalarField // Dump decomposition to volScalarField
@ -383,10 +381,10 @@ void determineDecomposition
Time& tm = const_cast<Time&>(mesh.time()); Time& tm = const_cast<Time&>(mesh.time());
tm.TimePaths::caseName() = baseRunTime.caseName(); tm.caseName() = baseRunTime.caseName();
writeDecomposition("cellDist", mesh, decomp); writeDecomposition("cellDist", mesh, decomp);
Info<< "Restoring caseName to " << proc0CaseName << endl; Info<< "Restoring caseName to " << proc0CaseName << endl;
tm.TimePaths::caseName() = proc0CaseName; tm.caseName() = proc0CaseName;
} }
} }
else else
@ -908,12 +906,12 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// Get original objects (before incrementing time!) // Get original objects (before incrementing time!)
if (Pstream::master() && decompose) if (Pstream::master() && decompose)
{ {
runTime.TimePaths::caseName() = baseRunTime.caseName(); runTime.caseName() = baseRunTime.caseName();
} }
IOobjectList objects(mesh, runTime.timeName()); IOobjectList objects(mesh, runTime.timeName());
if (Pstream::master() && decompose) if (Pstream::master() && decompose)
{ {
runTime.TimePaths::caseName() = proc0CaseName; runTime.caseName() = proc0CaseName;
} }
Info<< "From time " << runTime.timeName() Info<< "From time " << runTime.timeName()
@ -932,7 +930,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
if (Pstream::master() && decompose) if (Pstream::master() && decompose)
{ {
runTime.TimePaths::caseName() = baseRunTime.caseName(); runTime.caseName() = baseRunTime.caseName();
} }
readFields readFields
( (
@ -1112,7 +1110,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
if (Pstream::master() && decompose) if (Pstream::master() && decompose)
{ {
runTime.TimePaths::caseName() = proc0CaseName; runTime.caseName() = proc0CaseName;
} }
} }
@ -1192,7 +1190,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
{ {
Info<< "Setting caseName to " << baseRunTime.caseName() Info<< "Setting caseName to " << baseRunTime.caseName()
<< " to write reconstructed mesh and fields." << endl; << " to write reconstructed mesh and fields." << endl;
runTime.TimePaths::caseName() = baseRunTime.caseName(); runTime.caseName() = baseRunTime.caseName();
mesh.write(); mesh.write();
topoSet::removeFiles(mesh); topoSet::removeFiles(mesh);
@ -1212,7 +1210,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// Now we've written all. Reset caseName on master // Now we've written all. Reset caseName on master
Info<< "Restoring caseName to " << proc0CaseName << endl; Info<< "Restoring caseName to " << proc0CaseName << endl;
runTime.TimePaths::caseName() = proc0CaseName; runTime.caseName() = proc0CaseName;
} }
} }
else else
@ -1258,7 +1256,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// Read refinement data // Read refinement data
if (Pstream::master() && decompose) if (Pstream::master() && decompose)
{ {
runTime.TimePaths::caseName() = baseRunTime.caseName(); runTime.caseName() = baseRunTime.caseName();
} }
IOobject io IOobject io
( (
@ -1274,7 +1272,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
hexRef8Data refData(io); hexRef8Data refData(io);
if (Pstream::master() && decompose) if (Pstream::master() && decompose)
{ {
runTime.TimePaths::caseName() = proc0CaseName; runTime.caseName() = proc0CaseName;
} }
// Make sure all processors have valid data (since only some will // Make sure all processors have valid data (since only some will
@ -1294,13 +1292,13 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
{ {
Info<< "Setting caseName to " << baseRunTime.caseName() Info<< "Setting caseName to " << baseRunTime.caseName()
<< " to write reconstructed refinement data." << endl; << " to write reconstructed refinement data." << endl;
runTime.TimePaths::caseName() = baseRunTime.caseName(); runTime.caseName() = baseRunTime.caseName();
refData.write(); refData.write();
// Now we've written all. Reset caseName on master // Now we've written all. Reset caseName on master
Info<< "Restoring caseName to " << proc0CaseName << endl; Info<< "Restoring caseName to " << proc0CaseName << endl;
runTime.TimePaths::caseName() = proc0CaseName; runTime.caseName() = proc0CaseName;
} }
} }
else else
@ -1314,7 +1312,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// // Read sets // // Read sets
// if (Pstream::master() && decompose) // if (Pstream::master() && decompose)
// { // {
// runTime.TimePaths::caseName() = baseRunTime.caseName(); // runTime.caseName() = baseRunTime.caseName();
// } // }
// IOobjectList objects(mesh, mesh.facesInstance(), "polyMesh/sets"); // IOobjectList objects(mesh, mesh.facesInstance(), "polyMesh/sets");
// //
@ -1323,7 +1321,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// //
// if (Pstream::master() && decompose) // if (Pstream::master() && decompose)
// { // {
// runTime.TimePaths::caseName() = proc0CaseName; // runTime.caseName() = proc0CaseName;
// } // }
// //
// forAll(cellSets, i) // forAll(cellSets, i)
@ -1337,7 +1335,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// { // {
// Info<< "Setting caseName to " << baseRunTime.caseName() // Info<< "Setting caseName to " << baseRunTime.caseName()
// << " to write reconstructed refinement data." << endl; // << " to write reconstructed refinement data." << endl;
// runTime.TimePaths::caseName() = baseRunTime.caseName(); // runTime.caseName() = baseRunTime.caseName();
// //
// forAll(cellSets, i) // forAll(cellSets, i)
// { // {
@ -1346,7 +1344,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// //
// // Now we've written all. Reset caseName on master // // Now we've written all. Reset caseName on master
// Info<< "Restoring caseName to " << proc0CaseName << endl; // Info<< "Restoring caseName to " << proc0CaseName << endl;
// runTime.TimePaths::caseName() = proc0CaseName; // runTime.caseName() = proc0CaseName;
// } // }
// } // }
// else // else
@ -2931,7 +2929,7 @@ int main(int argc, char *argv[])
{ {
Info<< "Setting caseName to " << baseRunTime.caseName() Info<< "Setting caseName to " << baseRunTime.caseName()
<< " to find undecomposed mesh" << endl; << " to find undecomposed mesh" << endl;
runTime.TimePaths::caseName() = baseRunTime.caseName(); runTime.caseName() = baseRunTime.caseName();
} }
masterInstDir = runTime.findInstance masterInstDir = runTime.findInstance
@ -2944,7 +2942,7 @@ int main(int argc, char *argv[])
if (decompose) if (decompose)
{ {
Info<< "Restoring caseName to " << proc0CaseName << endl; Info<< "Restoring caseName to " << proc0CaseName << endl;
runTime.TimePaths::caseName() = proc0CaseName; runTime.caseName() = proc0CaseName;
} }
} }
Pstream::scatter(masterInstDir); Pstream::scatter(masterInstDir);
@ -2970,7 +2968,7 @@ int main(int argc, char *argv[])
{ {
Info<< "Setting caseName to " << baseRunTime.caseName() Info<< "Setting caseName to " << baseRunTime.caseName()
<< " to read undecomposed mesh" << endl; << " to read undecomposed mesh" << endl;
runTime.TimePaths::caseName() = baseRunTime.caseName(); runTime.caseName() = baseRunTime.caseName();
} }
autoPtr<fvMesh> meshPtr = loadOrCreateMesh autoPtr<fvMesh> meshPtr = loadOrCreateMesh
@ -2987,7 +2985,7 @@ int main(int argc, char *argv[])
if (Pstream::master() && decompose) if (Pstream::master() && decompose)
{ {
Info<< "Restoring caseName to " << proc0CaseName << endl; Info<< "Restoring caseName to " << proc0CaseName << endl;
runTime.TimePaths::caseName() = proc0CaseName; runTime.caseName() = proc0CaseName;
} }
fvMesh& mesh = meshPtr(); fvMesh& mesh = meshPtr();
@ -3046,7 +3044,7 @@ int main(int argc, char *argv[])
// Detect lagrangian fields // Detect lagrangian fields
if (Pstream::master() && decompose) if (Pstream::master() && decompose)
{ {
runTime.TimePaths::caseName() = baseRunTime.caseName(); runTime.caseName() = baseRunTime.caseName();
} }
parLagrangianRedistributor::findClouds parLagrangianRedistributor::findClouds
( (
@ -3069,7 +3067,7 @@ int main(int argc, char *argv[])
); );
if (Pstream::master() && decompose) if (Pstream::master() && decompose)
{ {
runTime.TimePaths::caseName() = proc0CaseName; runTime.caseName() = proc0CaseName;
} }
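All of the caseName() changes above implement one idiom: when decomposing, the master temporarily points runTime at the undecomposed case, performs the read or write, then restores the processor0 case name. Reduced to a sketch (using only names from the code above):

if (Pstream::master() && decompose)
{
    runTime.caseName() = baseRunTime.caseName();   // undecomposed (parent) case
}

// ... read or write undecomposed data here ...

if (Pstream::master() && decompose)
{
    runTime.caseName() = proc0CaseName;            // restore processor0 case
}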


@ -88,7 +88,7 @@ int main(int argc, char *argv[])
const label nProcs = fileHandler().nProcs(args.path()); const label nProcs = fileHandler().nProcs(args.path());
#else #else
label nProcs = 0; label nProcs = 0;
while (isDir(args.path()/(word("processor") + name(nProcs)))) while (isDir(args.path()/("processor" + Foam::name(nProcs))))
{ {
++nProcs; ++nProcs;
} }
@ -106,7 +106,7 @@ int main(int argc, char *argv[])
( (
Time::controlDictName, Time::controlDictName,
args.rootPath(), args.rootPath(),
args.caseName()/fileName(word("processor") + name(proci)) args.caseName()/("processor" + Foam::name(proci))
) )
); );
} }
@ -183,7 +183,7 @@ int main(int argc, char *argv[])
// Assumed to be good if it has 'profiling' sub-dict // Assumed to be good if it has 'profiling' sub-dict
const dictionary* ptr = dict.subDictPtr(blockNameProfiling); const dictionary* ptr = dict.findDict(blockNameProfiling);
if (ptr) if (ptr)
{ {
++nDict; ++nDict;
@ -295,13 +295,12 @@ int main(int argc, char *argv[])
for (const dictionary& procDict : profiles) for (const dictionary& procDict : profiles)
{ {
const dictionary* inDictPtr = const dictionary* inDictPtr = procDict.findDict(level1Name);
procDict.subDictPtr(level1Name);
if (inDictPtr && hasDictEntries) if (inDictPtr && hasDictEntries)
{ {
// descend to the next level as required // Descend to the next level as required
inDictPtr = inDictPtr->subDictPtr(level2Name); inDictPtr = inDictPtr->findDict(level2Name);
} }
if (!inDictPtr) if (!inDictPtr)
@ -313,16 +312,13 @@ int main(int argc, char *argv[])
for (const word& tag : tags) for (const word& tag : tags)
{ {
const entry* eptr = inDictPtr->lookupEntryPtr scalar val;
(
tag,
false,
false
);
if (eptr) if
(
inDictPtr->readIfPresent(tag, val, keyType::LITERAL)
)
{ {
const scalar val = readScalar(eptr->stream());
stats(tag).append(val); stats(tag).append(val);
} }
} }
@ -339,7 +335,7 @@ int main(int argc, char *argv[])
if (hasDictEntries) if (hasDictEntries)
{ {
outputDict.add(level2Name, level1Dict.subDict(level2Name)); outputDict.add(level2Name, level1Dict.subDict(level2Name));
outDictPtr = outputDict.subDictPtr(level2Name); outDictPtr = outputDict.findDict(level2Name);
} }
else else
{ {
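The profiling reader now uses optional lookups throughout; a minimal sketch of the two calls involved (key and sub-dictionary names illustrative):

scalar val = 0;
if (dict.readIfPresent("someTag", val, keyType::LITERAL))
{
    Info<< "someTag = " << val << nl;
}

const dictionary* dictPtr = dict.findDict("profiling");   // nullptr when absent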


@ -235,10 +235,9 @@ bool merge
// Save current (non-wildcard) keys before adding items. // Save current (non-wildcard) keys before adding items.
wordHashSet thisKeysSet; wordHashSet thisKeysSet;
{ {
List<keyType> keys = thisDict.keys(false); for (const word& k : thisDict.keys(false))
forAll(keys, i)
{ {
thisKeysSet.insert(keys[i]); thisKeysSet.insert(k);
} }
} }
@ -261,25 +260,20 @@ bool merge
} }
else if (literalRE || !(key.isPattern() || shortcuts.found(key))) else if (literalRE || !(key.isPattern() || shortcuts.found(key)))
{ {
entry* entryPtr = thisDict.lookupEntryPtr entry* eptr = thisDict.findEntry(key, keyType::LITERAL);
(
key,
false, // recursive
false // patternMatch
);
if (entryPtr) if (eptr)
{ {
// Mark thisDict entry as having been match for wildcard // Mark thisDict entry as having been match for wildcard
// handling later on. // handling later on.
thisKeysSet.erase(entryPtr->keyword()); thisKeysSet.erase(eptr->keyword());
if if
( (
addEntry addEntry
( (
thisDict, thisDict,
*entryPtr, *eptr,
mergeIter(), mergeIter(),
literalRE, literalRE,
shortcuts shortcuts
@ -310,7 +304,7 @@ bool merge
// Pass 2. Wildcard or shortcut matches (if any) on any non-match keys. // Pass 2. Wildcard or shortcut matches (if any) on any non-match keys.
if (!literalRE && thisKeysSet.size() > 0) if (!literalRE && thisKeysSet.size())
{ {
// Pick up remaining dictionary entries // Pick up remaining dictionary entries
wordList thisKeys(thisKeysSet.toc()); wordList thisKeys(thisKeysSet.toc());
@ -336,10 +330,10 @@ bool merge
); );
// Remove all matches // Remove all matches
forAll(matches, i) for (const label matchi : matches)
{ {
const word& thisKey = thisKeys[matches[i]]; const word& k = thisKeys[matchi];
thisKeysSet.erase(thisKey); thisKeysSet.erase(k);
} }
changed = true; changed = true;
} }
@ -358,21 +352,18 @@ bool merge
); );
// Add all matches // Add all matches
forAll(matches, i) for (const label matchi : matches)
{ {
const word& thisKey = thisKeys[matches[i]]; const word& k = thisKeys[matchi];
entry& thisEntry = const_cast<entry&> entry* eptr = thisDict.findEntry(k, keyType::LITERAL);
(
thisDict.lookupEntry(thisKey, false, false)
);
if if
( (
addEntry addEntry
( (
thisDict, thisDict,
thisEntry, *eptr,
mergeIter(), mergeIter(),
literalRE, literalRE,
HashTable<wordList>(0) // no shortcuts HashTable<wordList>(0) // no shortcuts
@ -627,8 +618,7 @@ int main(int argc, char *argv[])
fieldDict.lookupEntry fieldDict.lookupEntry
( (
doneKeys[i], doneKeys[i],
false, keyType::REGEX
true
).clone() ).clone()
); );
fieldDict.remove(doneKeys[i]); fieldDict.remove(doneKeys[i]);


@ -396,7 +396,7 @@ int main(int argc, char *argv[])
( (
Time::controlDictName, Time::controlDictName,
rootDirSource, rootDirSource,
caseDirSource/fileName(word("processor") + name(proci)) caseDirSource/("processor" + Foam::name(proci))
); );
#include "setTimeIndex.H" #include "setTimeIndex.H"
@ -471,7 +471,7 @@ int main(int argc, char *argv[])
( (
Time::controlDictName, Time::controlDictName,
rootDirTarget, rootDirTarget,
caseDirTarget/fileName(word("processor") + name(proci)) caseDirTarget/("processor" + Foam::name(proci))
); );
fvMesh meshTarget fvMesh meshTarget
@ -536,7 +536,7 @@ int main(int argc, char *argv[])
( (
Time::controlDictName, Time::controlDictName,
rootDirSource, rootDirSource,
caseDirSource/fileName(word("processor") + name(procISource)) caseDirSource/("processor" + Foam::name(procISource))
); );
#include "setTimeIndex.H" #include "setTimeIndex.H"
@ -572,8 +572,7 @@ int main(int argc, char *argv[])
( (
Time::controlDictName, Time::controlDictName,
rootDirTarget, rootDirTarget,
caseDirTarget/fileName(word("processor") caseDirTarget/("processor" + Foam::name(procITarget))
+ name(procITarget))
); );
fvMesh meshTarget fvMesh meshTarget


@ -9,74 +9,74 @@ FoamFile
{ {
version 2.0; version 2.0;
format ascii; format ascii;
class IOPtrList<coordinateSystem>; class coordinateSystems;
object coordinateSystems; object coordinateSystems;
} }
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
7
( (
system_9 _9
{ {
type cartesian; type cartesian;
origin (1.03291515 -0.114391257 -0.0826236662); origin (1.03291515 -0.114391257 -0.0826236662);
e3 (1 0 0);
e1 (0 1 0); e1 (0 1 0);
// STARCDRotation (0 90 90); e3 (1 0 0);
// rotation { type starcd; angles (0 90 90); }
} }
system_10 _10
{ {
type cartesian; type cartesian;
origin (0.623151719 -0.286472935 -0.113933262); origin (0.623151719 -0.286472935 -0.113933262);
e3 (0.99508851 0.09829095 0.01173645);
e1 (0.01179356 0 -0.99993045); e1 (0.01179356 0 -0.99993045);
// STARCDRotation (5.6403745 -0.0664172952 89.3275351); e3 (0.99508851 0.09829095 0.01173645);
// rotation { type starcd; angles (5.6403745 -0.0664172952 89.3275351); }
} }
system_15 _15
{ {
type cartesian; type cartesian;
origin (0.644772231 -0.240036493 0.155972187); origin (0.644772231 -0.240036493 0.155972187);
e3 (-0.01346388 -0.90616979 -0.42269969);
e1 (0.00627978 0.42265304 -0.90626981); e1 (0.00627978 0.42265304 -0.90626981);
// STARCDRotation (-90.8512386 0 115.005148); e3 (-0.01346388 -0.90616979 -0.42269969);
// rotation { type starcd; angles (-90.8512386 0 115.005148); }
} }
system_16 _16
{ {
type cartesian; type cartesian;
origin (0.540824938 -0.240036415 0.15928296); origin (0.540824938 -0.240036415 0.15928296);
e3 (-0.01346388 -0.90616979 -0.42269969);
e1 (0.00627978 0.42265304 -0.90626981); e1 (0.00627978 0.42265304 -0.90626981);
// STARCDRotation (-90.8512386 0 115.005148); e3 (-0.01346388 -0.90616979 -0.42269969);
// rotation { type starcd; angles (-90.8512386 0 115.005148); }
} }
system_17 _17
{ {
type cartesian; type cartesian;
origin (0.436877646 -0.240036339 0.162593737); origin (0.436877646 -0.240036339 0.162593737);
e3 (-0.01346388 -0.90616979 -0.42269969);
e1 (0.00627978 0.42265304 -0.90626981); e1 (0.00627978 0.42265304 -0.90626981);
// STARCDRotation (-90.8512386 0 115.005148); e3 (-0.01346388 -0.90616979 -0.42269969);
// rotation { type starcd; angles (-90.8512386 0 115.005148); }
} }
system_18 _18
{ {
type cartesian; type cartesian;
origin (0.332930354 -0.240036261 0.16590451); origin (0.332930354 -0.240036261 0.16590451);
e3 (-0.01346388 -0.90616979 -0.42269969);
e1 (0.00627978 0.42265304 -0.90626981); e1 (0.00627978 0.42265304 -0.90626981);
// STARCDRotation (-90.8512386 0 115.005148); e3 (-0.01346388 -0.90616979 -0.42269969);
// rotation { type starcd; angles (-90.8512386 0 115.005148); }
} }
system_21 _21
{ {
type cartesian; type cartesian;
origin (0.55863733 -0.300866705 0.00317260982); origin (0.55863733 -0.300866705 0.00317260982);
e3 (0.42110287 0.02470132 -0.90667647);
e1 (0.90646036 0.02342535 0.42164069); e1 (0.90646036 0.02342535 0.42164069);
// STARCDRotation (-178.185897 -0.71772221 -155.059695); e3 (0.42110287 0.02470132 -0.90667647);
// rotation { type starcd; angles (-178.185897 -0.71772221 -155.059695); }
} }
) )


@ -66,6 +66,7 @@ Note
#include "MeshedSurfaces.H" #include "MeshedSurfaces.H"
#include "coordinateSystems.H" #include "coordinateSystems.H"
#include "cartesianCS.H"
using namespace Foam; using namespace Foam;
@ -146,9 +147,9 @@ int main(int argc, char *argv[])
} }
// get the coordinate transformations // The coordinate transformations (must be cartesian)
autoPtr<coordinateSystem> fromCsys; autoPtr<coordSystem::cartesian> fromCsys;
autoPtr<coordinateSystem> toCsys; autoPtr<coordSystem::cartesian> toCsys;
if (args.found("from") || args.found("to")) if (args.found("from") || args.found("to"))
{ {
@ -174,43 +175,44 @@ int main(int argc, char *argv[])
<< exit(FatalError); << exit(FatalError);
} }
coordinateSystems csLst(ioCsys); coordinateSystems globalCoords(ioCsys);
if (args.found("from")) if (args.found("from"))
{ {
const word csName = args["from"]; const word csName(args["from"]);
const auto* csPtr = globalCoords.lookupPtr(csName);
const label csIndex = csLst.findIndex(csName); if (!csPtr)
if (csIndex < 0)
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Cannot find -from " << csName << nl << "Cannot find -from " << csName << nl
<< "available coordinateSystems: " << csLst.toc() << nl << "available coordinateSystems: "
<< flatOutput(globalCoords.names()) << nl
<< exit(FatalError); << exit(FatalError);
} }
fromCsys.reset(new coordinateSystem(csLst[csIndex])); fromCsys = autoPtr<coordSystem::cartesian>::New(*csPtr);
} }
if (args.found("to")) if (args.found("to"))
{ {
const word csName = args["to"]; const word csName(args["to"]);
const auto* csPtr = globalCoords.lookupPtr(csName);
const label csIndex = csLst.findIndex(csName); if (!csPtr)
if (csIndex < 0)
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Cannot find -to " << csName << nl << "Cannot find -to " << csName << nl
<< "available coordinateSystems: " << csLst.toc() << nl << "available coordinateSystems: "
<< flatOutput(globalCoords.names()) << nl
<< exit(FatalError); << exit(FatalError);
} }
toCsys.reset(new coordinateSystem(csLst[csIndex])); toCsys = autoPtr<coordSystem::cartesian>::New(*csPtr);
} }
// Maybe fix this later
// maybe fix this later if (fromCsys && toCsys)
if (fromCsys.valid() && toCsys.valid())
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Only allowed '-from' or '-to' option at the moment." << "Only allowed '-from' or '-to' option at the moment."
@ -230,29 +232,30 @@ int main(int argc, char *argv[])
scalar scaleIn = 0; scalar scaleIn = 0;
if (args.readIfPresent("scaleIn", scaleIn) && scaleIn > 0) if (args.readIfPresent("scaleIn", scaleIn) && scaleIn > 0)
{ {
Info<< " -scaleIn " << scaleIn << endl; Info<< "scale input " << scaleIn << endl;
surf.scalePoints(scaleIn); surf.scalePoints(scaleIn);
} }
if (fromCsys)
if (fromCsys.valid())
{ {
Info<< " -from " << fromCsys().name() << endl; Info<< "move points from coordinate system: "
tmp<pointField> tpf = fromCsys().localPosition(surf.points()); << fromCsys->name() << endl;
tmp<pointField> tpf = fromCsys->localPosition(surf.points());
surf.movePoints(tpf()); surf.movePoints(tpf());
} }
if (toCsys.valid()) if (toCsys)
{ {
Info<< " -to " << toCsys().name() << endl; Info<< "move points to coordinate system: "
tmp<pointField> tpf = toCsys().globalPosition(surf.points()); << toCsys->name() << endl;
tmp<pointField> tpf = toCsys->globalPosition(surf.points());
surf.movePoints(tpf()); surf.movePoints(tpf());
} }
scalar scaleOut = 0; scalar scaleOut = 0;
if (args.readIfPresent("scaleOut", scaleOut) && scaleOut > 0) if (args.readIfPresent("scaleOut", scaleOut) && scaleOut > 0)
{ {
Info<< " -scaleOut " << scaleOut << endl; Info<< "scale output " << scaleOut << endl;
surf.scalePoints(scaleOut); surf.scalePoints(scaleOut);
} }
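The -from/-to handling above reduces to looking up a cartesian coordinate system by name and mapping the surface points with it; a sketch using only calls from this commit (the system name is illustrative):

const auto* csPtr = globalCoords.lookupPtr("someSystem");   // nullptr if not found
if (csPtr)
{
    auto csys = autoPtr<coordSystem::cartesian>::New(*csPtr);

    // As used for the -to option above
    tmp<pointField> tpf = csys->globalPosition(surf.points());
    surf.movePoints(tpf());
}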

Some files were not shown because too many files have changed in this diff.