Merge branch 'develop' of develop.openfoam.com:Development/OpenFOAM-plus into develop

sergio
2018-10-17 10:29:34 -07:00
736 changed files with 10759 additions and 8147 deletions
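Note on the main change pattern in this merge: most hunks below replace the old two-step dictionary reads, readScalar/readLabel/readBool wrapped around dictionary::lookup(), with the templated dictionary::get&lt;Type&gt;() accessor. A minimal sketch of the new call forms follows; the function name and choice of keywords are illustrative only, not taken from any single changed file.

#include "dictionary.H"
#include "scalar.H"

// Mandatory reads: a missing or malformed entry gives a FatalIOError directly,
// with no separate readScalar()/readLabel()/readBool() wrapper needed.
void readCoeffsExample(const Foam::dictionary& coeffs)
{
    using namespace Foam;

    // Before: scalar k1 = readScalar(coeffs.lookup("k1"));
    const scalar k1 = coeffs.get<scalar>("k1");
    const label nSub = coeffs.get<label>("nAlphaSubCycles");
    const bool schelkin = coeffs.get<bool>("subGridSchelkin");
    const word solidPhase = coeffs.get<word>("solidPhase");

    // Optional entries keep the lookupOrDefault() form in this commit.
    const scalar maxDeltaT = coeffs.lookupOrDefault<scalar>("maxDeltaT", GREAT);

    // Values unused; this only illustrates the call forms.
    (void)k1; (void)nSub; (void)schelkin; (void)solidPhase; (void)maxDeltaT;
}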

View File

@ -49,7 +49,7 @@ Foam::XiGModels::basicSubGrid::basicSubGrid
)
:
XiGModel(XiGProperties, thermo, turbulence, Su),
k1(readScalar(XiGModelCoeffs_.lookup("k1"))),
k1(XiGModelCoeffs_.get<scalar>("k1")),
XiGModel_(XiGModel::New(XiGModelCoeffs_, thermo, turbulence, Su))
{}

View File

@ -49,13 +49,10 @@ Foam::XiEqModels::Gulder::Gulder
)
:
XiEqModel(XiEqProperties, thermo, turbulence, Su),
XiEqCoef_(readScalar(XiEqModelCoeffs_.lookup("XiEqCoef"))),
XiEqCoef_(XiEqModelCoeffs_.get<scalar>("XiEqCoef")),
SuMin_(0.01*Su.average()),
uPrimeCoef_(readScalar(XiEqModelCoeffs_.lookup("uPrimeCoef"))),
subGridSchelkin_
(
readBool(XiEqModelCoeffs_.lookup("subGridSchelkin"))
)
uPrimeCoef_(XiEqModelCoeffs_.get<scalar>("uPrimeCoef")),
subGridSchelkin_(XiEqModelCoeffs_.get<bool>("subGridSchelkin"))
{}

View File

@ -49,15 +49,12 @@ Foam::XiEqModels::SCOPEXiEq::SCOPEXiEq
)
:
XiEqModel(XiEqProperties, thermo, turbulence, Su),
XiEqCoef_(readScalar(XiEqModelCoeffs_.lookup("XiEqCoef"))),
XiEqExp_(readScalar(XiEqModelCoeffs_.lookup("XiEqExp"))),
lCoef_(readScalar(XiEqModelCoeffs_.lookup("lCoef"))),
XiEqCoef_(XiEqModelCoeffs_.get<scalar>("XiEqCoef")),
XiEqExp_(XiEqModelCoeffs_.get<scalar>("XiEqExp")),
lCoef_(XiEqModelCoeffs_.get<scalar>("lCoef")),
SuMin_(0.01*Su.average()),
uPrimeCoef_(readScalar(XiEqModelCoeffs_.lookup("uPrimeCoef"))),
subGridSchelkin_
(
readBool(XiEqModelCoeffs_.lookup("subGridSchelkin"))
),
uPrimeCoef_(XiEqModelCoeffs_.get<scalar>("uPrimeCoef")),
subGridSchelkin_(XiEqModelCoeffs_.get<bool>("subGridSchelkin")),
MaModel
(
Su.mesh().lookupObject<IOdictionary>("combustionProperties"),

View File

@ -49,7 +49,7 @@ Foam::XiEqModels::instability::instability
)
:
XiEqModel(XiEqProperties, thermo, turbulence, Su),
XiEqIn(readScalar(XiEqModelCoeffs_.lookup("XiEqIn"))),
XiEqIn(XiEqModelCoeffs_.get<scalar>("XiEqIn")),
XiEqModel_(XiEqModel::New(XiEqModelCoeffs_, thermo, turbulence, Su))
{}

View File

@ -49,7 +49,7 @@ Foam::XiGModels::KTS::KTS
)
:
XiGModel(XiGProperties, thermo, turbulence, Su),
GEtaCoef_(readScalar(XiGModelCoeffs_.lookup("GEtaCoef")))
GEtaCoef_(XiGModelCoeffs_.get<scalar>("GEtaCoef"))
{}

View File

@ -52,7 +52,7 @@ Foam::XiModels::algebraic::algebraic
)
:
XiModel(XiProperties, thermo, turbulence, Su, rho, b, phi),
XiShapeCoef(readScalar(XiModelCoeffs_.lookup("XiShapeCoef"))),
XiShapeCoef(XiModelCoeffs_.get<scalar>("XiShapeCoef")),
XiEqModel_(XiEqModel::New(XiProperties, thermo, turbulence, Su)),
XiGModel_(XiGModel::New(XiProperties, thermo, turbulence, Su))
{}

View File

@ -57,7 +57,7 @@ Foam::XiModels::transport::transport
)
:
XiModel(XiProperties, thermo, turbulence, Su, rho, b, phi),
XiShapeCoef(readScalar(XiModelCoeffs_.lookup("XiShapeCoef"))),
XiShapeCoef(XiModelCoeffs_.get<scalar>("XiShapeCoef")),
XiEqModel_(XiEqModel::New(XiProperties, thermo, turbulence, Su)),
XiGModel_(XiGModel::New(XiProperties, thermo, turbulence, Su))
{}

View File

@ -53,8 +53,8 @@ Foam::laminarFlameSpeedModels::SCOPE::polynomial::polynomial
)
:
FixedList<scalar, 7>(polyDict.lookup("coefficients")),
ll(readScalar(polyDict.lookup("lowerLimit"))),
ul(readScalar(polyDict.lookup("upperLimit"))),
ll(polyDict.get<scalar>("lowerLimit")),
ul(polyDict.get<scalar>("upperLimit")),
llv(polyPhi(ll, *this)),
ulv(polyPhi(ul, *this)),
lu(0)
@ -75,39 +75,30 @@ Foam::laminarFlameSpeedModels::SCOPE::SCOPE
(
IFstream
(
fileName
(
dict.lookup("fuelFile")
)
dict.get<fileName>("fuelFile")
)()
).optionalSubDict(typeName + "Coeffs")
),
LFL_
(
readScalar
(
coeffsDict_.lookupCompat
coeffsDict_.getCompat<scalar>
(
"lowerFlammabilityLimit",
{{"lowerFlamabilityLimit", 1712}}
)
)
),
UFL_
(
readScalar
(
coeffsDict_.lookupCompat
coeffsDict_.getCompat<scalar>
(
"upperFlammabilityLimit",
{{"upperFlamabilityLimit", 1712}}
)
)
),
SuPolyL_(coeffsDict_.subDict("lowerSuPolynomial")),
SuPolyU_(coeffsDict_.subDict("upperSuPolynomial")),
Texp_(readScalar(coeffsDict_.lookup("Texp"))),
pexp_(readScalar(coeffsDict_.lookup("pexp"))),
Texp_(coeffsDict_.get<scalar>("Texp")),
pexp_(coeffsDict_.get<scalar>("pexp")),
MaPolyL_(coeffsDict_.subDict("lowerMaPolynomial")),
MaPolyU_(coeffsDict_.subDict("upperMaPolynomial"))
{
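The SCOPE constructor above also switches lookupCompat to getCompat&lt;scalar&gt;(), which reads a keyword under its current name while still accepting a deprecated spelling up to a stated API version. A small sketch, mirroring the hunk but wrapped in a hypothetical helper:

#include "dictionary.H"
#include "scalar.H"

// getCompat<Type>(name, {{oldName, lastVersion}}): reads "name", falling back
// (with a deprecation warning) to the misspelled "lowerFlamabilityLimit"
// accepted up to v1712.
Foam::scalar readLowerFlammabilityLimit(const Foam::dictionary& dict)
{
    return dict.getCompat<Foam::scalar>
    (
        "lowerFlammabilityLimit",
        {{"lowerFlamabilityLimit", 1712}}
    );
}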

View File

@ -18,8 +18,8 @@
)
);
scalar p0 = readScalar(initialConditions.lookup("p"));
scalar T0 = readScalar(initialConditions.lookup("T"));
scalar p0 = initialConditions.get<scalar>("p");
scalar T0 = initialConditions.get<scalar>("T");
#include "createBaseFields.H"

View File

@ -40,7 +40,7 @@
const word& name = Y[i].name();
if (fractions.found(name))
{
X0[i] = readScalar(fractions.lookup(name));
X0[i] = fractions.get<scalar>(name);
}
}
@ -64,7 +64,7 @@
const word& name = Y[i].name();
if (fractions.found(name))
{
Y0[i] = readScalar(fractions.lookup(name));
Y0[i] = fractions.get<scalar>(name);
}
}

View File

@ -29,7 +29,7 @@ License
const dictionary& pimpleDict = pimple.dict();
// Maximum flow Courant number
scalar maxCo(readScalar(pimpleDict.lookup("maxCo")));
scalar maxCo(pimpleDict.get<scalar>("maxCo"));
// Maximum time scale
scalar maxDeltaT(pimpleDict.lookupOrDefault<scalar>("maxDeltaT", GREAT));
@ -118,7 +118,7 @@ License
if (Yref.found(Yi.name()))
{
foundY = true;
scalar Yrefi = readScalar(Yref.lookup(Yi.name()));
const scalar Yrefi = Yref.get<scalar>(Yi.name());
rDeltaTY.field() = max
(

View File

@ -84,7 +84,7 @@ Foam::smoluchowskiJumpTFvPatchScalarField::smoluchowskiJumpTFvPatchScalarField
rhoName_(dict.lookupOrDefault<word>("rho", "rho")),
psiName_(dict.lookupOrDefault<word>("psi", "thermo:psi")),
muName_(dict.lookupOrDefault<word>("mu", "thermo:mu")),
accommodationCoeff_(readScalar(dict.lookup("accommodationCoeff"))),
accommodationCoeff_(dict.get<scalar>("accommodationCoeff")),
Twall_("Twall", dict, p.size()),
gamma_(dict.lookupOrDefault<scalar>("gamma", 1.4))
{

View File

@ -85,7 +85,7 @@ Foam::maxwellSlipUFvPatchVectorField::maxwellSlipUFvPatchVectorField
psiName_(dict.lookupOrDefault<word>("psi", "thermo:psi")),
muName_(dict.lookupOrDefault<word>("mu", "thermo:mu")),
tauMCName_(dict.lookupOrDefault<word>("tauMC", "tauMC")),
accommodationCoeff_(readScalar(dict.lookup("accommodationCoeff"))),
accommodationCoeff_(dict.get<scalar>("accommodationCoeff")),
Uwall_("Uwall", dict, p.size()),
thermalCreep_(dict.lookupOrDefault("thermalCreep", true)),
curvature_(dict.lookupOrDefault("curvature", true))

View File

@ -25,8 +25,8 @@
if (!local)
{
const scalar T0 = readScalar(eosDict.lookup("T0"));
const scalar p0 = readScalar(eosDict.lookup("p0"));
const scalar T0 = eosDict.get<scalar>("T0");
const scalar p0 = eosDict.get<scalar>("p0");
he = thermo.he(p, pow(p/p0, (gamma - scalar(1))/gamma)*T0);
}

View File

@ -12,7 +12,7 @@ IOdictionary mdEquilibrationDict
)
);
scalar targetTemperature = readScalar
scalar targetTemperature
(
mdEquilibrationDict.lookup("targetTemperature")
mdEquilibrationDict.get<scalar>("targetTemperature")
);

View File

@ -31,7 +31,12 @@
coordinates.set
(
i,
coordinateSystem::New(solidRegions[i], thermos[i])
coordinateSystem::New
(
solidRegions[i],
thermos[i],
coordinateSystem::typeName_()
)
);
tmp<volVectorField> tkappaByCp =
@ -57,7 +62,11 @@
);
aniAlphas[i].primitiveFieldRef() =
coordinates[i].R().transformVector(tkappaByCp());
coordinates[i].transformPrincipal
(
solidRegions[i].cellCentres(),
tkappaByCp()
);
aniAlphas[i].correctBoundaryConditions();
}

View File

@ -15,7 +15,12 @@ if (!thermo.isotropic())
const coordinateSystem& coodSys = coordinates[i];
aniAlpha.primitiveFieldRef() =
coodSys.R().transformVector(tkappaByCp());
coodSys.transformPrincipal
(
mesh.cellCentres(),
tkappaByCp()
);
aniAlpha.correctBoundaryConditions();
taniAlpha = tmp<volSymmTensorField>

View File

@ -139,10 +139,9 @@ basicKinematicTypeCloud kinematicCloud
scalar alphacMin
(
1.0
- readScalar
(
- (
kinematicCloud.particleProperties().subDict("constantProperties")
.lookup("alphaMax")
.get<scalar>("alphaMax")
)
);

View File

@ -29,7 +29,7 @@ License
const dictionary& pimpleDict = pimple.dict();
// Maximum flow Courant number
scalar maxCo(readScalar(pimpleDict.lookup("maxCo")));
scalar maxCo(pimpleDict.get<scalar>("maxCo"));
// Maximum time scale
scalar maxDeltaT(pimpleDict.lookupOrDefault<scalar>("maxDeltaT", GREAT));

View File

@ -29,7 +29,7 @@ License
const dictionary& pimpleDict = pimple.dict();
// Maximum flow Courant number
scalar maxCo(readScalar(pimpleDict.lookup("maxCo")));
scalar maxCo(pimpleDict.get<scalar>("maxCo"));
// Maximum time scale
scalar maxDeltaT(pimpleDict.lookupOrDefault<scalar>("maxDeltaT", GREAT));

View File

@ -160,10 +160,9 @@ basicKinematicMPPICCloud kinematicCloud
scalar alphacMin
(
1.0
- readScalar
(
- (
kinematicCloud.particleProperties().subDict("constantProperties")
.lookup("alphaMax")
.get<scalar>("alphaMax")
)
);

View File

@ -31,7 +31,7 @@ Description
scalar maxAlphaCo
(
readScalar(runTime.controlDict().lookup("maxAlphaCo"))
runTime.controlDict().get<scalar>("maxAlphaCo")
);
scalar alphaCoNum = 0.0;

View File

@ -2,5 +2,5 @@
scalar maxAcousticCo
(
readScalar(runTime.controlDict().lookup("maxAcousticCo"))
runTime.controlDict().get<scalar>("maxAcousticCo")
);

View File

@ -70,10 +70,10 @@
// Remove the swirl component of velocity for "wedge" cases
if (pimple.dict().found("removeSwirl"))
{
label swirlCmpt(readLabel(pimple.dict().lookup("removeSwirl")));
label swirlCmpt(pimple.dict().get<label>("removeSwirl"));
Info<< "Removing swirl component-" << swirlCmpt << " of U" << endl;
U.field().replace(swirlCmpt, 0.0);
U.field().replace(swirlCmpt, Zero);
}
U.correctBoundaryConditions();

View File

@ -2,5 +2,5 @@
scalar maxAcousticCo
(
readScalar(runTime.controlDict().lookup("maxAcousticCo"))
runTime.controlDict().get<scalar>("maxAcousticCo")
);

View File

@ -73,10 +73,10 @@
// Remove the swirl component of velocity for "wedge" cases
if (pimple.dict().found("removeSwirl"))
{
label swirlCmpt(readLabel(pimple.dict().lookup("removeSwirl")));
label swirlCmpt(pimple.dict().get<label>("removeSwirl"));
Info<< "Removing swirl component-" << swirlCmpt << " of U" << endl;
U.field().replace(swirlCmpt, 0.0);
U.field().replace(swirlCmpt, Zero);
}
U.correctBoundaryConditions();

View File

@ -763,8 +763,8 @@ void Foam::multiphaseMixtureThermo::solve()
const Time& runTime = mesh_.time();
const dictionary& alphaControls = mesh_.solverDict("alpha");
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
scalar cAlpha(readScalar(alphaControls.lookup("cAlpha")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
scalar cAlpha(alphaControls.get<scalar>("cAlpha"));
volScalarField& alpha = phases_.first();

View File

@ -39,8 +39,6 @@ SourceFiles
#include "rhoThermo.H"
#include "volFields.H"
#include "dictionaryEntry.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

View File

@ -1,8 +1,8 @@
const dictionary& alphaControls = mesh.solverDict(alpha1.name());
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr")));
label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
bool MULESCorr(alphaControls.lookupOrDefault("MULESCorr", false));

View File

@ -31,7 +31,7 @@ Description
scalar maxAlphaCo
(
readScalar(runTime.controlDict().lookup("maxAlphaCo"))
runTime.controlDict().get<scalar>("maxAlphaCo")
);
scalar maxAlphaDdt

View File

@ -198,7 +198,7 @@ void Foam::radiation::laserDTRM::initialise()
{
case pdGaussian:
{
sigma_ = readScalar(lookup("sigma"));
sigma_ = get<scalar>("sigma");
break;
}
case pdManual:
@ -325,8 +325,8 @@ Foam::radiation::laserDTRM::laserDTRM(const volScalarField& T)
mode_(powerDistNames_.lookup("mode", *this)),
DTRMCloud_(mesh_, "DTRMCloud", IDLList<DTRMParticle>()),
nParticles_(0),
ndTheta_(readLabel(lookup("nTheta"))),
ndr_(readLabel(lookup("nr"))),
ndTheta_(get<label>("nTheta")),
ndr_(get<label>("nr")),
maxTrackLength_(mesh_.bounds().mag()),
focalLaserPosition_
@ -339,7 +339,7 @@ Foam::radiation::laserDTRM::laserDTRM(const volScalarField& T)
Function1<vector>::New("laserDirection", *this)
),
focalLaserRadius_(readScalar(lookup("focalLaserRadius"))),
focalLaserRadius_(get<scalar>("focalLaserRadius")),
qualityBeamLaser_
(
lookupOrDefault<scalar>("qualityBeamLaser", 0.0)
@ -435,8 +435,8 @@ Foam::radiation::laserDTRM::laserDTRM
mode_(powerDistNames_.lookup("mode", *this)),
DTRMCloud_(mesh_, "DTRMCloud", IDLList<DTRMParticle>()),
nParticles_(0),
ndTheta_(readLabel(lookup("nTheta"))),
ndr_(readLabel(lookup("nr"))),
ndTheta_(get<label>("nTheta")),
ndr_(get<label>("nr")),
maxTrackLength_(mesh_.bounds().mag()),
focalLaserPosition_
@ -448,7 +448,7 @@ Foam::radiation::laserDTRM::laserDTRM
Function1<vector>::New("laserDirection", *this)
),
focalLaserRadius_(readScalar(lookup("focalLaserRadius"))),
focalLaserRadius_(get<scalar>("focalLaserRadius")),
qualityBeamLaser_
(
lookupOrDefault<scalar>("qualityBeamLaser", 0.0)
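In the laserDTRM hunks above the get&lt;label&gt;()/get&lt;scalar&gt;() calls are unqualified because the model itself is a dictionary: laserDTRM derives, via radiationModel, from IOdictionary. A hypothetical sketch of that pattern; the class and its entries are illustrative only:

#include "IOdictionary.H"

// A dictionary-backed model reading its own mandatory entries in the
// initialiser list, as the laserDTRM constructors above now do.
class laserLikeModel
:
    public Foam::IOdictionary
{
    Foam::label nTheta_;
    Foam::scalar focalLaserRadius_;

public:
    explicit laserLikeModel(const Foam::IOobject& io)
    :
        Foam::IOdictionary(io),
        nTheta_(get<Foam::label>("nTheta")),
        focalLaserRadius_(get<Foam::scalar>("focalLaserRadius"))
    {}
};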

View File

@ -53,7 +53,7 @@ Foam::radiation::FresnelLaser::FresnelLaser
)
:
reflectionModel(dict, mesh),
epsilon_(readScalar(dict.lookup("epsilon")))
epsilon_(dict.get<scalar>("epsilon"))
{}

View File

@ -53,8 +53,8 @@ Foam::porousModels::VollerPrakash::VollerPrakash
)
:
porousModel(dict, mesh),
Cu_(readScalar(dict.lookup("Cu"))),
solidPhase_(dict.lookup("solidPhase"))
Cu_(dict.get<scalar>("Cu")),
solidPhase_(dict.get<word>("solidPhase"))
{}

View File

@ -191,7 +191,7 @@ void Foam::MultiComponentPhaseModel<BasePhaseModel, phaseThermo>::solveYi
const dictionary& MULEScontrols = mesh.solverDict(alpha1.name());
scalar cAlpha(readScalar(MULEScontrols.lookup("cYi")));
scalar cAlpha(MULEScontrols.get<scalar>("cYi"));
PtrList<surfaceScalarField> phiYiCorrs(species_.size());
const surfaceScalarField& phi = this->fluid().phi();

View File

@ -269,8 +269,8 @@ void Foam::multiphaseSystem::solve()
const fvMesh& mesh = this->mesh();
const dictionary& alphaControls = mesh.solverDict("alpha");
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
mesh.solverDict("alpha").readEntry("cAlphas", cAlphas_);
// Reset ddtAlphaMax
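The multiphaseSystem hunk above also uses dictionary::readEntry(), which fills an existing variable in place, alongside get&lt;Type&gt;(), which returns by value; both are mandatory reads. A short sketch, assuming cAlphas is a scalar list (its real type is not shown in the hunk):

#include "dictionary.H"
#include "scalarList.H"

void readAlphaControls(const Foam::dictionary& alphaControls)
{
    // In-place mandatory read into an existing container.
    Foam::scalarList cAlphas;
    alphaControls.readEntry("cAlphas", cAlphas);

    // By-value mandatory read.
    const Foam::label nAlphaSubCycles =
        alphaControls.get<Foam::label>("nAlphaSubCycles");
    (void)nAlphaSubCycles;
}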

View File

@ -260,13 +260,7 @@ public:
virtual volScalarField& he()
{
NotImplemented;
return
(
const_cast<volScalarField&>
(
volScalarField::null()
)
);
return const_cast<volScalarField&>(volScalarField::null());
}
//- Return access to the internal energy field [J/kg]
@ -274,10 +268,7 @@ public:
virtual const volScalarField& he() const
{
NotImplemented;
return
(
volScalarField::null()
);
return volScalarField::null();
}
//- Enthalpy/Internal energy

View File

@ -50,10 +50,7 @@ Foam::temperaturePhaseChangeTwoPhaseMixture::New
const word modelType
(
phaseChangePropertiesDict.lookup
(
"phaseChangeTwoPhaseModel"
)
phaseChangePropertiesDict.get<word>("phaseChangeTwoPhaseModel")
);
Info<< "Selecting phaseChange model " << modelType << endl;

View File

@ -1,5 +1,5 @@
const dictionary& alphaControls = mesh.solverDict(alpha1.name());
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr")));
label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));

View File

@ -154,14 +154,11 @@ Foam::threePhaseInterfaceProperties::threePhaseInterfaceProperties
:
mixture_(mixture),
cAlpha_
(
readScalar
(
mixture.U().mesh().solverDict
(
mixture_.alpha1().name()
).lookup("cAlpha")
)
).get<scalar>("cAlpha")
),
sigma12_("sigma12", dimensionSet(1, 0, -2, 0, 0), mixture),
sigma13_("sigma13", dimensionSet(1, 0, -2, 0, 0), mixture),

View File

@ -31,7 +31,7 @@ Description
scalar maxAlphaCo
(
readScalar(runTime.controlDict().lookup("maxAlphaCo"))
runTime.controlDict().get<scalar>("maxAlphaCo")
);
scalar alphaCoNum = 0.0;

View File

@ -1,3 +1,3 @@
const dictionary& alphaControls = mesh.solverDict(alpha1.name());
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));

View File

@ -31,7 +31,7 @@ Description
scalar maxAlphaCo
(
readScalar(runTime.controlDict().lookup("maxAlphaCo"))
runTime.controlDict().get<scalar>("maxAlphaCo")
);
scalar alphaCoNum = 0.0;

View File

@ -1,8 +1,8 @@
const dictionary& alphaControls = mesh.solverDict(alpha1.name());
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr")));
label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
bool MULESCorr(alphaControls.lookupOrDefault("MULESCorr", false));

View File

@ -844,7 +844,7 @@ void Foam::multiphaseSystem::solve()
const Time& runTime = mesh_.time();
const dictionary& alphaControls = mesh_.solverDict("alpha");
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
if (nAlphaSubCycles > 1)
{

View File

@ -311,8 +311,8 @@ void Foam::multiphaseMixture::solve()
volScalarField& alpha = phases_.first();
const dictionary& alphaControls = mesh_.solverDict("alpha");
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
scalar cAlpha(readScalar(alphaControls.lookup("cAlpha")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
scalar cAlpha(alphaControls.get<scalar>("cAlpha"));
if (nAlphaSubCycles > 1)
{

View File

@ -55,7 +55,7 @@ Foam::wallLubricationModels::Frank::Frank
wallLubricationModel(dict, pair),
Cwd_("Cwd", dimless, dict),
Cwc_("Cwc", dimless, dict),
p_(readScalar(dict.lookup("p")))
p_(dict.get<scalar>("p"))
{}

View File

@ -131,20 +131,16 @@ Foam::ThermalPhaseChangePhaseSystem<BasePhaseSystem>::heatTransfer() const
dimensionedScalar(dimensionSet(1,-1,-3,0,0), Zero)
);
if
(
otherPhase.mesh().foundObject<volScalarField>
(
"alphat." + otherPhase.name()
)
)
{
const volScalarField& alphat =
otherPhase.mesh().lookupObject<volScalarField>
const volScalarField* alphatPtr =
otherPhase.mesh().findObject<volScalarField>
(
"alphat." + otherPhase.name()
);
if (alphatPtr)
{
const volScalarField& alphat = *alphatPtr;
const fvPatchList& patches = this->mesh().boundary();
forAll(patches, patchi)
{
@ -427,20 +423,16 @@ void Foam::ThermalPhaseChangePhaseSystem<BasePhaseSystem>::correctThermo()
dimensionedScalar(dimDensity/dimTime, Zero)
);
if
(
phase2.mesh().foundObject<volScalarField>
(
"alphat." + phase2.name()
)
)
{
const volScalarField& alphat =
phase2.mesh().lookupObject<volScalarField>
const volScalarField* alphatPtr =
phase2.mesh().findObject<volScalarField>
(
"alphat." + phase2.name()
);
if (alphatPtr)
{
const volScalarField& alphat = *alphatPtr;
const fvPatchList& patches = this->mesh().boundary();
forAll(patches, patchi)
{
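This file also collapses the foundObject&lt;Type&gt;() check plus lookupObject&lt;Type&gt;() lookup into a single findObject&lt;Type&gt;() call, which returns a pointer and nullptr when the field is not registered. A minimal sketch with a hypothetical helper; the field-name prefix matches the hunk:

#include "objectRegistry.H"
#include "volFields.H"

// Single registry search; nullptr signals that the turbulent thermal
// diffusivity field for this phase does not exist.
const Foam::volScalarField* findAlphat
(
    const Foam::objectRegistry& obr,
    const Foam::word& phaseName
)
{
    return obr.findObject<Foam::volScalarField>("alphat." + phaseName);
}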

View File

@ -606,7 +606,7 @@ void Foam::multiphaseSystem::solve()
const Time& runTime = mesh_.time();
const dictionary& alphaControls = mesh_.solverDict("alpha");
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
bool LTS = fv::localEulerDdt::enabled(mesh_);

View File

@ -60,7 +60,7 @@ KocamustafaogullariIshii::KocamustafaogullariIshii
)
:
departureDiameterModel(),
phi_(readScalar(dict.lookup("phi")))
phi_(dict.get<scalar>("phi"))
{}

View File

@ -52,7 +52,7 @@ Foam::wallBoilingModels::partitioningModels::
Lavieville::Lavieville(const dictionary& dict)
:
partitioningModel(),
alphaCrit_(readScalar(dict.lookup("alphaCrit")))
alphaCrit_(dict.get<scalar>("alphaCrit"))
{}

View File

@ -52,8 +52,8 @@ Foam::wallBoilingModels::partitioningModels::
cosine::cosine(const dictionary& dict)
:
partitioningModel(),
alphaLiquid1_(readScalar(dict.lookup("alphaLiquid1"))),
alphaLiquid0_(readScalar(dict.lookup("alphaLiquid0")))
alphaLiquid1_(dict.get<scalar>("alphaLiquid1")),
alphaLiquid0_(dict.get<scalar>("alphaLiquid0"))
{}

View File

@ -52,8 +52,8 @@ Foam::wallBoilingModels::partitioningModels::
linear::linear(const dictionary& dict)
:
partitioningModel(),
alphaLiquid1_(readScalar(dict.lookup("alphaLiquid1"))),
alphaLiquid0_(readScalar(dict.lookup("alphaLiquid0")))
alphaLiquid1_(dict.get<scalar>("alphaLiquid1")),
alphaLiquid0_(dict.get<scalar>("alphaLiquid0"))
{}

View File

@ -57,9 +57,9 @@ Foam::RASModels::phasePressureModel::phasePressureModel
phase_(phase),
alphaMax_(readScalar(coeffDict_.lookup("alphaMax"))),
preAlphaExp_(readScalar(coeffDict_.lookup("preAlphaExp"))),
expMax_(readScalar(coeffDict_.lookup("expMax"))),
alphaMax_(coeffDict_.get<scalar>("alphaMax")),
preAlphaExp_(coeffDict_.get<scalar>("preAlphaExp")),
expMax_(coeffDict_.get<scalar>("expMax")),
g0_
(
"g0",

View File

@ -193,8 +193,8 @@ void Foam::twoPhaseSystem::solve()
const dictionary& alphaControls = mesh_.solverDict(alpha1.name());
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
bool LTS = fv::localEulerDdt::enabled(mesh_);

View File

@ -1,3 +1,3 @@
const dictionary& alphaControls = mesh.solverDict(alpha1.name());
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));

View File

@ -31,7 +31,7 @@ Description
scalar maxAlphaCo
(
readScalar(runTime.controlDict().lookup("maxAlphaCo"))
runTime.controlDict().get<scalar>("maxAlphaCo")
);
scalar alphaCoNum = 0.0;

View File

@ -55,7 +55,7 @@ Foam::wallLubricationModels::Frank::Frank
wallLubricationModel(dict, pair),
Cwd_("Cwd", dimless, dict),
Cwc_("Cwc", dimless, dict),
p_(readScalar(dict.lookup("p")))
p_(dict.get<scalar>("p"))
{}

View File

@ -60,9 +60,9 @@ Foam::RASModels::phasePressureModel::phasePressureModel
phase_(phase),
alphaMax_(readScalar(coeffDict_.lookup("alphaMax"))),
preAlphaExp_(readScalar(coeffDict_.lookup("preAlphaExp"))),
expMax_(readScalar(coeffDict_.lookup("expMax"))),
alphaMax_(coeffDict_.get<scalar>("alphaMax")),
preAlphaExp_(coeffDict_.get<scalar>("preAlphaExp")),
expMax_(coeffDict_.get<scalar>("expMax")),
g0_
(
"g0",

View File

@ -360,8 +360,8 @@ void Foam::twoPhaseSystem::solve()
alpha1.name()
);
label nAlphaSubCycles(readLabel(alphaControls.lookup("nAlphaSubCycles")));
label nAlphaCorr(readLabel(alphaControls.lookup("nAlphaCorr")));
label nAlphaSubCycles(alphaControls.get<label>("nAlphaSubCycles"));
label nAlphaCorr(alphaControls.get<label>("nAlphaCorr"));
word alphaScheme("div(phi," + alpha1.name() + ')');
word alpharScheme("div(phir," + alpha1.name() + ')');

View File

@ -13,7 +13,7 @@
);
const dictionary& rhoDict(mechanicalProperties.subDict("rho"));
word rhoType(rhoDict.lookup("type"));
word rhoType(rhoDict.get<word>("type"));
autoPtr<volScalarField> rhoPtr;
@ -28,7 +28,7 @@
if (rhoType == "uniform")
{
scalar rhoValue(readScalar(rhoDict.lookup("value")));
scalar rhoValue(rhoDict.get<scalar>("value"));
rhoPtr.reset
(
@ -68,7 +68,7 @@
volScalarField& rho = rhoPtr();
const dictionary& EDict(mechanicalProperties.subDict("E"));
word EType(EDict.lookup("type"));
word EType(EDict.get<word>("type"));
autoPtr<volScalarField> EPtr;
@ -83,7 +83,7 @@
if (EType == "uniform")
{
scalar rhoEValue(readScalar(EDict.lookup("value")));
scalar rhoEValue(EDict.get<scalar>("value"));
EPtr.reset
(
@ -134,11 +134,11 @@
);
const dictionary& nuDict(mechanicalProperties.subDict("nu"));
word nuType(nuDict.lookup("type"));
word nuType(nuDict.get<word>("type"));
if (nuType == "uniform")
{
scalar nuValue(readScalar(nuDict.lookup("value")));
scalar nuValue(nuDict.get<scalar>("value"));
nuPtr.reset
(
new volScalarField

View File

@ -58,10 +58,10 @@ if (thermalStress)
);
const dictionary& CDict(thermalProperties.subDict("C"));
word CType(CDict.lookup("type"));
word CType(CDict.get<word>("type"));
if (CType == "uniform")
{
scalar CValue(readScalar(CDict.lookup("value")));
scalar CValue(CDict.get<scalar>("value"));
CPtr.reset
(
@ -113,10 +113,10 @@ if (thermalStress)
);
const dictionary& kDict(thermalProperties.subDict("k"));
word kType(kDict.lookup("type"));
word kType(kDict.get<word>("type"));
if (kType == "uniform")
{
scalar rhoKValue(readScalar(kDict.lookup("value")));
scalar rhoKValue(kDict.get<scalar>("value"));
rhoKPtr.reset
(
@ -169,11 +169,11 @@ if (thermalStress)
const dictionary& alphaDict(thermalProperties.subDict("alpha"));
word alphaType(alphaDict.lookup("type"));
word alphaType(alphaDict.get<word>("type"));
if (alphaType == "uniform")
{
scalar alphaValue(readScalar(alphaDict.lookup("value")));
scalar alphaValue(alphaDict.get<scalar>("value"));
alphaPtr.reset
(
new volScalarField

View File

@ -1 +1 @@
scalar accFac(readScalar(stressControl.lookup("accelerationFactor")));
scalar accFac(stressControl.get<scalar>("accelerationFactor"));

View File

@ -120,8 +120,8 @@ int main(int argc, char *argv[])
)
);
scalar x0 = readScalar(function1Properties.lookup("x0"));
scalar x1 = readScalar(function1Properties.lookup("x1"));
scalar x0 = function1Properties.get<scalar>("x0");
scalar x1 = function1Properties.get<scalar>("x1");
Info<< "Data entry type: " << function1().type() << nl << endl;

View File

@ -3,7 +3,7 @@
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011 OpenFOAM Foundation
\\/ M anipulation |
\\/ M anipulation | Copyright (C) 2018 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -37,6 +37,10 @@ using namespace Foam;
int main(int argc, char *argv[])
{
Info<<"cwd() " << cwd() << nl;
Info<<"cwd(-P) " << cwd(false) << nl;
Info<<"cwd(-L) " << cwd(true) << nl;
Info<<"rmDir" << nl;
rmDir("hmm");

View File

@ -328,6 +328,17 @@ int main(int argc, char *argv[])
<<"addresses:" << nl;
printAddr(Info, list1);
printAddr(Info, list1a);
Info<<"values:" << nl;
print(Info, list1a);
// This should not cause problems (ie, no deletion)
{
auto* ptr = &(list1a.first());
list1a.set(0, ptr);
Info<<"values:" << nl;
print(Info, list1a);
}
PtrList<Scalar> list1b(list1a, true);

View File

@ -32,9 +32,39 @@ Description
#include "label.H"
#include "scalar.H"
#include "List.H"
#include "ops.H"
#include <functional>
using namespace Foam;
// Test for special comparison operation using compareOp
// Normal sort on label, reverse sort on scalar
struct special1
{
typedef Tuple2<label, scalar> type;
bool operator()(const type& a, const type& b) const
{
int val = compareOp<label>()(a.first(), b.first());
return (val == 0) ? (b.second() < a.second()) : (val < 0);
}
};
// Test for special comparison operation using compareOp
// Normal sort on scalar, reverse sort on label
struct special2
{
typedef Tuple2<label, scalar> type;
bool operator()(const type& a, const type& b) const
{
scalar val = compareOp<scalar>()(a.second(), b.second());
return (val == 0) ? (b.first() < a.first()) : (val < 0);
}
};
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Main program:
@ -46,19 +76,39 @@ int main()
Info<< "tuple: "
<< t2 << " "
<< t2.first() << " " << t2.second() << endl;
<< t2.first() << " " << t2.second() << nl;
List<indexedScalar> list1(10);
forAll(list1, i)
// As list. Generated so that we have duplicate indices
List<indexedScalar> list1(3*4);
for (label i = 0; i < 4; ++i)
{
list1[i] = indexedScalar(-i, i*i);
const label j = (i+1);
const label idx = ((i % 2) ? -1 : 1) * (j);
list1[i] = indexedScalar(idx, (j*j));
list1[i+4] = indexedScalar(idx, 2*j); // duplicate index
list1[i+8] = indexedScalar(idx+12, 2*j); // duplicate value
}
sort(list1);
Info<< "Unsorted tuples:" << nl << list1 << nl;
Info<< "tuples:" << nl
<< list1
<< endl;
Foam::sort(list1, std::less<indexedScalar>());
Info<< "sorted tuples:" << nl << list1 << nl;
Foam::sort(list1, std::greater<indexedScalar>());
Info<< "reverse sorted tuples:" << nl << list1 << nl;
Foam::sort(list1, special1());
Info<< "special sorted tuples - sort on index, reverse on value:"
<< nl << list1 << nl;
Foam::sort(list1, special2());
Info<< "special sorted tuples - sort on value, reverse on index:"
<< nl << list1 << nl;
Info<< "End\n" << endl;

View File

@ -40,7 +40,6 @@ int main(int argc, char *argv[])
argList::noBanner();
argList::noParallel();
// argList::noFunctionObjects();
argList::removeOption("case");
argList::addOption("label", "value", "Test parsing of label");
argList::addOption("scalar", "value", "Test parsing of scalar");
argList::addOption("string", "value", "Test string lookup");
@ -73,6 +72,15 @@ int main(int argc, char *argv[])
argList args(argc, argv, false, true);
Info<< "command-line ("
<< args.options().size() << " options, "
<< args.args().size() << " args)" << nl
<< " " << args.commandLine().c_str() << nl << nl;
Info<< "rootPath: " << args.rootPath() << nl
<< "globalCase: " << args.globalCaseName() << nl
<< "globalPath: " << args.globalPath() << nl
<< nl;
Info<<"have: "
<<args.count({"label", "scalar"}) << " options" << nl;

View File

@ -30,12 +30,68 @@ Description
\*---------------------------------------------------------------------------*/
#include "argList.H"
#include "coordinateSystem.H"
#include "Time.H"
#include "coordinateSystems.H"
#include "identityRotation.H"
#include "indirectCS.H"
#include "Fstream.H"
#include "IOstreams.H"
#include "transform.H"
using namespace Foam;
template<class T>
void testTransform(const coordinateSystem& cs, const point& p, const T& val)
{
Info<< " " << pTraits<T>::typeName << ": " << val
<< " transform: " << cs.transform(p, val)
<< " invTransform: " << cs.invTransform(p, val) << nl;
// Info<< " both: " << cs.invTransform(p, cs.transform(p, val)) << nl;
}
void basicTests(const coordinateSystem& cs)
{
cs.writeEntry(cs.name(), Info);
if (isA<coordSystem::indirect>(cs))
{
Info<< "indirect from:" << nl;
dynamicCast<const coordSystem::indirect>(cs).cs()
.writeEntry(cs.name(), Info);
}
Info<< "rotation: " << cs.R() << nl;
List<point> testPoints
({
{1,0,0}, {0,1,0}, {0,0,1}, {1,1,1},
});
for (const point& p : testPoints)
{
Info<< nl
<< " test point: " << p
<< " = local point " << cs.transformPoint(p)
<< " = local coord " << cs.localPosition(p) << nl;
const vector v1(1, 1, 1);
const tensor t1(tensor::I);
const tensor t2(1, 2, 3, 4, 5, 6, 7, 8, 9);
testTransform(cs, p, v1);
testTransform(cs, p, t1);
testTransform(cs, p, t2);
}
Info<< nl;
}
void doTest(const dictionary& dict)
{
Info<< dict.dictName() << dict << nl;
@ -43,18 +99,42 @@ void doTest(const dictionary& dict)
// Could fail?
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try
{
coordinateSystem cs1(dict.dictName(), dict);
auto cs1ptr = coordinateSystem::New(dict, "");
coordinateSystem& cs1 = *cs1ptr;
cs1.rename(dict.dictName());
coordinateSystem cs2;
basicTests(cs1);
}
catch (Foam::IOerror& err)
{
Info<< "Caught FatalIOError " << err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "Caught FatalError " << err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
}
// Move assign
cs2 = std::move(cs1);
// Info<<cs2 << nl;
cs2.writeDict(Info, true);
Info<< nl;
void doTest(const objectRegistry& obr, const dictionary& dict)
{
Info<< dict.dictName() << dict << nl;
// Could fail?
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try
{
auto cs1ptr = coordinateSystem::New(obr, dict, word::null);
coordinateSystem& cs1 = *cs1ptr;
basicTests(cs1);
}
catch (Foam::IOerror& err)
{
@ -78,7 +158,40 @@ int main(int argc, char *argv[])
argList::addArgument("dict .. dictN");
argList args(argc, argv, false, true);
if (args.size() <= 1)
if (args.found("case"))
{
Info<<"using case for tests" << nl;
#include "createTime.H"
const coordinateSystems& systems = coordinateSystems::New(runTime);
Info<< systems.size() << " global systems" << nl;
for (const coordinateSystem& cs : systems)
{
basicTests(cs);
}
// systems.write();
for (label argi=1; argi < args.size(); ++argi)
{
const string& dictFile = args[argi];
IFstream is(dictFile);
dictionary inputDict(is);
forAllConstIters(inputDict, iter)
{
if (iter().isDict())
{
doTest(runTime, iter().dict());
}
}
}
}
else if (args.size() <= 1)
{
Info<<"no coordinateSystem dictionaries to expand" << nl;
}

View File

@ -1,7 +1,7 @@
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: plus |
| \\ / O peration | Version: v1806 |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
@ -9,13 +9,24 @@ FoamFile
{
version 2.0;
format ascii;
class dictionary;
object meshQualityDict;
class IOPtrList<coordinateSystem>; //<-- Older name
object coordinateSystems;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Include defaults parameters from master dictionary
#includeEtc "caseDicts/meshQualityDict"
(
cs1
{
type cartesian;
origin (1 2 3);
coordinateRotation
{
type axes;
e1 (0 0 1);
e2 (0 1 0);
}
}
)
// ************************************************************************* //

View File

@ -1,7 +1,7 @@
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\    /   O peration     | Version:  plus                                  |
| \\ / O peration | Version: v1806 |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
@ -10,20 +10,19 @@ FoamFile
version 2.0;
format ascii;
class dictionary;
location "system";
object controlDict;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
application snappyHexMesh;
application simpleFoam;
startFrom startTime;
startFrom latestTime;
startTime 0;
stopAt endTime;
endTime 2000;
endTime 4;
deltaT 1;

View File

@ -0,0 +1,86 @@
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: v1806 |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
//OLD class IOPtrList<coordinateSystem>;
class coordinateSystems;
object coordinateSystems;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
(
cs1
{
type cartesian;
origin (1 2 3);
rotation
{
type axes;
e1 (0 0 1);
e2 (0 1 0);
}
}
cs2
{
type cartesian;
origin (0 3 5);
e1 (1 2 0);
e2 (2 0 2);
}
cs3
{
type cartesian;
origin (0 3 5);
coordinateRotation // older name
{
type euler;
angles (90 0 0);
}
}
cs4
{
type cylindrical;
origin (0 3 5);
rotation
{
type euler;
angles (90 0 0);
}
}
cyl
{
type cylindrical;
origin (0 0 0);
degrees false;
rotation
{
type axisAngle;
axis (0 0 1);
angle 90;
}
}
ident
{
origin (0 0 0);
rotation
{
type none;
}
}
)
// ************************************************************************* //

View File

@ -1,7 +1,7 @@
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: plus |
| \\ / O peration | Version: v1806 |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
@ -10,14 +10,39 @@ FoamFile
version 2.0;
format ascii;
class dictionary;
location "system";
object decomposeParDict;
object controlDict;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
numberOfSubdomains 8;
application simpleFoam;
method scotch;
startFrom latestTime;
startTime 0;
stopAt endTime;
endTime 4;
deltaT 1;
writeControl timeStep;
writeInterval 100;
purgeWrite 0;
writeFormat binary;
writePrecision 6;
writeCompression off;
timeFormat general;
timePrecision 6;
runTimeModifiable true;
// ************************************************************************* //

View File

@ -10,12 +10,19 @@ FoamFile
version 2.0;
format ascii;
class dictionary;
object testDict;
object testCsys1;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Rotate 90 deg around x: y -> z, z -> -y
rot_x90
{
origin (0 0 0);
e1 (1 0 0);
e3 (0 -1 0);
}
rot_x90_axesRotation
{
origin (0 0 0);
@ -27,13 +34,24 @@ rot_x90_axesRotation
}
}
rot_x90_axisAngle
{
origin (0 0 0);
coordinateRotation
{
type axisAngle;
axis (1 0 0); // non-unit also OK
angle 90;
}
}
rot_x90_euler
{
origin (0 0 0);
coordinateRotation
{
type EulerRotation;
rotation (0 90 0); // z-x'-z''
type euler;
angles (0 90 0); // z-x'-z''
}
}
@ -51,18 +69,40 @@ rot_z45_axesRotation
}
}
rot_z45_axisAngle
{
origin (0 0 0);
coordinateRotation
{
type axisAngle;
axis (0 0 10); // non-unit also OK
angle 45;
}
}
rot_z45_euler
{
origin (0 0 0);
coordinateRotation
{
type EulerRotation;
rotation (45 0 0); // z-x'-z''
type euler;
angles (45 0 0); // z-x'-z''
}
}
rot_z45_starcd
{
origin (0 0 0);
coordinateRotation
{
type starcd;
angles (45 0 0); // z-x'-y''
}
}
// Rotate -45 deg around z: x -> (1 -1 0), y = (1 1 0)
rot_zm45_axesRotation
{
origin (0 0 0);
@ -74,13 +114,24 @@ rot_zm45_axesRotation
}
}
rot_zm45_axisAngle
{
origin (0 0 0);
coordinateRotation
{
type axisAngle;
axis (0 0 10); // non-unit also OK
angle -45;
}
}
rot_zm45_euler
{
origin (0 0 0);
coordinateRotation
{
type EulerRotation;
rotation (-45 0 0); // z-x'-z''
type euler;
angles (-45 0 0); // z-x'-z''
}
}
@ -98,13 +149,35 @@ null_axesRotation
}
}
null_axisAngle0
{
origin (0 0 0);
coordinateRotation
{
type axisAngle;
axis (0 0 0); // non-unit also OK
angle 0;
}
}
null_axisAngle1
{
origin (0 0 0);
coordinateRotation
{
type axisAngle;
axis (1 1 1); // non-unit also OK
angle 0;
}
}
null_euler
{
origin (0 0 0);
coordinateRotation
{
type EulerRotation;
rotation (0 0 0); // z-x'-z''
type euler;
angles (0 0 0); // z-x'-z''
}
}

View File

@ -0,0 +1,59 @@
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: v1806 |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
class dictionary;
object testCsys1;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// This dictionary only works in combination with constant/coordinateSystems
mycs1
{
type indirect;
name cs1;
}
mycs2
{
type indirect;
name cs2;
}
mycs3
{
type indirect;
name cs3;
}
mycyl
{
type indirect;
name cyl;
}
mycy2
{
coordinateSystem
{
type indirect;
name cyl;
}
}
mycy3
{
coordinateSystem cyl;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

View File

@ -82,8 +82,8 @@ int main(int argc, char *argv[])
Info<< "dict1.toc(): " << dict1.name() << " " << dict1.toc()
<< endl;
dictionary dict3(dict2.subDictPtr("boundaryField"));
dictionary dict4(dict2.subDictPtr("NONEXISTENT"));
dictionary dict3(dict2.findDict("boundaryField"));
dictionary dict4(dict2.findDict("NONEXISTENT"));
Info<< "dictionary construct from pointer" << nl
<< "ok = " << dict3.name() << " " << dict3.toc() << nl
@ -105,23 +105,17 @@ int main(int argc, char *argv[])
Info<< "Pattern find \"abc\" in top directory : "
<< dict.lookup("abc") << endl;
Info<< "Pattern find \"abc\" in sub directory : "
<< dict.subDict("someDict").lookup("abc")
<< endl;
<< dict.subDict("someDict").lookup("abc") << nl;
Info<< "Recursive pattern find \"def\" in sub directory : "
<< dict.subDict("someDict").lookup("def", true)
<< endl;
<< dict.subDict("someDict").lookup("def", true) << nl;
Info<< "Recursive pattern find \"foo\" in sub directory : "
<< dict.subDict("someDict").lookup("foo", true)
<< endl;
<< dict.subDict("someDict").lookup("foo", true) << nl;
Info<< "Recursive pattern find \"fooz\" in sub directory : "
<< dict.subDict("someDict").lookup("fooz", true)
<< endl;
<< dict.subDict("someDict").lookup("fooz", true) << nl;
Info<< "Recursive pattern find \"bar\" in sub directory : "
<< dict.subDict("someDict").lookup("bar", true)
<< endl;
<< dict.subDict("someDict").lookup("bar", true) << nl;
Info<< "Recursive pattern find \"xxx\" in sub directory : "
<< dict.subDict("someDict").lookup("xxx", true)
<< endl;
<< dict.subDict("someDict").lookup("xxx", true) << nl;
}
}
else

View File

@ -48,6 +48,102 @@ void entryInfo(entry* e)
}
// Try with readScalar
scalar try_readScalar(const dictionary& dict, const word& k)
{
scalar val(-GREAT);
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try
{
val = readScalar(dict.lookup(k));
Info<< "readScalar(" << k << ") = " << val << nl;
}
catch (Foam::IOerror& err)
{
Info<< "readScalar(" << k << ") Caught FatalIOError "
<< err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "readScalar(" << k << ") Caught FatalError "
<< err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
return val;
}
// Try with get<scalar>
scalar try_getScalar(const dictionary& dict, const word& k)
{
scalar val(-GREAT);
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try
{
val = dict.get<scalar>(k);
Info<< "get<scalar>(" << k << ") = " << val << nl;
}
catch (Foam::IOerror& err)
{
Info<< "get<scalar>(" << k << ") Caught FatalIOError "
<< err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "get<scalar>(" << k << ") Caught FatalError "
<< err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
return val;
}
// Try with *entry (from findEntry) and get<scalar>
scalar try_getScalar(const entry* eptr, const word& k)
{
scalar val(-GREAT);
if (!eptr)
{
Info<< "No entry" << k << nl;
return val;
}
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try
{
val = eptr->get<scalar>();
Info<< "entry get<scalar>(" << k << ") = " << val << nl;
}
catch (Foam::IOerror& err)
{
Info<< "entry get<scalar>(" << k << ") Caught FatalIOError "
<< err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "entry get<scalar>(" << k << ") Caught FatalError "
<< err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
return val;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Main program:
@ -227,29 +323,9 @@ int main(int argc, char *argv[])
{
Info<< nl << "Test some bad input with readScalar()" << nl;
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try
{
scalar val1 = readScalar(dict2.lookup("good"));
// scalar val2 = readScalar(dict2.lookup("bad"));
scalar val2 = -1;
scalar val3 = readScalar(dict2.lookup("empty"));
Info<< "got good=" << val1 << " bad=" << val2
<< " empty=" << val3 << nl;
}
catch (Foam::IOerror& err)
{
Info<< "Caught FatalIOError " << err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "Caught FatalError " << err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
try_readScalar(dict2, "good");
// try_readScalar(dict2, "bad");
try_readScalar(dict2, "empty");
}
@ -257,29 +333,19 @@ int main(int argc, char *argv[])
{
Info<< nl << "Test some bad input with get<scalar>()" << nl;
const bool throwingIOError = FatalIOError.throwExceptions();
const bool throwingError = FatalError.throwExceptions();
try_getScalar(dict2, "good");
// try_getScalar(dict2, "bad");
try_getScalar(dict2, "empty");
}
try
// With findEntry and get<scalar>
{
scalar val1 = dict2.get<scalar>("good");
// scalar val2 = dict2.get<scalar>("bad");
scalar val2 = -1;
scalar val3 = dict2.get<scalar>("empty");
Info<< nl
<< "Test some bad input with findEntry + get<scalar>()" << nl;
Info<< "got good=" << val1 << " bad=" << val2
<< " empty=" << val3 << nl;
}
catch (Foam::IOerror& err)
{
Info<< "Caught FatalIOError " << err << nl << endl;
}
catch (Foam::error& err)
{
Info<< "Caught FatalError " << err << nl << endl;
}
FatalError.throwExceptions(throwingError);
FatalIOError.throwExceptions(throwingIOError);
try_getScalar(dict2.findEntry("good"), "good");
// try_getScalar(dict2.findEntry("bad"), "bad");
try_getScalar(dict2.findEntry("empty"), "empty");
}
}

View File

@ -47,6 +47,38 @@ using namespace Foam;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
unsigned testClean(std::initializer_list<Pair<std::string>> tests)
{
unsigned nFail = 0;
for (const Pair<std::string>& test : tests)
{
const std::string& input = test.first();
const std::string& expected = test.second();
fileName cleaned(test.first());
cleaned.clean();
if (cleaned == expected)
{
Info<< "(pass)"
<< " clean " << input << " -> " << cleaned << nl;
}
else
{
Info<< "(fail)"
<< " clean " << input << " -> " << cleaned
<< " expected=" << expected
<< nl;
++nFail;
}
}
return nFail;
}
unsigned testStrip
(
const bool doClean,
@ -184,6 +216,16 @@ unsigned testRelative(std::initializer_list<Pair<std::string>> tests)
}
void testDirname(const fileName& input)
{
Info<< "input:" << input
<< " path:" << input.path()
<< " name:\"" << input.name() << '"'
<< " ext:\"" << input.ext() << '"'
<< " components: " << flatOutput(input.components()) << nl;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Main program:
@ -196,6 +238,8 @@ int main(int argc, char *argv[])
argList::addBoolOption("relative", "test relative operations");
argList::addBoolOption("system", "test filesystem operations");
argList::addBoolOption("default", "reinstate default tests");
argList::addBoolOption("clean", "clean()");
argList::addBoolOption("dirname", "basename/dirname tables");
argList::addNote("runs default tests or specified ones only");
#include "setRootCase.H"
@ -254,6 +298,20 @@ int main(int argc, char *argv[])
Info<< "All ==> " << file4 << nl;
}
if (args.found("dirname"))
{
testDirname("");
testDirname(".");
testDirname("abc");
testDirname("/");
testDirname("/abc");
testDirname("abc/def");
testDirname("/abc/def");
testDirname("/abc/def/");
testDirname("/abc///def///");
testDirname("/abc/../def");
}
// Test various ext() methods
if (args.found("ext"))
@ -381,6 +439,35 @@ int main(int argc, char *argv[])
}
if (args.found("clean"))
{
Info<< nl << "Test fileName::clean()" << nl << nl;
unsigned nFail = testClean
({
{ "/", "/" },
{ "/abc/", "/abc" },
{ "/abc////def", "/abc/def" },
{ "/abc/def/./ghi/.", "/abc/def/ghi" },
{ "abc/def/./", "abc/def" },
{ "./abc/", "./abc" },
{ "/abc/def/../ghi/jkl/nmo/..", "/abc/ghi/jkl" },
{ "abc/../def/ghi/../jkl", "abc/../def/jkl" },
});
Info<< nl;
if (nFail)
{
Info<< "failed " << nFail;
}
else
{
Info<< "passed all";
}
Info<< " fileName::clean tests" << nl;
}
if (args.found("validate"))
{
unsigned nFail = 0;
@ -677,9 +764,27 @@ int main(int argc, char *argv[])
<< " controlDict => " << findEtcFile("controlDict") << nl
<< " badName => " << findEtcFile("badName") << endl;
Info<< "This should emit a fatal error:" << endl;
Info<< " badName(die) => " << findEtcFile("badName", true) << nl
{
Info<< nl << "Expect a FatalError for findEtcFile() with a bad name:"
<< nl;
const bool throwingError = FatalError.throwExceptions();
try
{
Info<< " badName(die) => " << flush
<< findEtcFile("<very-badName>", true) << nl
<< endl;
}
catch (Foam::error& err)
{
Info<< nl << "findEtcFile() Caught FatalError "
<< err << nl << endl;
}
FatalError.throwExceptions(throwingError);
}
Info<< "\nEnd\n" << endl;
return 0;

View File

@ -50,22 +50,17 @@ bool checkDictionaryContent(const dictionary& dict1, const dictionary& dict2)
}
forAllConstIter(dictionary, dict1, iter1)
for (const entry& entry1 : dict1)
{
const entry* entryPtr = dict2.lookupEntryPtr
(
iter1().keyword(),
false,
false
);
const entry* eptr =
dict2.findEntry(entry1.keyword(), keyType::LITERAL);
if (!entryPtr)
if (!eptr)
{
return false;
}
const entry& entry1 = iter1();
const entry& entry2 = *entryPtr;
const entry& entry2 = *eptr;
bool ok = false;
if (entry1.isDict())

View File

@ -48,12 +48,6 @@ using namespace Foam;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
bool notEqual(const scalar s1, const scalar s2, const scalar tol)
{
return mag(s1-s2) > tol;
}
// Main program:
int main(int argc, char *argv[])
{
@ -180,6 +174,8 @@ int main(int argc, char *argv[])
// Construct refiner. Read initial cell and point levels.
hexRef8 meshCutter(mesh);
// Comparison for inequality
const auto isNotEqual = notEqualOp<scalar>(1e-10);
while (runTime.loop())
{
@ -345,7 +341,7 @@ int main(int argc, char *argv[])
Info<< "Uniform one field min = " << min
<< " max = " << max << endl;
if (notEqual(max, 1.0, 1e-10) || notEqual(min, 1.0, 1e-10))
if (isNotEqual(min, 1) || isNotEqual(max, 1))
{
FatalErrorInFunction
<< "Uniform volVectorField not preserved."
@ -369,7 +365,7 @@ int main(int argc, char *argv[])
Info<< "Linear profile field min = " << min
<< " max = " << max << endl;
if (notEqual(max, 0.0, 1e-10) || notEqual(min, 0.0, 1e-10))
if (isNotEqual(min, 0) || isNotEqual(max, 0))
{
Info<< "Linear profile not preserved."
<< " Min and max should both be 0.0. min:" << min
@ -390,7 +386,7 @@ int main(int argc, char *argv[])
Info<< "Uniform surface field min = " << min
<< " max = " << max << endl;
if (notEqual(max, 1.0, 1e-10) || notEqual(min, 1.0, 1e-10))
if (isNotEqual(min, 1) || isNotEqual(max, 1))
{
FatalErrorInFunction
<< "Uniform surfaceScalarField not preserved."

View File

@ -55,7 +55,7 @@ int main(int argc, char *argv[])
// Create the pendulumAndSpring model from dictionary
rigidBodyMotion pendulumAndSpring(runTime, pendulumAndSpringDict);
label nIter(readLabel(pendulumAndSpringDict.lookup("nIter")));
label nIter(pendulumAndSpringDict.get<label>("nIter"));
Info<< pendulumAndSpring << endl;
Info<< "// Joint state " << endl;

View File

@ -54,7 +54,7 @@ int main(int argc, char *argv[])
// Create the sphericalJoint model from dictionary
rigidBodyMotion sphericalJoint(runTime, sphericalJointDict);
label nIter(readLabel(sphericalJointDict.lookup("nIter")));
label nIter(sphericalJointDict.get<label>("nIter"));
Info<< sphericalJoint << endl;

View File

@ -49,7 +49,7 @@ int main(int argc, char *argv[])
// Create the spring model from dictionary
rigidBodyMotion spring(springDict);
label nIter(readLabel(springDict.lookup("nIter")));
label nIter(springDict.get<label>("nIter"));
Info<< spring << endl;

View File

@ -33,6 +33,7 @@ Description
#include "nil.H"
#include "IOstreams.H"
#include "PstreamBuffers.H"
#include "argList.H"
#include "Time.H"
namespace Foam
@ -63,6 +64,13 @@ int main(int argc, char *argv[])
nil x;
cout<<"nil:" << sizeof(x) << nl;
}
{
argList x(argc, argv);
cout<<"argList:" << sizeof(x) << nl;
TimePaths y(x);
cout<<"TimePaths:" << sizeof(y) << nl;
}
{
zero x;
cout<<"zero:" << sizeof(x) << nl;

View File

@ -358,8 +358,7 @@ int main(int argc, char *argv[])
if (Pstream::parRun())
{
sourceCaseDir =
sourceCaseDir
/"processor" + Foam::name(Pstream::myProcNo());
sourceCaseDir/("processor" + Foam::name(Pstream::myProcNo()));
}
wordList sourcePatches;
dict.readEntry("sourcePatches", sourcePatches);

View File

@ -63,9 +63,13 @@ Foam::cellSizeFunction::cellSizeFunction
defaultCellSize_(defaultCellSize),
regionIndices_(regionIndices),
sideMode_(),
priority_(cellSizeFunctionDict.get<label>("priority", true))
priority_
(
cellSizeFunctionDict.get<label>("priority", keyType::REGEX_RECURSIVE)
)
{
const word mode = cellSizeFunctionDict.get<word>("mode", true);
const word mode =
cellSizeFunctionDict.get<word>("mode", keyType::REGEX_RECURSIVE);
if (surface_.hasVolumeType())
{

View File

@ -837,7 +837,7 @@ void Foam::conformalVoronoiMesh::checkCellSizing()
= dict.subDict("meshQualityControls");
const scalar maxNonOrtho =
meshQualityDict.get<scalar>("maxNonOrtho", true);
meshQualityDict.get<scalar>("maxNonOrtho", keyType::REGEX_RECURSIVE);
label nWrongFaces = 0;

View File

@ -339,7 +339,7 @@ Foam::conformationSurfaces::conformationSurfaces
{
const word& geomName = allGeometry_.names()[geomI];
const entry* ePtr = surfacesDict.lookupEntryPtr(geomName, false, true);
const entry* ePtr = surfacesDict.findEntry(geomName, keyType::REGEX);
if (ePtr)
{

View File

@ -125,7 +125,7 @@ autoPtr<refinementSurfaces> createRefinementSurfaces
{
const word& geomName = allGeometry.names()[geomi];
const entry* ePtr = surfacesDict.lookupEntryPtr(geomName, false, true);
const entry* ePtr = surfacesDict.findEntry(geomName, keyType::REGEX);
if (ePtr)
{

View File

@ -170,9 +170,10 @@ class dictAndKeyword
word key_;
public:
dictAndKeyword(const word& scopedName)
{
string::size_type i = scopedName.rfind('/');
auto i = scopedName.rfind('/');
if (i == string::npos)
{
i = scopedName.rfind('.');
@ -212,7 +213,7 @@ const dictionary& lookupScopedDict
return dict;
}
const entry* eptr = dict.lookupScopedEntryPtr(subDictName, false, false);
const entry* eptr = dict.findScoped(subDictName, keyType::LITERAL);
if (!eptr || !eptr->isDict())
{
@ -231,7 +232,7 @@ void removeDict(dictionary& dict, const dictionary& dictToRemove)
{
for (const entry& refEntry : dictToRemove)
{
auto finder = dict.search(refEntry.keyword(), false, false);
auto finder = dict.search(refEntry.keyword(), keyType::LITERAL);
bool purge = false;
@ -357,8 +358,7 @@ int main(int argc, char *argv[])
bool changed = false;
// Read but preserve headers
dictionary dict;
dict.read(dictFile(), true);
dictionary dict(dictFile(), true);
if (listIncludes)
{
@ -455,12 +455,7 @@ int main(int argc, char *argv[])
changed = true;
// Print the changed entry
const auto finder = dict.csearchScoped
(
scopedName,
false,
true // Support wildcards
);
const auto finder = dict.csearchScoped(scopedName, keyType::REGEX);
if (finder.found())
{
@ -489,8 +484,8 @@ int main(int argc, char *argv[])
const dictionary& d1(lookupScopedDict(dict, dAk.dict()));
const dictionary& d2(lookupScopedDict(diffDict, dAk.dict()));
const entry* e1Ptr = d1.lookupEntryPtr(dAk.key(), false, true);
const entry* e2Ptr = d2.lookupEntryPtr(dAk.key(), false, true);
const entry* e1Ptr = d1.findEntry(dAk.key(), keyType::REGEX);
const entry* e2Ptr = d2.findEntry(dAk.key(), keyType::REGEX);
if (e1Ptr && e2Ptr)
{
@ -509,12 +504,7 @@ int main(int argc, char *argv[])
}
}
const auto finder = dict.csearchScoped
(
scopedName,
false,
true // Support wildcards
);
const auto finder = dict.csearchScoped(scopedName, keyType::REGEX);
if (!finder.found())
{
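Throughout this utility the old (recursive, patternMatch) bool pairs on dictionary searches become a single keyType match option: LITERAL for exact keyword matches, REGEX to allow regex/wildcard keywords, and the *_RECURSIVE variants to also search parent dictionaries. A one-function sketch with an illustrative keyword:

#include "dictionary.H"

// Equivalent of the old lookupEntryPtr(key, true, true): regex matching,
// also searching parent dictionaries.
const Foam::entry* findMaxNonOrtho(const Foam::dictionary& dict)
{
    return dict.findEntry("maxNonOrtho", Foam::keyType::REGEX_RECURSIVE);
}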

View File

@ -589,7 +589,7 @@ int main(int argc, char *argv[])
(
Time::controlDictName,
args.rootPath(),
args.caseName()/fileName(word("processor") + name(proci))
args.caseName()/("processor" + Foam::name(proci))
);
processorDb.setTime(runTime);
@ -1016,7 +1016,7 @@ int main(int argc, char *argv[])
Time::controlDictName,
args.rootPath(),
args.caseName()
/fileName(word("processor") + name(proci))
/ ("processor" + Foam::name(proci))
)
);
}
@ -1374,8 +1374,8 @@ int main(int argc, char *argv[])
(
Time::controlDictName,
args.rootPath(),
args.caseName()/
fileName(word("processor") + name(procI))
args.caseName()
/ ("processor" + Foam::name(procI))
);
processorDb.setTime(runTime);
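Several hunks here and below simplify how per-processor case paths are built: the explicit fileName(word("processor") + name(proci)) wrapping is dropped, and the parentheses keep the string concatenation ahead of the '/' path join. A small sketch with a hypothetical helper:

#include "fileName.H"
#include "label.H"

// Join the case name with "processorN"; '+' concatenates the literal and the
// processor number before '/' appends the result as a path component.
Foam::fileName processorCase(const Foam::fileName& caseName, const Foam::label proci)
{
    return caseName/("processor" + Foam::name(proci));
}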

View File

@ -305,7 +305,7 @@ bool Foam::domainDecomposition::writeDecomposition(const bool decomposeSets)
fileName processorCasePath
(
time().caseName()/fileName(word("processor") + Foam::name(proci))
time().caseName()/("processor" + Foam::name(proci))
);
// create a database

View File

@ -50,7 +50,7 @@ void Foam::faMeshDecomposition::distributeFaces()
(
Time::controlDictName,
time().rootPath(),
time().caseName()/fileName(word("processor") + Foam::name(procI))
time().caseName()/("processor" + Foam::name(procI))
);
fvMesh procMesh
@ -250,7 +250,7 @@ void Foam::faMeshDecomposition::decomposeMesh()
(
Time::controlDictName,
time().rootPath(),
time().caseName()/fileName(word("processor") + Foam::name(procI))
time().caseName()/("processor" + Foam::name(procI))
);
fvMesh procFvMesh
@ -1059,8 +1059,7 @@ void Foam::faMeshDecomposition::decomposeMesh()
{
fileName processorCasePath
(
time().caseName()/fileName(word("processor")
+ Foam::name(procI))
time().caseName()/("processor" + Foam::name(procI))
);
// create a database
@ -1179,7 +1178,7 @@ bool Foam::faMeshDecomposition::writeDecomposition()
fileName processorCasePath
(
time().caseName()/fileName(word("processor") + Foam::name(procI))
time().caseName()/("processor" + Foam::name(procI))
);
// create a database

View File

@ -223,7 +223,7 @@ int main(int argc, char *argv[])
(
Time::controlDictName,
args.rootPath(),
args.caseName()/fileName(word("processor") + name(proci))
args.caseName()/("processor" + Foam::name(proci))
)
);
}

View File

@ -550,7 +550,7 @@ int main(int argc, char *argv[])
forAll(databases, proci)
{
Info<< "Reading database "
<< args.caseName()/fileName(word("processor") + name(proci))
<< args.caseName()/("processor" + Foam::name(proci))
<< endl;
databases.set
@ -560,7 +560,7 @@ int main(int argc, char *argv[])
(
Time::controlDictName,
args.rootPath(),
args.caseName()/fileName(word("processor") + name(proci))
args.caseName()/("processor" + Foam::name(proci))
)
);
}
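The per-processor database construction above now builds the case path by concatenating a string literal with Foam::name(proci) instead of wrapping it in fileName(word(...)). A sketch of that loop, assuming an OpenFOAM build; createProcessorDbs and nProcs are hypothetical names:

// Sketch only: one Time database per processor directory.
#include "argList.H"
#include "Time.H"
#include "PtrList.H"

using namespace Foam;

void createProcessorDbs
(
    const argList& args,
    const label nProcs,
    PtrList<Time>& databases        // filled by this helper
)
{
    databases.setSize(nProcs);

    forAll(databases, proci)
    {
        databases.set
        (
            proci,
            new Time
            (
                Time::controlDictName,
                args.rootPath(),
                // "processor" + Foam::name(proci) replaces the older
                // fileName(word("processor") + name(proci)) form
                args.caseName()/("processor" + Foam::name(proci))
            )
        );
    }
}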

View File

@ -336,8 +336,7 @@ void determineDecomposition
{
Info<< "Setting caseName to " << baseRunTime.caseName()
<< " to read decomposeParDict" << endl;
const_cast<Time&>(mesh.time()).TimePaths::caseName() =
baseRunTime.caseName();
const_cast<Time&>(mesh.time()).caseName() = baseRunTime.caseName();
}
scalarField cellWeights;
@ -366,8 +365,7 @@ void determineDecomposition
if (Pstream::master() && decompose)
{
Info<< "Restoring caseName to " << proc0CaseName << endl;
const_cast<Time&>(mesh.time()).TimePaths::caseName() =
proc0CaseName;
const_cast<Time&>(mesh.time()).caseName() = proc0CaseName;
}
// Dump decomposition to volScalarField
@ -383,10 +381,10 @@ void determineDecomposition
Time& tm = const_cast<Time&>(mesh.time());
tm.TimePaths::caseName() = baseRunTime.caseName();
tm.caseName() = baseRunTime.caseName();
writeDecomposition("cellDist", mesh, decomp);
Info<< "Restoring caseName to " << proc0CaseName << endl;
tm.TimePaths::caseName() = proc0CaseName;
tm.caseName() = proc0CaseName;
}
}
else
@ -908,12 +906,12 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// Get original objects (before incrementing time!)
if (Pstream::master() && decompose)
{
runTime.TimePaths::caseName() = baseRunTime.caseName();
runTime.caseName() = baseRunTime.caseName();
}
IOobjectList objects(mesh, runTime.timeName());
if (Pstream::master() && decompose)
{
runTime.TimePaths::caseName() = proc0CaseName;
runTime.caseName() = proc0CaseName;
}
Info<< "From time " << runTime.timeName()
@ -932,7 +930,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
if (Pstream::master() && decompose)
{
runTime.TimePaths::caseName() = baseRunTime.caseName();
runTime.caseName() = baseRunTime.caseName();
}
readFields
(
@ -1112,7 +1110,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
if (Pstream::master() && decompose)
{
runTime.TimePaths::caseName() = proc0CaseName;
runTime.caseName() = proc0CaseName;
}
}
@ -1192,7 +1190,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
{
Info<< "Setting caseName to " << baseRunTime.caseName()
<< " to write reconstructed mesh and fields." << endl;
runTime.TimePaths::caseName() = baseRunTime.caseName();
runTime.caseName() = baseRunTime.caseName();
mesh.write();
topoSet::removeFiles(mesh);
@ -1212,7 +1210,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// Now we've written all. Reset caseName on master
Info<< "Restoring caseName to " << proc0CaseName << endl;
runTime.TimePaths::caseName() = proc0CaseName;
runTime.caseName() = proc0CaseName;
}
}
else
@ -1258,7 +1256,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// Read refinement data
if (Pstream::master() && decompose)
{
runTime.TimePaths::caseName() = baseRunTime.caseName();
runTime.caseName() = baseRunTime.caseName();
}
IOobject io
(
@ -1274,7 +1272,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
hexRef8Data refData(io);
if (Pstream::master() && decompose)
{
runTime.TimePaths::caseName() = proc0CaseName;
runTime.caseName() = proc0CaseName;
}
// Make sure all processors have valid data (since only some will
@ -1294,13 +1292,13 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
{
Info<< "Setting caseName to " << baseRunTime.caseName()
<< " to write reconstructed refinement data." << endl;
runTime.TimePaths::caseName() = baseRunTime.caseName();
runTime.caseName() = baseRunTime.caseName();
refData.write();
// Now we've written all. Reset caseName on master
Info<< "Restoring caseName to " << proc0CaseName << endl;
runTime.TimePaths::caseName() = proc0CaseName;
runTime.caseName() = proc0CaseName;
}
}
else
@ -1314,7 +1312,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// // Read sets
// if (Pstream::master() && decompose)
// {
// runTime.TimePaths::caseName() = baseRunTime.caseName();
// runTime.caseName() = baseRunTime.caseName();
// }
// IOobjectList objects(mesh, mesh.facesInstance(), "polyMesh/sets");
//
@ -1323,7 +1321,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
//
// if (Pstream::master() && decompose)
// {
// runTime.TimePaths::caseName() = proc0CaseName;
// runTime.caseName() = proc0CaseName;
// }
//
// forAll(cellSets, i)
@ -1337,7 +1335,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
// {
// Info<< "Setting caseName to " << baseRunTime.caseName()
// << " to write reconstructed refinement data." << endl;
// runTime.TimePaths::caseName() = baseRunTime.caseName();
// runTime.caseName() = baseRunTime.caseName();
//
// forAll(cellSets, i)
// {
@ -1346,7 +1344,7 @@ autoPtr<mapDistributePolyMesh> redistributeAndWrite
//
// // Now we've written all. Reset caseName on master
// Info<< "Restoring caseName to " << proc0CaseName << endl;
// runTime.TimePaths::caseName() = proc0CaseName;
// runTime.caseName() = proc0CaseName;
// }
// }
// else
@ -2931,7 +2929,7 @@ int main(int argc, char *argv[])
{
Info<< "Setting caseName to " << baseRunTime.caseName()
<< " to find undecomposed mesh" << endl;
runTime.TimePaths::caseName() = baseRunTime.caseName();
runTime.caseName() = baseRunTime.caseName();
}
masterInstDir = runTime.findInstance
@ -2944,7 +2942,7 @@ int main(int argc, char *argv[])
if (decompose)
{
Info<< "Restoring caseName to " << proc0CaseName << endl;
runTime.TimePaths::caseName() = proc0CaseName;
runTime.caseName() = proc0CaseName;
}
}
Pstream::scatter(masterInstDir);
@ -2970,7 +2968,7 @@ int main(int argc, char *argv[])
{
Info<< "Setting caseName to " << baseRunTime.caseName()
<< " to read undecomposed mesh" << endl;
runTime.TimePaths::caseName() = baseRunTime.caseName();
runTime.caseName() = baseRunTime.caseName();
}
autoPtr<fvMesh> meshPtr = loadOrCreateMesh
@ -2987,7 +2985,7 @@ int main(int argc, char *argv[])
if (Pstream::master() && decompose)
{
Info<< "Restoring caseName to " << proc0CaseName << endl;
runTime.TimePaths::caseName() = proc0CaseName;
runTime.caseName() = proc0CaseName;
}
fvMesh& mesh = meshPtr();
@ -3046,7 +3044,7 @@ int main(int argc, char *argv[])
// Detect lagrangian fields
if (Pstream::master() && decompose)
{
runTime.TimePaths::caseName() = baseRunTime.caseName();
runTime.caseName() = baseRunTime.caseName();
}
parLagrangianRedistributor::findClouds
(
@ -3069,7 +3067,7 @@ int main(int argc, char *argv[])
);
if (Pstream::master() && decompose)
{
runTime.TimePaths::caseName() = proc0CaseName;
runTime.caseName() = proc0CaseName;
}
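The redistributePar changes drop the TimePaths:: qualification and assign through Time::caseName() directly. As an illustration only, a hypothetical RAII guard that switches the case name and restores it on scope exit (not part of the patch):

// Sketch only: hypothetical helper, not part of the patch.
#include "Time.H"

using namespace Foam;

class caseNameGuard
{
    Time& time_;
    const fileName saved_;

public:

    caseNameGuard(Time& runTime, const fileName& temporaryName)
    :
        time_(runTime),
        saved_(runTime.caseName())
    {
        time_.caseName() = temporaryName;   // e.g. the undecomposed case
    }

    ~caseNameGuard()
    {
        time_.caseName() = saved_;          // restore original case name
    }
};

With such a guard, the paired "Setting caseName"/"Restoring caseName" blocks above could collapse into a single scoped object.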

View File

@ -88,7 +88,7 @@ int main(int argc, char *argv[])
const label nProcs = fileHandler().nProcs(args.path());
#else
label nProcs = 0;
while (isDir(args.path()/(word("processor") + name(nProcs))))
while (isDir(args.path()/("processor" + Foam::name(nProcs))))
{
++nProcs;
}
@ -106,7 +106,7 @@ int main(int argc, char *argv[])
(
Time::controlDictName,
args.rootPath(),
args.caseName()/fileName(word("processor") + name(proci))
args.caseName()/("processor" + Foam::name(proci))
)
);
}
@ -183,7 +183,7 @@ int main(int argc, char *argv[])
// Assumed to be good if it has 'profiling' sub-dict
const dictionary* ptr = dict.subDictPtr(blockNameProfiling);
const dictionary* ptr = dict.findDict(blockNameProfiling);
if (ptr)
{
++nDict;
@ -295,13 +295,12 @@ int main(int argc, char *argv[])
for (const dictionary& procDict : profiles)
{
const dictionary* inDictPtr =
procDict.subDictPtr(level1Name);
const dictionary* inDictPtr = procDict.findDict(level1Name);
if (inDictPtr && hasDictEntries)
{
// descend to the next level as required
inDictPtr = inDictPtr->subDictPtr(level2Name);
// Descend to the next level as required
inDictPtr = inDictPtr->findDict(level2Name);
}
if (!inDictPtr)
@ -313,16 +312,13 @@ int main(int argc, char *argv[])
for (const word& tag : tags)
{
const entry* eptr = inDictPtr->lookupEntryPtr
(
tag,
false,
false
);
scalar val;
if (eptr)
if
(
inDictPtr->readIfPresent(tag, val, keyType::LITERAL)
)
{
const scalar val = readScalar(eptr->stream());
stats(tag).append(val);
}
}
@ -339,7 +335,7 @@ int main(int argc, char *argv[])
if (hasDictEntries)
{
outputDict.add(level2Name, level1Dict.subDict(level2Name));
outDictPtr = outputDict.subDictPtr(level2Name);
outDictPtr = outputDict.findDict(level2Name);
}
else
{
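The profiling summary above switches from subDictPtr/lookupEntryPtr to findDict and readIfPresent with literal matching. A sketch of that access pattern, assuming an OpenFOAM build; the dictionary layout and the "totalTime" tag are placeholders:

// Sketch only: findDict + readIfPresent access pattern.
#include "dictionary.H"
#include "messageStream.H"

using namespace Foam;

void sumProfilingTimes(const dictionary& procDict)
{
    // findDict replaces subDictPtr and returns nullptr when absent
    const dictionary* profPtr = procDict.findDict("profiling");

    if (!profPtr)
    {
        return;
    }

    scalar total = 0;

    for (const entry& e : *profPtr)
    {
        if (e.isDict())
        {
            scalar val;

            // readIfPresent with a literal (non-regex) match replaces
            // lookupEntryPtr(tag, false, false) + readScalar
            if (e.dict().readIfPresent("totalTime", val, keyType::LITERAL))
            {
                total += val;
            }
        }
    }

    Info<< "total = " << total << nl;
}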

View File

@ -235,10 +235,9 @@ bool merge
// Save current (non-wildcard) keys before adding items.
wordHashSet thisKeysSet;
{
List<keyType> keys = thisDict.keys(false);
forAll(keys, i)
for (const word& k : thisDict.keys(false))
{
thisKeysSet.insert(keys[i]);
thisKeysSet.insert(k);
}
}
@ -261,25 +260,20 @@ bool merge
}
else if (literalRE || !(key.isPattern() || shortcuts.found(key)))
{
entry* entryPtr = thisDict.lookupEntryPtr
(
key,
false, // recursive
false // patternMatch
);
entry* eptr = thisDict.findEntry(key, keyType::LITERAL);
if (entryPtr)
if (eptr)
{
// Mark thisDict entry as having been matched for wildcard
// handling later on.
thisKeysSet.erase(entryPtr->keyword());
thisKeysSet.erase(eptr->keyword());
if
(
addEntry
(
thisDict,
*entryPtr,
*eptr,
mergeIter(),
literalRE,
shortcuts
@ -310,7 +304,7 @@ bool merge
// Pass 2. Wildcard or shortcut matches (if any) on any non-match keys.
if (!literalRE && thisKeysSet.size() > 0)
if (!literalRE && thisKeysSet.size())
{
// Pick up remaining dictionary entries
wordList thisKeys(thisKeysSet.toc());
@ -336,10 +330,10 @@ bool merge
);
// Remove all matches
forAll(matches, i)
for (const label matchi : matches)
{
const word& thisKey = thisKeys[matches[i]];
thisKeysSet.erase(thisKey);
const word& k = thisKeys[matchi];
thisKeysSet.erase(k);
}
changed = true;
}
@ -358,21 +352,18 @@ bool merge
);
// Add all matches
forAll(matches, i)
for (const label matchi : matches)
{
const word& thisKey = thisKeys[matches[i]];
const word& k = thisKeys[matchi];
entry& thisEntry = const_cast<entry&>
(
thisDict.lookupEntry(thisKey, false, false)
);
entry* eptr = thisDict.findEntry(k, keyType::LITERAL);
if
(
addEntry
(
thisDict,
thisEntry,
*eptr,
mergeIter(),
literalRE,
HashTable<wordList>(0) // no shortcuts
@ -627,8 +618,7 @@ int main(int argc, char *argv[])
fieldDict.lookupEntry
(
doneKeys[i],
false,
true
keyType::REGEX
).clone()
);
fieldDict.remove(doneKeys[i]);
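In the changeDictionary merge above, lookupEntryPtr(key, false, false) becomes findEntry(key, keyType::LITERAL), with keyType::REGEX used where wildcard support is wanted. A small sketch contrasting the two match modes, assuming an OpenFOAM build; the stream contents are made up:

// Sketch only: literal vs. regex keyword matching.
#include "dictionary.H"
#include "IStringStream.H"
#include "messageStream.H"

using namespace Foam;

int main()
{
    IStringStream is("inlet { value 1; } \"(inlet|outlet)\" { value 2; }");
    dictionary dict(is);

    // LITERAL: only an exact keyword match, pattern entries are ignored
    entry* e1 = dict.findEntry("inlet", keyType::LITERAL);

    // REGEX: pattern entries such as "(inlet|outlet)" may also match
    entry* e2 = dict.findEntry("outlet", keyType::REGEX);

    Info<< "literal found: " << bool(e1)
        << "  regex found: " << bool(e2) << nl;

    return 0;
}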

View File

@ -396,7 +396,7 @@ int main(int argc, char *argv[])
(
Time::controlDictName,
rootDirSource,
caseDirSource/fileName(word("processor") + name(proci))
caseDirSource/("processor" + Foam::name(proci))
);
#include "setTimeIndex.H"
@ -471,7 +471,7 @@ int main(int argc, char *argv[])
(
Time::controlDictName,
rootDirTarget,
caseDirTarget/fileName(word("processor") + name(proci))
caseDirTarget/("processor" + Foam::name(proci))
);
fvMesh meshTarget
@ -536,7 +536,7 @@ int main(int argc, char *argv[])
(
Time::controlDictName,
rootDirSource,
caseDirSource/fileName(word("processor") + name(procISource))
caseDirSource/("processor" + Foam::name(procISource))
);
#include "setTimeIndex.H"
@ -572,8 +572,7 @@ int main(int argc, char *argv[])
(
Time::controlDictName,
rootDirTarget,
caseDirTarget/fileName(word("processor")
+ name(procITarget))
caseDirTarget/("processor" + Foam::name(procITarget))
);
fvMesh meshTarget

View File

@ -9,74 +9,74 @@ FoamFile
{
version 2.0;
format ascii;
class IOPtrList<coordinateSystem>;
class coordinateSystems;
object coordinateSystems;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
7
(
system_9
_9
{
type cartesian;
origin (1.03291515 -0.114391257 -0.0826236662);
e3 (1 0 0);
e1 (0 1 0);
// STARCDRotation (0 90 90);
e3 (1 0 0);
// rotation { type starcd; angles (0 90 90); }
}
system_10
_10
{
type cartesian;
origin (0.623151719 -0.286472935 -0.113933262);
e3 (0.99508851 0.09829095 0.01173645);
e1 (0.01179356 0 -0.99993045);
// STARCDRotation (5.6403745 -0.0664172952 89.3275351);
e3 (0.99508851 0.09829095 0.01173645);
// rotation { type starcd; angles (5.6403745 -0.0664172952 89.3275351); }
}
system_15
_15
{
type cartesian;
origin (0.644772231 -0.240036493 0.155972187);
e3 (-0.01346388 -0.90616979 -0.42269969);
e1 (0.00627978 0.42265304 -0.90626981);
// STARCDRotation (-90.8512386 0 115.005148);
e3 (-0.01346388 -0.90616979 -0.42269969);
// rotation { type starcd; angles (-90.8512386 0 115.005148); }
}
system_16
_16
{
type cartesian;
origin (0.540824938 -0.240036415 0.15928296);
e3 (-0.01346388 -0.90616979 -0.42269969);
e1 (0.00627978 0.42265304 -0.90626981);
// STARCDRotation (-90.8512386 0 115.005148);
e3 (-0.01346388 -0.90616979 -0.42269969);
// rotation { type starcd; angles (-90.8512386 0 115.005148); }
}
system_17
_17
{
type cartesian;
origin (0.436877646 -0.240036339 0.162593737);
e3 (-0.01346388 -0.90616979 -0.42269969);
e1 (0.00627978 0.42265304 -0.90626981);
// STARCDRotation (-90.8512386 0 115.005148);
e3 (-0.01346388 -0.90616979 -0.42269969);
// rotation { type starcd; angles (-90.8512386 0 115.005148); }
}
system_18
_18
{
type cartesian;
origin (0.332930354 -0.240036261 0.16590451);
e3 (-0.01346388 -0.90616979 -0.42269969);
e1 (0.00627978 0.42265304 -0.90626981);
// STARCDRotation (-90.8512386 0 115.005148);
e3 (-0.01346388 -0.90616979 -0.42269969);
// rotation { type starcd; angles (-90.8512386 0 115.005148); }
}
system_21
_21
{
type cartesian;
origin (0.55863733 -0.300866705 0.00317260982);
e3 (0.42110287 0.02470132 -0.90667647);
e1 (0.90646036 0.02342535 0.42164069);
// STARCDRotation (-178.185897 -0.71772221 -155.059695);
e3 (0.42110287 0.02470132 -0.90667647);
// rotation { type starcd; angles (-178.185897 -0.71772221 -155.059695); }
}
)

View File

@ -66,6 +66,7 @@ Note
#include "MeshedSurfaces.H"
#include "coordinateSystems.H"
#include "cartesianCS.H"
using namespace Foam;
@ -146,9 +147,9 @@ int main(int argc, char *argv[])
}
// get the coordinate transformations
autoPtr<coordinateSystem> fromCsys;
autoPtr<coordinateSystem> toCsys;
// The coordinate transformations (must be cartesian)
autoPtr<coordSystem::cartesian> fromCsys;
autoPtr<coordSystem::cartesian> toCsys;
if (args.found("from") || args.found("to"))
{
@ -174,43 +175,44 @@ int main(int argc, char *argv[])
<< exit(FatalError);
}
coordinateSystems csLst(ioCsys);
coordinateSystems globalCoords(ioCsys);
if (args.found("from"))
{
const word csName = args["from"];
const word csName(args["from"]);
const auto* csPtr = globalCoords.lookupPtr(csName);
const label csIndex = csLst.findIndex(csName);
if (csIndex < 0)
if (!csPtr)
{
FatalErrorInFunction
<< "Cannot find -from " << csName << nl
<< "available coordinateSystems: " << csLst.toc() << nl
<< "available coordinateSystems: "
<< flatOutput(globalCoords.names()) << nl
<< exit(FatalError);
}
fromCsys.reset(new coordinateSystem(csLst[csIndex]));
fromCsys = autoPtr<coordSystem::cartesian>::New(*csPtr);
}
if (args.found("to"))
{
const word csName = args["to"];
const word csName(args["to"]);
const auto* csPtr = globalCoords.lookupPtr(csName);
const label csIndex = csLst.findIndex(csName);
if (csIndex < 0)
if (!csPtr)
{
FatalErrorInFunction
<< "Cannot find -to " << csName << nl
<< "available coordinateSystems: " << csLst.toc() << nl
<< "available coordinateSystems: "
<< flatOutput(globalCoords.names()) << nl
<< exit(FatalError);
}
toCsys.reset(new coordinateSystem(csLst[csIndex]));
toCsys = autoPtr<coordSystem::cartesian>::New(*csPtr);
}
// maybe fix this later
if (fromCsys.valid() && toCsys.valid())
// Maybe fix this later
if (fromCsys && toCsys)
{
FatalErrorInFunction
<< "Only allowed '-from' or '-to' option at the moment."
@ -230,29 +232,30 @@ int main(int argc, char *argv[])
scalar scaleIn = 0;
if (args.readIfPresent("scaleIn", scaleIn) && scaleIn > 0)
{
Info<< " -scaleIn " << scaleIn << endl;
Info<< "scale input " << scaleIn << endl;
surf.scalePoints(scaleIn);
}
if (fromCsys.valid())
if (fromCsys)
{
Info<< " -from " << fromCsys().name() << endl;
tmp<pointField> tpf = fromCsys().localPosition(surf.points());
Info<< "move points from coordinate system: "
<< fromCsys->name() << endl;
tmp<pointField> tpf = fromCsys->localPosition(surf.points());
surf.movePoints(tpf());
}
if (toCsys.valid())
if (toCsys)
{
Info<< " -to " << toCsys().name() << endl;
tmp<pointField> tpf = toCsys().globalPosition(surf.points());
Info<< "move points to coordinate system: "
<< toCsys->name() << endl;
tmp<pointField> tpf = toCsys->globalPosition(surf.points());
surf.movePoints(tpf());
}
scalar scaleOut = 0;
if (args.readIfPresent("scaleOut", scaleOut) && scaleOut > 0)
{
Info<< " -scaleOut " << scaleOut << endl;
Info<< "scale output " << scaleOut << endl;
surf.scalePoints(scaleOut);
}
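The surfaceTransformPoints changes look coordinate systems up by pointer (lookupPtr) and transform points with localPosition/globalPosition. A sketch of the lookup-and-transform step, assuming an OpenFOAM build; toGlobal is a hypothetical helper name:

// Sketch only: hypothetical helper built from the calls shown above.
#include "coordinateSystems.H"
#include "pointField.H"
#include "flatOutput.H"

using namespace Foam;

tmp<pointField> toGlobal
(
    const coordinateSystems& globalCoords,
    const word& csName,
    const pointField& localPoints
)
{
    // lookupPtr returns nullptr rather than failing for an unknown name
    const auto* csPtr = globalCoords.lookupPtr(csName);

    if (!csPtr)
    {
        FatalErrorInFunction
            << "Cannot find coordinate system " << csName << nl
            << "available coordinateSystems: "
            << flatOutput(globalCoords.names()) << nl
            << exit(FatalError);
    }

    // globalPosition: local frame -> global frame
    // (localPosition performs the inverse mapping)
    return csPtr->globalPosition(localPoints);
}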

Some files were not shown because too many files have changed in this diff.