STYLE: use range-for when looping dictionary entries.

- as part of the cleanup of dictionary access methods (c6520033c9),
  the dictionary class was changed to single inheritance from IDLList<entry>.

  This eliminates any ambiguities for iterators and allows
  for simple use of range-for looping.

  E.g.,
      for (const entry& e : topDict)
      {
          Info<< "entry:" << e.keyword() << " is dict:" << e.isDict() << nl;
      }

   vs

      forAllConstIter(dictionary, topDict, iter)
      {
          Info<< "entry:" << iter().keyword()
              << " is dict:" << iter().isDict() << nl;
      }
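
  A minimal sketch of the idiom applied throughout this commit (dispatch on
  sub-dictionary vs primitive entries); 'someDict' is a placeholder name and
  the usual Foam dictionary/Info declarations are assumed:

      for (const entry& dEntry : someDict)  // 'someDict': any Foam::dictionary (placeholder)
      {
          const keyType& key = dEntry.keyword();

          if (dEntry.isDict())
          {
              // Sub-dictionary entry: contents accessible via dict()
              const dictionary& subDict = dEntry.dict();
              Info<< "dict: " << key << " (" << subDict.size() << " entries)" << nl;
          }
          else if (dEntry.isStream())
          {
              // Primitive entry: tokens accessible via stream()
              Info<< "primitive: " << key << nl;
          }
      }

  The same loop written with a non-const dictionary and 'entry& dEntry'
  permits in-place modification, as used for the patchFields handling below.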

Author:  Mark Olesen
Date:    2018-10-19 13:08:24 +02:00
Parent:  4e04c1966f
Commit:  07dafe7b0b

60 changed files with 636 additions and 653 deletions


@ -25,12 +25,7 @@
PtrList<fvScalarMatrix> p_rghEqnComps(mixture.phases().size());
label phasei = 0;
forAllConstIter
(
PtrDictionary<phaseModel>,
mixture.phases(),
phase
)
forAllConstIters(mixture.phases(), phase)
{
const rhoThermo& thermo = phase().thermo();
const volScalarField& rho = thermo.rho()();
@ -61,12 +56,7 @@
tmp<fvScalarMatrix> p_rghEqnComp;
phasei = 0;
forAllConstIter
(
PtrDictionary<phaseModel>,
mixture.phases(),
phase
)
forAllConstIters(mixture.phases(), phase)
{
tmp<fvScalarMatrix> hmm
(


@ -35,12 +35,7 @@
- fvm::laplacian(rAUf, p_rgh)
);
forAllConstIter
(
phaseSystem::phasePairTable,
fluid.totalPhasePairs(),
iter
)
forAllConstIters(fluid.totalPhasePairs(), iter)
{
const phasePair& pair = iter()();


@ -202,7 +202,7 @@ void Foam::MultiComponentPhaseModel<BasePhaseModel, phaseThermo>::solveYi
surfaceScalarField phir(0.0*phi);
forAllConstIter(phaseSystem::phaseModelTable,this->fluid().phases(),iter2)
forAllConstIters(this->fluid().phases(),iter2)
{
const volScalarField& alpha2 = iter2();
if (&alpha2 == &alpha1)
@ -251,10 +251,7 @@ void Foam::MultiComponentPhaseModel<BasePhaseModel, phaseThermo>::solveYi
surfaceScalarField& phiYiCorr = phiYiCorrs[i];
forAllConstIter
(
phaseSystem::phaseModelTable, this->fluid().phases(), iter2
)
forAllConstIters(this->fluid().phases(), iter2)
{
//const volScalarField& alpha2 = iter2()().oldTime();
const volScalarField& alpha2 = iter2();


@ -125,15 +125,8 @@ massTransfer() const
this->massTransferModels_[key][phase.name()]->K()
);
forAllConstIter
(
hashedWordList,
compositionModel.species(),
memberIter
)
for (const word& member : compositionModel.species())
{
const word& member = *memberIter;
const word name
(
IOobject::groupName(member, phase.name())


@ -157,14 +157,14 @@ Foam::phaseSystem::phaseSystem
phi_.writeOpt() = IOobject::AUTO_WRITE;
// Blending methods
forAllConstIter(dictionary, subDict("blending"), iter)
for (const entry& dEntry : subDict("blending"))
{
blendingMethods_.insert
(
iter().dict().dictName(),
dEntry.dict().dictName(),
blendingMethod::New
(
iter().dict(),
dEntry.dict(),
phaseModels_.toc()
)
);


@ -113,14 +113,14 @@ Foam::twoPhaseSystem::twoPhaseSystem
// Blending
forAllConstIter(dictionary, subDict("blending"), iter)
for (const entry& dEntry : subDict("blending"))
{
blendingMethods_.insert
(
iter().dict().dictName(),
dEntry.dict().dictName(),
blendingMethod::New
(
iter().dict(),
dEntry.dict(),
wordList(lookup("phases"))
)
);


@ -182,11 +182,11 @@ int main(int argc, char *argv[])
dictionary inputDict(is);
forAllConstIters(inputDict, iter)
for (const entry& dEntry : inputDict)
{
if (iter().isDict())
if (dEntry.isDict())
{
doTest(runTime, iter().dict());
doTest(runTime, dEntry.dict());
}
}
}
@ -204,11 +204,11 @@ int main(int argc, char *argv[])
dictionary inputDict(is);
forAllConstIters(inputDict, iter)
for (const entry& dEntry : inputDict)
{
if (iter().isDict())
if (dEntry.isDict())
{
doTest(iter().dict());
doTest(dEntry.dict());
}
}
}


@ -650,17 +650,19 @@ int main(int argc, char *argv[])
// Per faceSet the patch to put the coupled baffles into
DynamicList<FixedList<word, 3>> coupledAndPatches(10);
const dictionary& functionDicts = dict.subDict("coupledFaces");
forAllConstIter(dictionary, functionDicts, iter)
for (const entry& dEntry : functionDicts)
{
// safety:
if (!iter().isDict())
if (!dEntry.isDict()) // Safety
{
continue;
}
const word& key = iter().keyword();
const dictionary& dict = iter().dict();
const word& key = dEntry.keyword();
const dictionary& dict = dEntry.dict();
const word cyclicName = dict.get<word>("cyclicMasterPatch");
const word wallName = dict.get<word>("wallPatch");
FixedList<word, 3> nameAndType;


@ -93,14 +93,10 @@ Foam::cellSizeAndAlignmentControls::cellSizeAndAlignmentControls
{
label functionI = 0;
forAllConstIter(dictionary, shapeControlDict_, iter)
for (const entry& dEntry : shapeControlDict_)
{
word shapeControlEntryName = iter().keyword();
const dictionary& controlFunctionDict
(
shapeControlDict_.subDict(shapeControlEntryName)
);
const word& shapeControlEntryName = dEntry.keyword();
const dictionary& controlFunctionDict = dEntry.dict();
Info<< nl << "Shape Control : " << shapeControlEntryName << endl;
Info<< incrIndent;


@ -535,16 +535,12 @@ Foam::conformationSurfaces::conformationSurfaces
Info<< nl << "Reading additionalFeatures" << endl;
}
forAllConstIter(dictionary, additionalFeaturesDict, iter)
for (const entry& dEntry : additionalFeaturesDict)
{
word featureName = iter().keyword();
const word& featureName = dEntry.keyword();
const dictionary& featureSubDict = dEntry.dict();
Info<< nl << " " << iter().keyword() << endl;
const dictionary& featureSubDict
(
additionalFeaturesDict.subDict(featureName)
);
Info<< nl << " " << featureName << endl;
readFeatures(featureSubDict, featureName, featureI);
}


@ -474,27 +474,24 @@ int main(int argc, char *argv[])
const dictionary& selectionsDict = dict.subDict("baffles");
label n = 0;
forAllConstIter(dictionary, selectionsDict, iter)
selectors.resize(selectionsDict.size());
label nselect = 0;
for (const entry& dEntry : selectionsDict)
{
if (iter().isDict())
{
n++;
}
}
selectors.setSize(n);
n = 0;
forAllConstIter(dictionary, selectionsDict, iter)
{
if (iter().isDict())
if (dEntry.isDict())
{
selectors.set
(
n++,
faceSelection::New(iter().keyword(), mesh, iter().dict())
nselect,
faceSelection::New(dEntry.keyword(), mesh, dEntry.dict())
);
++nselect;
}
}
selectors.resize(nselect);
}
@ -641,10 +638,9 @@ int main(int argc, char *argv[])
if (dict.found("patches"))
{
const dictionary& patchSources = dict.subDict("patches");
forAllConstIter(dictionary, patchSources, iter)
for (const entry& dEntry : dict.subDict("patches"))
{
const word patchName(iter().dict().get<word>("name"));
const word patchName(dEntry.dict().get<word>("name"));
bafflePatches.insert(patchName);
}
@ -687,14 +683,15 @@ int main(int argc, char *argv[])
if (dict.found("patches"))
{
const dictionary& patchSources = dict.subDict("patches");
forAllConstIter(dictionary, patchSources, iter)
for (const entry& dEntry : dict.subDict("patches"))
{
const word patchName(iter().dict().get<word>("name"));
const dictionary& dict = dEntry.dict();
const word patchName(dict.get<word>("name"));
if (pbm.findPatchID(patchName) == -1)
{
dictionary patchDict = iter().dict();
dictionary patchDict = dict;
patchDict.set("nFaces", 0);
patchDict.set("startFace", 0);
@ -789,13 +786,14 @@ int main(int argc, char *argv[])
if (dict.found("patches"))
{
const dictionary& patchSources = dict.subDict("patches");
bool master = true;
forAllConstIter(dictionary, patchSources, iter)
for (const entry& dEntry : dict.subDict("patches"))
{
const word patchName(iter().dict().get<word>("name"));
label patchi = pbm.findPatchID(patchName);
const word patchName(dEntry.dict().get<word>("name"));
const label patchi = pbm.findPatchID(patchName);
if (master)
{
newMasterPatches.append(patchi);
@ -885,17 +883,18 @@ int main(int argc, char *argv[])
const dictionary& dict = selectors[selectorI].dict();
if (dict.found("patches"))
{
const dictionary& patchSources = dict.subDict("patches");
forAllConstIter(dictionary, patchSources, iter)
for (const entry& dEntry : dict.subDict("patches"))
{
const word patchName(iter().dict().get<word>("name"));
const dictionary& dict = dEntry.dict();
const word patchName(dict.get<word>("name"));
label patchi = pbm.findPatchID(patchName);
if (iter().dict().found("patchFields"))
if (dEntry.dict().found("patchFields"))
{
const dictionary& patchFieldsDict =
iter().dict().subDict
dEntry.dict().subDict
(
"patchFields"
);
@ -928,11 +927,11 @@ int main(int argc, char *argv[])
if (sameGroup)
{
// Add coupleGroup to all entries
forAllIter(dictionary, patchFieldsDict, iter)
for (entry& dEntry : patchFieldsDict)
{
if (iter().isDict())
if (dEntry.isDict())
{
dictionary& dict = iter().dict();
dictionary& dict = dEntry.dict();
dict.set("coupleGroup", groupName);
}
}


@ -292,16 +292,17 @@ int main(int argc, char *argv[])
// Suppress duplicate names
wordHashSet requestedPatches;
forAllConstIters(stitchDict, iter)
for (const entry& dEntry : stitchDict)
{
if (!iter().isDict())
if (!dEntry.isDict())
{
Info<< "Ignoring non-dictionary entry: "
<< iter().keyword() << nl;
<< dEntry.keyword() << nl;
continue;
}
const dictionary& dict = iter().dict();
const keyType& key = dEntry.keyword();
const dictionary& dict = dEntry.dict();
// Match type
word matchName;
@ -358,7 +359,7 @@ int main(int argc, char *argv[])
// Input was validated
validatedDict.add(iter().keyword(), iter().dict());
validatedDict.add(key, dict);
}
}
@ -433,9 +434,9 @@ int main(int argc, char *argv[])
// Step through the topology changes
label actioni = 0;
forAllConstIters(validatedDict, iter)
for (const entry& dEntry : validatedDict)
{
const dictionary& dict = iter().dict();
const dictionary& dict = dEntry.dict();
// Match type
bool perfect = false;


@ -97,10 +97,10 @@ HashTable<wordList> extractPatchGroups(const dictionary& boundaryDict)
{
HashTable<wordList> groupToPatch;
forAllConstIter(dictionary, boundaryDict, iter)
for (const entry& dEntry : boundaryDict)
{
const word& patchName = iter().keyword();
const dictionary& patchDict = iter().dict();
const word& patchName = dEntry.keyword();
const dictionary& patchDict = dEntry.dict();
wordList groups;
if (patchDict.readIfPresent("inGroups", groups))
@ -243,9 +243,9 @@ bool merge
// Pass 1. All literal matches
forAllConstIter(IDLList<entry>, mergeDict, mergeIter)
for (const entry& mergeEntry : mergeDict)
{
const keyType& key = mergeIter().keyword();
const keyType& key = mergeEntry.keyword();
if (key[0] == '~')
{
@ -274,7 +274,7 @@ bool merge
(
thisDict,
*eptr,
mergeIter(),
mergeEntry,
literalRE,
shortcuts
)
@ -287,8 +287,8 @@ bool merge
{
if (addNonExisting)
{
// not found - just add
thisDict.add(mergeIter().clone(thisDict).ptr());
// Not found - just add
thisDict.add(mergeEntry.clone(thisDict).ptr());
changed = true;
}
else
@ -309,9 +309,9 @@ bool merge
// Pick up remaining dictionary entries
wordList thisKeys(thisKeysSet.toc());
forAllConstIter(IDLList<entry>, mergeDict, mergeIter)
for (const entry& mergeEntry : mergeDict)
{
const keyType& key = mergeIter().keyword();
const keyType& key = mergeEntry.keyword();
if (key[0] == '~')
{
@ -364,7 +364,7 @@ bool merge
(
thisDict,
*eptr,
mergeIter(),
mergeEntry,
literalRE,
HashTable<wordList>(0) // no shortcuts
// at deeper levels
@ -462,7 +462,7 @@ int main(int argc, char *argv[])
const bool enableEntries = args.found("enableFunctionEntries");
if (enableEntries)
{
Info<< "Allowing dictionary preprocessing ('#include', '#codeStream')."
Info<< "Allowing dictionary preprocessing (#include, #codeStream)."
<< endl;
}
@ -550,9 +550,9 @@ int main(int argc, char *argv[])
// Temporary convert to dictionary
dictionary fieldDict;
forAll(dictList, i)
for (const entry& e : dictList)
{
fieldDict.add(dictList[i].keyword(), dictList[i].dict());
fieldDict.add(e.keyword(), e.dict());
}
if (dictList.size())
@ -582,9 +582,11 @@ int main(int argc, char *argv[])
// Every replacement is a dictionary name and a keyword in this
forAllConstIter(dictionary, replaceDicts, fieldIter)
for (const entry& replaceEntry : replaceDicts)
{
const word& fieldName = fieldIter().keyword();
const word& fieldName = replaceEntry.keyword();
const dictionary& replaceDict = replaceEntry.dict();
Info<< "Replacing entries in dictionary " << fieldName << endl;
// Handle 'boundary' specially:
@ -595,11 +597,8 @@ int main(int argc, char *argv[])
Info<< "Special handling of " << fieldName
<< " as polyMesh/boundary file." << endl;
// Get the replacement dictionary for the field
const dictionary& replaceDict = fieldIter().dict();
Info<< "Merging entries from " << replaceDict.toc() << endl;
// Merge the replacements in. Do not add non-existing entries.
Info<< "Merging entries from " << replaceDict.toc() << endl;
merge(false, fieldDict, replaceDict, literalRE, patchGroups);
Info<< "fieldDict:" << fieldDict << endl;
@ -627,9 +626,9 @@ int main(int argc, char *argv[])
// Add remaining entries
label sz = dictList.size();
dictList.setSize(nEntries);
forAllConstIter(dictionary, fieldDict, iter)
for (const entry& e : fieldDict)
{
dictList.set(sz++, iter().clone());
dictList.set(sz++, e.clone());
}
Info<< "Writing modified " << fieldName << endl;
@ -672,11 +671,8 @@ int main(int argc, char *argv[])
Info<< "Loaded dictionary " << fieldName
<< " with entries " << fieldDict.toc() << endl;
// Get the replacement dictionary for the field
const dictionary& replaceDict = fieldIter().dict();
Info<< "Merging entries from " << replaceDict.toc() << endl;
// Merge the replacements in (allow adding)
Info<< "Merging entries from " << replaceDict.toc() << endl;
merge(true, fieldDict, replaceDict, literalRE, patchGroups);
Info<< "Writing modified fieldDict " << fieldName << endl;


@ -54,9 +54,9 @@ Foam::boundaryTemplates::boundaryTemplates
)
);
forAllConstIter(dictionary, regionBCs, iter)
for (const entry& dEntry : regionBCs)
{
const word& regionType = iter().keyword();
const word& regionType = dEntry.keyword();
wordList patchTypes(regionBCs.lookup(regionType));
dictionary regionTemplate = dictionary::null;
@ -175,7 +175,7 @@ Foam::dictionary Foam::boundaryTemplates::generatePatchDict
// look for inlet, outlet, wall etc
if (regionTemplates.found(category))
{
const dictionary& categoryDict(regionTemplates.subDict(category));
const dictionary& categoryDict = regionTemplates.subDict(category);
// look for subSonic, slip etc
if (categoryDict.found(patchType))
@ -199,10 +199,8 @@ Foam::dictionary Foam::boundaryTemplates::generatePatchDict
const wordList requiredOptions(patchDict.lookup("OPTIONS"));
forAll(requiredOptions, i)
for (const word& option : requiredOptions)
{
const word& option = requiredOptions[i];
word selected;
if (!conditionOptions.readIfPresent(option, selected))
{
@ -252,18 +250,19 @@ Foam::dictionary Foam::boundaryTemplates::generatePatchDict
dictionary dict(dictionary::null);
const dictionary& fieldDict(patchDict.subDict(fieldName));
forAllConstIter(IDLList<entry>, fieldDict, iter)
for (const entry& dEntry : fieldDict)
{
OStringStream oss;
oss << iter();
oss << dEntry;
string s(oss.str());
s.replace(iter().keyword(), "");
s.replace(dEntry.keyword(), "");
s.replace
(
"VALUE",
"boundaryConditions." + condition + ".values"
);
dict.add(iter().keyword(), s.c_str());
dict.add(dEntry.keyword(), s.c_str());
}
return dict;
@ -344,7 +343,7 @@ bool Foam::boundaryTemplates::optionsRequired
if (regionTemplates.found(category))
{
const dictionary& categoryDict(regionTemplates.subDict(category));
const dictionary& categoryDict = regionTemplates.subDict(category);
if (categoryDict.found(patchType))
{


@ -86,9 +86,10 @@ int main(int argc, char *argv[])
label nCoarseFaces = 0;
forAllConstIter(dictionary, agglomDict, iter)
for (const entry& dEntry : agglomDict)
{
labelList patchids = boundary.indices(iter().keyword());
labelList patchids = boundary.indices(dEntry.keyword());
for (const label patchi : patchids)
{
const polyPatch& pp = boundary[patchi];


@ -241,14 +241,15 @@ int main(int argc, char *argv[])
// Where to write VTK output files
const fileName vtkOutputDir = runTime.constantPath()/"triSurface";
forAllConstIters(dict, iter)
for (const entry& dEntry : dict)
{
if (!iter().isDict() || iter().keyword().isPattern())
if (!dEntry.isDict() || dEntry.keyword().isPattern()) // safety
{
continue;
}
const dictionary& surfaceDict = iter().dict();
const word& dictName = dEntry.keyword();
const dictionary& surfaceDict = dEntry.dict();
if (!surfaceDict.found("extractionMethod"))
{
@ -257,7 +258,6 @@ int main(int argc, char *argv[])
}
// The output name based in dictionary name (without extensions)
const word& dictName = iter().keyword();
const word outputName = dictName.lessExt();
autoPtr<surfaceFeaturesExtraction::method> extractor =


@ -84,12 +84,12 @@ int main(int argc, char *argv[])
const dictionary& surfacesDict = meshDict.subDict("surfaces");
forAllConstIter(dictionary, surfacesDict, surfacesIter)
for (const entry& dEntry : surfacesDict)
{
if (surfacesIter().isDict())
if (dEntry.isDict())
{
const word& surfName = surfacesIter().keyword();
const dictionary& surfDict = surfacesIter().dict();
const word& surfName = dEntry.keyword();
const dictionary& surfDict = dEntry.dict();
// Look up surface
searchableSurface& surf = allGeometry[surfName];
@ -120,10 +120,11 @@ int main(int argc, char *argv[])
if (surfDict.found("regions"))
{
const dictionary& regionsDict = surfDict.subDict("regions");
forAllConstIter(dictionary, regionsDict, regionsIter)
for (const entry& e : regionsDict)
{
const dictionary& regionDict = regionsIter().dict();
const keyType& regionName = regionsIter().keyword();
const keyType& regionName = e.keyword();
const dictionary& regionDict = e.dict();
autoPtr<searchableSurfaceModifier> modifier
(


@ -147,11 +147,9 @@ void Foam::HashPtrTable<T, Key, Hash>::read
const INew& inew
)
{
forAllConstIter(dictionary, dict, iter)
for (const entry& e : dict)
{
const word& k = iter().keyword();
this->set(k, inew(dict.subDict(k)).ptr());
this->set(e.keyword(), inew(e.dict()).ptr());
}
}


@ -109,9 +109,9 @@ void Foam::Time::readDict()
simpleObjectRegistry& objs = debug::debugObjects();
forAllConstIters(*localDict, iter)
for (const entry& dEntry : *localDict)
{
const word& name = iter().keyword();
const word& name = dEntry.keyword();
simpleObjectRegistryEntry* objPtr = objs.lookupPtr(name);
@ -119,14 +119,14 @@ void Foam::Time::readDict()
{
const List<simpleRegIOobject*>& objects = *objPtr;
DetailInfo << " " << iter() << nl;
DetailInfo << " " << dEntry << nl;
if (iter().isDict())
if (dEntry.isDict())
{
for (simpleRegIOobject* obj : objects)
{
OStringStream os(IOstream::ASCII);
os << iter().dict();
os << dEntry.dict();
IStringStream is(os.str());
obj->readData(is);
}
@ -135,7 +135,7 @@ void Foam::Time::readDict()
{
for (simpleRegIOobject* obj : objects)
{
obj->readData(iter().stream());
obj->readData(dEntry.stream());
}
}
}
@ -156,9 +156,9 @@ void Foam::Time::readDict()
simpleObjectRegistry& objs = debug::infoObjects();
forAllConstIters(*localDict, iter)
for (const entry& dEntry : *localDict)
{
const word& name = iter().keyword();
const word& name = dEntry.keyword();
simpleObjectRegistryEntry* objPtr = objs.lookupPtr(name);
@ -166,14 +166,14 @@ void Foam::Time::readDict()
{
const List<simpleRegIOobject*>& objects = *objPtr;
DetailInfo << " " << iter() << nl;
DetailInfo << " " << dEntry << nl;
if (iter().isDict())
if (dEntry.isDict())
{
for (simpleRegIOobject* obj : objects)
{
OStringStream os(IOstream::ASCII);
os << iter().dict();
os << dEntry.dict();
IStringStream is(os.str());
obj->readData(is);
}
@ -182,7 +182,7 @@ void Foam::Time::readDict()
{
for (simpleRegIOobject* obj : objects)
{
obj->readData(iter().stream());
obj->readData(dEntry.stream());
}
}
}
@ -202,24 +202,24 @@ void Foam::Time::readDict()
simpleObjectRegistry& objs = debug::optimisationObjects();
forAllConstIters(*localDict, iter)
for (const entry& dEntry : *localDict)
{
const word& name = iter().keyword();
const word& name = dEntry.keyword();
simpleObjectRegistryEntry* objPtr = objs.lookupPtr(name);
if (objPtr)
{
DetailInfo << " " << iter() << nl;
DetailInfo << " " << dEntry << nl;
const List<simpleRegIOobject*>& objects = *objPtr;
if (iter().isDict())
if (dEntry.isDict())
{
for (simpleRegIOobject* obj : objects)
{
OStringStream os(IOstream::ASCII);
os << iter().dict();
os << dEntry.dict();
IStringStream is(os.str());
obj->readData(is);
}
@ -228,7 +228,7 @@ void Foam::Time::readDict()
{
for (simpleRegIOobject* obj : objects)
{
obj->readData(iter().stream());
obj->readData(dEntry.stream());
}
}
}


@ -728,11 +728,11 @@ bool Foam::functionObjectList::read()
newPtrs.setSize(functionsDict.size());
newDigs.setSize(functionsDict.size());
forAllConstIter(dictionary, functionsDict, iter)
for (const entry& dEntry : functionsDict)
{
const word& key = iter().keyword();
const word& key = dEntry.keyword();
if (!iter().isDict())
if (!dEntry.isDict())
{
if (key != "libs")
{
@ -743,7 +743,8 @@ bool Foam::functionObjectList::read()
continue;
}
const dictionary& dict = iter().dict();
const dictionary& dict = dEntry.dict();
bool enabled = dict.lookupOrDefault("enabled", true);
newDigs[nFunc] = dict.digest();


@ -154,9 +154,9 @@ Foam::word Foam::functionObjects::stateFunctionObject::objectResultType
{
const dictionary& objectDict = resultsDict.subDict(objectName);
forAllConstIter(dictionary, objectDict, iter)
for (const entry& dEntry : objectDict)
{
const dictionary& dict = iter().dict();
const dictionary& dict = dEntry.dict();
if (dict.found(entryName))
{
@ -195,9 +195,10 @@ objectResultEntries
{
const dictionary& objectDict = resultsDict.subDict(objectName);
forAllConstIter(dictionary, objectDict, iter)
for (const entry& dEntry : objectDict)
{
const dictionary& dict = iter().dict();
const dictionary& dict = dEntry.dict();
result.append(dict.toc());
}
}


@ -120,17 +120,18 @@ const HashTable<dimensionedScalar>& unitSet()
unitSetPtr_ = new HashTable<dimensionedScalar>(unitDict.size());
forAllConstIter(dictionary, unitDict, iter)
for (const entry& dEntry : unitDict)
{
if (iter().keyword() != "writeUnits")
if (dEntry.keyword() != "writeUnits")
{
dimensionedScalar dt;
dt.read(iter().stream(), unitDict);
bool ok = unitSetPtr_->insert(iter().keyword(), dt);
dt.read(dEntry.stream(), unitDict);
bool ok = unitSetPtr_->insert(dEntry.keyword(), dt);
if (!ok)
{
FatalIOErrorInFunction(dict)
<< "Duplicate unit " << iter().keyword()
<< "Duplicate unit " << dEntry.keyword()
<< " in DimensionSets dictionary"
<< exit(FatalIOError);
}


@ -47,11 +47,12 @@ readField
// 1. Handle explicit patch names. Note that there can be only one explicit
// patch name since is key of dictionary.
forAllConstIter(dictionary, dict, iter)
for (const entry& dEntry : dict)
{
if (iter().isDict() && iter().keyword().isLiteral())
if (dEntry.isDict() && dEntry.keyword().isLiteral())
{
const label patchi = bmesh_.findPatchID(iter().keyword());
const label patchi = bmesh_.findPatchID(dEntry.keyword());
if (patchi != -1)
{
@ -62,7 +63,7 @@ readField
(
bmesh_[patchi],
field,
iter().dict()
dEntry.dict()
)
);
nUnset--;
@ -81,37 +82,29 @@ readField
// Note: in reverse order of entries in the dictionary (last
// patchGroups wins). This is so it is consistent with dictionary wildcard
// behaviour
if (dict.size())
for (auto iter = dict.crbegin(); iter != dict.crend(); ++iter)
{
for
(
IDLList<entry>::const_reverse_iterator iter = dict.crbegin();
iter != dict.crend();
++iter
)
const entry& dEntry = *iter;
if (dEntry.isDict() && dEntry.keyword().isLiteral())
{
const entry& e = iter();
const labelList patchIds =
bmesh_.indices(dEntry.keyword(), true); // use patchGroups
if (e.isDict() && e.keyword().isLiteral())
for (const label patchi : patchIds)
{
const labelList patchIds =
bmesh_.indices(e.keyword(), true); // use patchGroups
for (const label patchi : patchIds)
if (!this->set(patchi))
{
if (!this->set(patchi))
{
this->set
this->set
(
patchi,
PatchField<Type>::New
(
patchi,
PatchField<Type>::New
(
bmesh_[patchi],
field,
e.dict()
)
);
}
bmesh_[patchi],
field,
dEntry.dict()
)
);
}
}
}


@ -131,19 +131,21 @@ void Foam::genericPolyPatch::write(Ostream& os) const
os.writeEntry("nFaces", size());
os.writeEntry("startFace", start());
forAllConstIter(dictionary, dict_, iter)
for (const entry& e : dict_)
{
const word& key = e.keyword();
// Filter out any keywords already written by above
if
(
iter().keyword() != "type"
&& iter().keyword() != "nFaces"
&& iter().keyword() != "startFace"
&& iter().keyword() != "physicalType"
&& iter().keyword() != "inGroups"
key != "type"
&& key != "nFaces"
&& key != "startFace"
&& key != "physicalType"
&& key != "inGroups"
)
{
iter().write(os);
e.write(os);
}
}
}


@ -238,16 +238,19 @@ Foam::Map<Foam::word> Foam::cellTable::selectType(const word& matl) const
forAllConstIter(Map<dictionary>, *this, iter)
{
const label index = iter.key();
const dictionary& dict = iter.object();
if
(
matl
== iter().lookupOrDefault<word>("MaterialType", defaultMaterial_)
== dict.lookupOrDefault<word>("MaterialType", defaultMaterial_)
)
{
lookup.insert
(
iter.key(),
iter().lookupOrDefault<word>
index,
dict.lookupOrDefault<word>
(
"Label",
"cellTable_" + Foam::name(iter.key())


@ -71,11 +71,11 @@ Foam::dynamicMultiMotionSolverFvMesh::dynamicMultiMotionSolverFvMesh
pointIDs_.setSize(dynamicMeshCoeffs.size());
label zoneI = 0;
forAllConstIter(dictionary, dynamicMeshCoeffs, iter)
for (const entry& dEntry : dynamicMeshCoeffs)
{
if (iter().isDict())
if (dEntry.isDict())
{
const dictionary& subDict = iter().dict();
const dictionary& subDict = dEntry.dict();
const word zoneName(subDict.get<word>("cellZone"));


@ -306,9 +306,9 @@ void Foam::displacementLayeredMotionMotionSolver::cellZoneSolve
// Allocate the fields
label patchi = 0;
forAllConstIter(dictionary, patchesDict, patchiter)
for (const entry& dEntry : patchesDict)
{
const word& faceZoneName = patchiter().keyword();
const word& faceZoneName = dEntry.keyword();
label zoneI = mesh().faceZones().findZoneID(faceZoneName);
if (zoneI == -1)
{
@ -353,10 +353,10 @@ void Foam::displacementLayeredMotionMotionSolver::cellZoneSolve
pointDisplacement_.correctBoundaryConditions();
patchi = 0;
forAllConstIter(dictionary, patchesDict, patchiter)
for (const entry& dEntry : patchesDict)
{
const word& faceZoneName = patchiter().keyword();
const dictionary& faceZoneDict = patchiter().dict();
const word& faceZoneName = dEntry.keyword();
const dictionary& faceZoneDict = dEntry.dict();
// Determine the points of the faceZone within the cellZone
const faceZone& fz = mesh().faceZones()[faceZoneName];
@ -546,11 +546,10 @@ void Foam::displacementLayeredMotionMotionSolver::solve()
pointDisplacement_.boundaryFieldRef().updateCoeffs();
// Solve motion on all regions (=cellZones)
const dictionary& regionDicts = coeffDict().subDict("regions");
forAllConstIter(dictionary, regionDicts, regionIter)
for (const entry& dEntry : coeffDict().subDict("regions"))
{
const word& cellZoneName = regionIter().keyword();
const dictionary& regionDict = regionIter().dict();
const word& cellZoneName = dEntry.keyword();
const dictionary& regionDict = dEntry.dict();
label zoneI = mesh().cellZones().findZoneID(cellZoneName);


@ -59,24 +59,26 @@ Foam::multiSolidBodyMotionSolver::multiSolidBodyMotionSolver
pointIDs_.setSize(coeffDict().size());
label zonei = 0;
forAllConstIter(dictionary, coeffDict(), iter)
for (const entry& dEntry : coeffDict())
{
if (iter().isDict())
if (dEntry.isDict())
{
zoneIDs_[zonei] = mesh.cellZones().findZoneID(iter().keyword());
const word& zoneName = dEntry.keyword();
const dictionary& subDict = dEntry.dict();
zoneIDs_[zonei] = mesh.cellZones().findZoneID(zoneName);
if (zoneIDs_[zonei] == -1)
{
FatalIOErrorInFunction
(
coeffDict()
) << "Cannot find cellZone named " << iter().keyword()
<< ". Valid zones are " << mesh.cellZones().names()
) << "Cannot find cellZone named " << zoneName
<< ". Valid zones are "
<< flatOutput(mesh.cellZones().names())
<< exit(FatalIOError);
}
const dictionary& subDict = iter().dict();
SBMFs_.set
(
zonei,
@ -119,7 +121,7 @@ Foam::multiSolidBodyMotionSolver::multiSolidBodyMotionSolver
Info<< "Applying solid body motion " << SBMFs_[zonei].type()
<< " to "
<< returnReduce(pointIDs_[zonei].size(), sumOp<label>())
<< " points of cellZone " << iter().keyword() << endl;
<< " points of cellZone " << zoneName << endl;
zonei++;
}


@ -93,18 +93,18 @@ bool Foam::solidBodyMotionFunctions::multiMotion::read
label i = 0;
SBMFs_.setSize(SBMFCoeffs_.size());
forAllConstIter(IDLList<entry>, SBMFCoeffs_, iter)
for (const entry& dEntry : SBMFCoeffs_)
{
if (iter().isDict())
if (dEntry.isDict())
{
SBMFs_.set
(
i,
solidBodyMotionFunction::New(iter().dict(), time_)
solidBodyMotionFunction::New(dEntry.dict(), time_)
);
Info<< "Constructed SBMF " << i << " : "
<< iter().keyword() << " of type "
<< dEntry.keyword() << " of type "
<< SBMFs_[i].type() << endl;
i++;


@ -72,28 +72,29 @@ bool Foam::MRFZoneList::active(const bool warn) const
void Foam::MRFZoneList::reset(const dictionary& dict)
{
label count = 0;
forAllConstIter(dictionary, dict, iter)
for (const entry& dEntry : dict)
{
if (iter().isDict())
if (dEntry.isDict())
{
count++;
++count;
}
}
this->setSize(count);
label i = 0;
forAllConstIter(dictionary, dict, iter)
this->resize(count);
count = 0;
for (const entry& dEntry : dict)
{
if (iter().isDict())
if (dEntry.isDict())
{
const word& name = iter().keyword();
const dictionary& modelDict = iter().dict();
const word& name = dEntry.keyword();
const dictionary& modelDict = dEntry.dict();
Info<< " creating MRF zone: " << name << endl;
this->set
(
i++,
count++,
new MRFZone(name, mesh_, modelDict)
);
}


@ -109,26 +109,27 @@ void Foam::fv::optionList::reset(const dictionary& dict)
{
// Count number of active fvOptions
label count = 0;
forAllConstIter(dictionary, dict, iter)
for (const entry& dEntry : dict)
{
if (iter().isDict())
if (dEntry.isDict())
{
count++;
++count;
}
}
this->setSize(count);
label i = 0;
forAllConstIter(dictionary, dict, iter)
this->resize(count);
count = 0;
for (const entry& dEntry : dict)
{
if (iter().isDict())
if (dEntry.isDict())
{
const word& name = iter().keyword();
const dictionary& sourceDict = iter().dict();
const word& name = dEntry.keyword();
const dictionary& sourceDict = dEntry.dict();
this->set
(
i++,
count++,
option::New(name, sourceDict, mesh_)
);
}


@ -64,26 +64,27 @@ bool Foam::porosityModelList::active(const bool warn) const
void Foam::porosityModelList::reset(const dictionary& dict)
{
label count = 0;
forAllConstIter(dictionary, dict, iter)
for (const entry& dEntry : dict)
{
if (iter().isDict())
if (dEntry.isDict())
{
count++;
++count;
}
}
this->setSize(count);
label i = 0;
forAllConstIter(dictionary, dict, iter)
this->resize(count);
count = 0;
for (const entry& dEntry : dict)
{
if (iter().isDict())
if (dEntry.isDict())
{
const word& name = iter().keyword();
const dictionary& modelDict = iter().dict();
const word& name = dEntry.keyword();
const dictionary& modelDict = dEntry.dict();
this->set
(
i++,
count++,
porosityModel::New(name, mesh_, modelDict)
);
}


@ -56,9 +56,9 @@ void Foam::solutionControl::read(const bool absTolOnly)
DynamicList<fieldData> data(residualControl_);
forAllConstIter(dictionary, residualDict, iter)
for (const entry& dEntry : residualDict)
{
const word& fName = iter().keyword();
const word& fName = dEntry.keyword();
const label fieldi = applyToField(fName, false);
if (fieldi == -1)
{
@ -71,22 +71,19 @@ void Foam::solutionControl::read(const bool absTolOnly)
fd.relTol = -1;
fd.initialResidual = -1;
}
else if (dEntry.isDict())
{
const dictionary& fieldDict = dEntry.dict();
fd.absTol = fieldDict.get<scalar>("tolerance");
fd.relTol = fieldDict.get<scalar>("relTol");
fd.initialResidual = 0.0;
}
else
{
if (iter().isDict())
{
const dictionary& fieldDict(iter().dict());
fd.absTol = fieldDict.get<scalar>("tolerance");
fd.relTol = fieldDict.get<scalar>("relTol");
fd.initialResidual = 0.0;
}
else
{
FatalErrorInFunction
<< "Residual data for " << iter().keyword()
<< " must be specified as a dictionary"
<< exit(FatalError);
}
FatalErrorInFunction
<< "Residual data for " << dEntry.keyword()
<< " must be specified as a dictionary"
<< exit(FatalError);
}
data.append(fd);
@ -98,21 +95,18 @@ void Foam::solutionControl::read(const bool absTolOnly)
{
fd.absTol = residualDict.get<scalar>(fName);
}
else if (dEntry.isDict())
{
const dictionary& fieldDict = dEntry.dict();
fd.absTol = fieldDict.get<scalar>("tolerance");
fd.relTol = fieldDict.get<scalar>("relTol");
}
else
{
if (iter().isDict())
{
const dictionary& fieldDict(iter().dict());
fd.absTol = fieldDict.get<scalar>("tolerance");
fd.relTol = fieldDict.get<scalar>("relTol");
}
else
{
FatalErrorInFunction
<< "Residual data for " << iter().keyword()
<< " must be specified as a dictionary"
<< exit(FatalError);
}
FatalErrorInFunction
<< "Residual data for " << dEntry.keyword()
<< " must be specified as a dictionary"
<< exit(FatalError);
}
}
}


@ -575,17 +575,17 @@ bool Foam::functionObjects::externalCoupled::read(const dictionary& dict)
wordList allRegionNames(time_.lookupClass<fvMesh>().sortedToc());
const dictionary& allRegionsDict = dict.subDict("regions");
forAllConstIters(allRegionsDict, iter)
for (const entry& dEntry : allRegionsDict)
{
if (!iter().isDict())
if (!dEntry.isDict())
{
FatalIOErrorInFunction(allRegionsDict)
<< "Regions must be specified in dictionary format"
<< exit(FatalIOError);
}
const wordRe regionGroupName(iter().keyword());
const dictionary& regionDict = iter().dict();
const wordRe regionGroupName(dEntry.keyword());
const dictionary& regionDict = dEntry.dict();
labelList regionIDs = findStrings(regionGroupName, allRegionNames);
@ -594,16 +594,17 @@ bool Foam::functionObjects::externalCoupled::read(const dictionary& dict)
regionGroupNames_.append(compositeName(regionNames));
regionGroupRegions_.append(regionNames);
forAllConstIters(regionDict, regionIter)
for (const entry& dEntry : regionDict)
{
if (!regionIter().isDict())
if (!dEntry.isDict())
{
FatalIOErrorInFunction(regionDict)
<< "Regions must be specified in dictionary format"
<< exit(FatalIOError);
}
const wordRe groupName(regionIter().keyword());
const dictionary& groupDict = regionIter().dict();
const wordRe groupName(dEntry.keyword());
const dictionary& groupDict = dEntry.dict();
const label nGroups = groupNames_.size();
const wordList readFields(groupDict.get<wordList>("readFields"));
@ -619,7 +620,7 @@ bool Foam::functionObjects::externalCoupled::read(const dictionary& dict)
regionToGroups_.insert
(
regionGroupNames_.last(),
labelList{nGroups}
labelList(one(), nGroups)
);
}
groupNames_.append(groupName);


@ -106,11 +106,11 @@ bool Foam::functionObjects::runTimePostProcessing::read(const dictionary& dict)
readObjects(dict.subOrEmptyDict("lines"), lines_);
readObjects(dict.subOrEmptyDict("surfaces"), surfaces_);
const dictionary& textDict = dict.subDict("text");
forAllConstIter(dictionary, textDict, iter)
for (const entry& dEntry : textDict)
{
if (!iter().isDict())
if (!dEntry.isDict())
{
FatalIOErrorInFunction(textDict)
<< "text must be specified in dictionary format"
@ -122,7 +122,7 @@ bool Foam::functionObjects::runTimePostProcessing::read(const dictionary& dict)
new runTimePostPro::text
(
*this,
iter().dict(),
dEntry.dict(),
scene_.colours()
)
);


@ -33,9 +33,10 @@ void Foam::functionObjects::runTimePostProcessing::readObjects
) const
{
objects.clear();
forAllConstIter(dictionary, dict, iter)
for (const entry& dEntry : dict)
{
if (!iter().isDict())
if (!dEntry.isDict())
{
FatalIOErrorInFunction(dict)
<< dict.dictName()
@ -43,12 +44,12 @@ void Foam::functionObjects::runTimePostProcessing::readObjects
<< exit(FatalIOError);
}
const dictionary& objectDict(iter().dict());
const dictionary& objectDict = dEntry.dict();
const word objectType = objectDict.get<word>("type");
objects.append
(
Type::New(*this, iter().dict(), scene_.colours(), objectType)
Type::New(*this, objectDict, scene_.colours(), objectType)
);
}
}


@ -54,18 +54,20 @@ bool Foam::fv::FixedValueConstraint<Type>::read(const dictionary& dict)
{
const dictionary& fieldValuesDict = coeffs_.subDict("fieldValues");
fieldNames_.setSize(fieldValuesDict.size());
fieldValues_.setSize(fieldNames_.size());
label count = fieldValuesDict.size();
label i = 0;
forAllConstIter(dictionary, fieldValuesDict, iter)
fieldNames_.setSize(count);
fieldValues_.setSize(count);
applied_.setSize(count, false);
count = 0;
for (const entry& dEntry : fieldValuesDict)
{
fieldNames_[i] = iter().keyword();
fieldValuesDict.readEntry(iter().keyword(), fieldValues_[i]);
++i;
}
fieldNames_[count] = dEntry.keyword();
dEntry.readEntry(fieldValues_[count]);
applied_.setSize(fieldNames_.size(), false);
++count;
}
return true;
}


@ -83,17 +83,19 @@ Foam::word Foam::fv::SemiImplicitSource<Type>::volumeModeTypeToWord
template<class Type>
void Foam::fv::SemiImplicitSource<Type>::setFieldData(const dictionary& dict)
{
fieldNames_.setSize(dict.toc().size());
injectionRate_.setSize(fieldNames_.size());
label count = dict.size();
applied_.setSize(fieldNames_.size(), false);
fieldNames_.resize(count);
injectionRate_.resize(count);
applied_.resize(count, false);
label i = 0;
forAllConstIter(dictionary, dict, iter)
count = 0;
for (const entry& dEntry : dict)
{
fieldNames_[i] = iter().keyword();
dict.readEntry(iter().keyword(), injectionRate_[i]);
++i;
fieldNames_[count] = dEntry.keyword();
dEntry.readEntry(injectionRate_[count]);
++count;
}
// Set volume normalisation


@ -75,17 +75,19 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError);
}
forAllConstIter(dictionary, dict_, iter)
for (const entry& dEntry : dict_)
{
if (iter().keyword() != "type" && iter().keyword() != "value")
const keyType& key = dEntry.keyword();
if (key != "type" && key != "value")
{
if
(
iter().isStream()
&& iter().stream().size()
dEntry.isStream()
&& dEntry.stream().size()
)
{
ITstream& is = iter().stream();
ITstream& is = dEntry.stream();
// Read first token
token firstToken(is);
@ -108,7 +110,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
{
scalarFields_.insert
(
iter().keyword(),
key,
autoPtr<scalarField>::New()
);
}
@ -148,7 +150,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -160,7 +162,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError);
}
scalarFields_.insert(iter().keyword(), fPtr);
scalarFields_.insert(key, fPtr);
}
else if
(
@ -183,7 +185,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -195,7 +197,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError);
}
vectorFields_.insert(iter().keyword(), fPtr);
vectorFields_.insert(key, fPtr);
}
else if
(
@ -221,7 +223,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -233,7 +235,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError);
}
sphericalTensorFields_.insert(iter().keyword(), fPtr);
sphericalTensorFields_.insert(key, fPtr);
}
else if
(
@ -259,7 +261,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -271,7 +273,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError);
}
symmTensorFields_.insert(iter().keyword(), fPtr);
symmTensorFields_.insert(key, fPtr);
}
else if
(
@ -294,7 +296,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -306,7 +308,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError);
}
tensorFields_.insert(iter().keyword(), fPtr);
tensorFields_.insert(key, fPtr);
}
else
{
@ -335,7 +337,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
{
scalarFields_.insert
(
iter().keyword(),
key,
autoPtr<scalarField>::New
(
this->size(),
@ -356,7 +358,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
vectorFields_.insert
(
iter().keyword(),
key,
autoPtr<vectorField>::New
(
this->size(),
@ -370,7 +372,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
sphericalTensorFields_.insert
(
iter().keyword(),
key,
autoPtr<sphericalTensorField>::New
(
this->size(),
@ -384,7 +386,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
symmTensorFields_.insert
(
iter().keyword(),
key,
autoPtr<symmTensorField>::New
(
this->size(),
@ -403,7 +405,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
tensorFields_.insert
(
iter().keyword(),
key,
autoPtr<tensorField>::New
(
this->size(),
@ -797,47 +799,49 @@ void Foam::genericFaPatchField<Type>::write(Ostream& os) const
{
os.writeEntry("type", actualTypeName_);
forAllConstIter(dictionary, dict_, iter)
for (const entry& dEntry : dict_)
{
if (iter().keyword() != "type" && iter().keyword() != "value")
const keyType& key = dEntry.keyword();
if (key != "type" && key != "value")
{
if
(
iter().isStream()
&& iter().stream().size()
&& iter().stream()[0].isWord()
&& iter().stream()[0].wordToken() == "nonuniform"
dEntry.isStream()
&& dEntry.stream().size()
&& dEntry.stream()[0].isWord()
&& dEntry.stream()[0].wordToken() == "nonuniform"
)
{
if (scalarFields_.found(iter().keyword()))
if (scalarFields_.found(key))
{
scalarFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
scalarFields_.find(key)()
->writeEntry(key, os);
}
else if (vectorFields_.found(iter().keyword()))
else if (vectorFields_.found(key))
{
vectorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
vectorFields_.find(key)()
->writeEntry(key, os);
}
else if (sphericalTensorFields_.found(iter().keyword()))
else if (sphericalTensorFields_.found(key))
{
sphericalTensorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
sphericalTensorFields_.find(key)()
->writeEntry(key, os);
}
else if (symmTensorFields_.found(iter().keyword()))
else if (symmTensorFields_.found(key))
{
symmTensorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
symmTensorFields_.find(key)()
->writeEntry(key, os);
}
else if (tensorFields_.found(iter().keyword()))
else if (tensorFields_.found(key))
{
tensorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
tensorFields_.find(key)()
->writeEntry(key, os);
}
}
else
{
iter().write(os);
dEntry.write(os);
}
}
}


@ -75,17 +75,19 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError);
}
forAllConstIter(dictionary, dict_, iter)
for (const entry& dEntry : dict_)
{
if (iter().keyword() != "type" && iter().keyword() != "value")
const keyType& key = dEntry.keyword();
if (key != "type" && key != "value")
{
if
(
iter().isStream()
&& iter().stream().size()
dEntry.isStream()
&& dEntry.stream().size()
)
{
ITstream& is = iter().stream();
ITstream& is = dEntry.stream();
// Read first token
token firstToken(is);
@ -108,7 +110,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
{
scalarFields_.insert
(
iter().keyword(),
dEntry.keyword(),
autoPtr<scalarField>::New()
);
}
@ -148,7 +150,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -160,7 +162,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError);
}
scalarFields_.insert(iter().keyword(), fPtr);
scalarFields_.insert(key, fPtr);
}
else if
(
@ -183,7 +185,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -195,7 +197,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError);
}
vectorFields_.insert(iter().keyword(), fPtr);
vectorFields_.insert(key, fPtr);
}
else if
(
@ -221,7 +223,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -233,7 +235,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError);
}
sphericalTensorFields_.insert(iter().keyword(), fPtr);
sphericalTensorFields_.insert(key, fPtr);
}
else if
(
@ -259,7 +261,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -271,7 +273,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError);
}
symmTensorFields_.insert(iter().keyword(), fPtr);
symmTensorFields_.insert(key, fPtr);
}
else if
(
@ -294,7 +296,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -306,7 +308,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError);
}
tensorFields_.insert(iter().keyword(), fPtr);
tensorFields_.insert(key, fPtr);
}
else
{
@ -335,7 +337,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
{
scalarFields_.insert
(
iter().keyword(),
key,
autoPtr<scalarField>::New
(
this->size(),
@ -356,7 +358,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
vectorFields_.insert
(
iter().keyword(),
key,
autoPtr<vectorField>::New
(
this->size(),
@ -370,7 +372,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
sphericalTensorFields_.insert
(
iter().keyword(),
key,
autoPtr<sphericalTensorField>::New
(
this->size(),
@ -384,7 +386,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
symmTensorFields_.insert
(
iter().keyword(),
key,
autoPtr<symmTensorField>::New
(
this->size(),
@ -403,7 +405,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
tensorFields_.insert
(
iter().keyword(),
key,
autoPtr<tensorField>::New
(
this->size(),
@ -797,47 +799,49 @@ void Foam::genericFvPatchField<Type>::write(Ostream& os) const
{
os.writeEntry("type", actualTypeName_);
forAllConstIter(dictionary, dict_, iter)
for (const entry& dEntry : dict_)
{
if (iter().keyword() != "type" && iter().keyword() != "value")
const keyType& key = dEntry.keyword();
if (key != "type" && key != "value")
{
if
(
iter().isStream()
&& iter().stream().size()
&& iter().stream()[0].isWord()
&& iter().stream()[0].wordToken() == "nonuniform"
dEntry.isStream()
&& dEntry.stream().size()
&& dEntry.stream()[0].isWord()
&& dEntry.stream()[0].wordToken() == "nonuniform"
)
{
if (scalarFields_.found(iter().keyword()))
if (scalarFields_.found(key))
{
scalarFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
scalarFields_.find(key)()
->writeEntry(key, os);
}
else if (vectorFields_.found(iter().keyword()))
else if (vectorFields_.found(key))
{
vectorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
vectorFields_.find(key)()
->writeEntry(key, os);
}
else if (sphericalTensorFields_.found(iter().keyword()))
else if (sphericalTensorFields_.found(key))
{
sphericalTensorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
sphericalTensorFields_.find(key)()
->writeEntry(key, os);
}
else if (symmTensorFields_.found(iter().keyword()))
else if (symmTensorFields_.found(key))
{
symmTensorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
symmTensorFields_.find(key)()
->writeEntry(key, os);
}
else if (tensorFields_.found(iter().keyword()))
else if (tensorFields_.found(key))
{
tensorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
tensorFields_.find(key)()
->writeEntry(key, os);
}
}
else
{
iter().write(os);
dEntry.write(os);
}
}
}


@ -53,17 +53,19 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
actualTypeName_(dict.get<word>("type")),
dict_(dict)
{
forAllConstIter(dictionary, dict_, iter)
for (const entry& dEntry : dict_)
{
if (iter().keyword() != "type")
const keyType& key = dEntry.keyword();
if (key != "type")
{
if
(
iter().isStream()
&& iter().stream().size()
dEntry.isStream()
&& dEntry.stream().size()
)
{
ITstream& is = iter().stream();
ITstream& is = dEntry.stream();
// Read first token
token firstToken(is);
@ -86,7 +88,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
{
scalarFields_.insert
(
iter().keyword(),
key,
autoPtr<scalarField>::New()
);
}
@ -126,7 +128,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -138,7 +140,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
<< exit(FatalIOError);
}
scalarFields_.insert(iter().keyword(), fPtr);
scalarFields_.insert(key, fPtr);
}
else if
(
@ -161,7 +163,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -173,7 +175,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
<< exit(FatalIOError);
}
vectorFields_.insert(iter().keyword(), fPtr);
vectorFields_.insert(key, fPtr);
}
else if
(
@ -199,7 +201,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -211,7 +213,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
<< exit(FatalIOError);
}
sphericalTensorFields_.insert(iter().keyword(), fPtr);
sphericalTensorFields_.insert(key, fPtr);
}
else if
(
@ -237,7 +239,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -249,7 +251,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
<< exit(FatalIOError);
}
symmTensorFields_.insert(iter().keyword(), fPtr);
symmTensorFields_.insert(key, fPtr);
}
else if
(
@ -272,7 +274,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
FatalIOErrorInFunction
(
dict
) << "\n size of field " << iter().keyword()
) << "\n size of field " << key
<< " (" << fPtr->size() << ')'
<< " is not the same size as the patch ("
<< this->size() << ')'
@ -284,7 +286,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
<< exit(FatalIOError);
}
tensorFields_.insert(iter().keyword(), fPtr);
tensorFields_.insert(key, fPtr);
}
else
{
@ -574,47 +576,49 @@ void Foam::genericPointPatchField<Type>::write(Ostream& os) const
{
os.writeEntry("type", actualTypeName_);
forAllConstIter(dictionary, dict_, iter)
for (const entry& dEntry : dict_)
{
if (iter().keyword() != "type")
const keyType& key = dEntry.keyword();
if (key != "type")
{
if
(
iter().isStream()
&& iter().stream().size()
&& iter().stream()[0].isWord()
&& iter().stream()[0].wordToken() == "nonuniform"
dEntry.isStream()
&& dEntry.stream().size()
&& dEntry.stream()[0].isWord()
&& dEntry.stream()[0].wordToken() == "nonuniform"
)
{
if (scalarFields_.found(iter().keyword()))
if (scalarFields_.found(key))
{
scalarFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
scalarFields_.find(key)()
->writeEntry(key, os);
}
else if (vectorFields_.found(iter().keyword()))
else if (vectorFields_.found(key))
{
vectorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
vectorFields_.find(key)()
->writeEntry(key, os);
}
else if (sphericalTensorFields_.found(iter().keyword()))
else if (sphericalTensorFields_.found(key))
{
sphericalTensorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
sphericalTensorFields_.find(key)()
->writeEntry(key, os);
}
else if (symmTensorFields_.found(iter().keyword()))
else if (symmTensorFields_.found(key))
{
symmTensorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
symmTensorFields_.find(key)()
->writeEntry(key, os);
}
else if (tensorFields_.found(iter().keyword()))
else if (tensorFields_.found(key))
{
tensorFields_.find(iter().keyword())()
->writeEntry(iter().keyword(), os);
tensorFields_.find(key)()
->writeEntry(key, os);
}
}
else
{
iter().write(os);
dEntry.write(os);
}
}
}


@ -62,49 +62,47 @@ Foam::ParticleForceList<CloudType>::ParticleForceList
{
if (readFields)
{
wordList modelNames(dict.toc());
Info<< "Constructing particle forces" << endl;
if (modelNames.size() > 0)
{
this->setSize(modelNames.size());
this->resize(dict.size());
label i = 0;
forAllConstIter(IDLList<entry>, dict, iter)
label count = 0;
for (const entry& dEntry : dict)
{
const word& model = dEntry.keyword();
if (dEntry.isDict())
{
const word& model = iter().keyword();
if (iter().isDict())
{
this->set
this->set
(
count,
ParticleForce<CloudType>::New
(
i++,
ParticleForce<CloudType>::New
(
owner,
mesh,
iter().dict(),
model
)
);
}
else
{
this->set
(
i++,
ParticleForce<CloudType>::New
(
owner,
mesh,
dict,
model
)
);
}
owner,
mesh,
dEntry.dict(),
model
)
);
}
else
{
this->set
(
count,
ParticleForce<CloudType>::New
(
owner,
mesh,
dict,
model
)
);
}
++count;
}
else
if (!count)
{
Info<< " none" << endl;
}


@ -43,37 +43,40 @@ Foam::InjectionModelList<CloudType>::InjectionModelList
:
PtrList<InjectionModel<CloudType>>()
{
wordList modelNames(dict.toc());
Info<< "Constructing particle injection models" << endl;
if (modelNames.size() > 0)
label count = dict.size();
if (count)
{
this->setSize(modelNames.size());
label i = 0;
forAllConstIter(IDLList<entry>, dict, iter)
{
const word& model = iter().keyword();
Info<< "Creating injector: " << model << endl;
const dictionary& props = iter().dict();
this->set
(
i++,
InjectionModel<CloudType>::New
(
props,
model,
props.get<word>("type"),
owner
)
);
}
this->resize(count);
}
else
count = 0;
for (const entry& dEntry : dict)
{
this->setSize(1);
const word& model = dEntry.keyword();
const dictionary& props = dEntry.dict();
Info<< "Creating injector: " << model << endl;
this->set
(
count,
InjectionModel<CloudType>::New
(
props,
model,
props.get<word>("type"),
owner
)
);
++count;
}
if (!count)
{
this->resize(1);
this->set
(


@ -35,29 +35,30 @@ bool Foam::MultiInteraction<CloudType>::read(const dictionary& dict)
Info<< "Patch interaction model " << typeName << nl
<< "Executing in turn " << endl;
label nModels = 0;
forAllConstIter(dictionary, dict, iter)
label count = 0;
for (const entry& dEntry : dict)
{
if (iter().isDict())
if (dEntry.isDict())
{
Info<< " " << iter().name() << endl;
Info<< " " << dEntry.name() << endl;
nModels++;
++count;
}
}
models_.setSize(nModels);
nModels = 0;
forAllConstIter(dictionary, dict, iter)
models_.resize(count);
count = 0;
for (const entry& dEntry : dict)
{
if (iter().isDict())
if (dEntry.isDict())
{
models_.set
(
nModels++,
count++,
PatchInteractionModel<CloudType>::New
(
iter().dict(),
dEntry.dict(),
this->owner()
)
);


@ -89,14 +89,14 @@ void Foam::blockMeshTools::write
const dictionary& dict
)
{
forAllConstIter(dictionary, dict, iter)
for (const entry& e : dict)
{
if (iter().isStream())
if (e.isStream())
{
label keyVal(Foam::readLabel(iter().stream()));
label keyVal(Foam::readLabel(e.stream()));
if (keyVal == val)
{
os << iter().keyword();
os << e.keyword();
return;
}
}
@ -111,14 +111,14 @@ const Foam::keyType& Foam::blockMeshTools::findEntry
const label val
)
{
forAllConstIter(dictionary, dict, iter)
for (const entry& e : dict)
{
if (iter().isStream())
if (e.isStream())
{
label keyVal(Foam::readLabel(iter().stream()));
label keyVal(Foam::readLabel(e.stream()));
if (keyVal == val)
{
return iter().keyword();
return e.keyword();
}
}
}
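
A sketch of the reverse lookup above (find the keyword whose stream value equals a given label), with an invented dictionary and the same includes as the first sketch:

    IStringStream is("inlet 1; outlet 2; walls 3;");
    dictionary dict(is);

    const label val = 2;

    for (const entry& e : dict)
    {
        // Only primitive entries carry a token stream
        if (e.isStream() && readLabel(e.stream()) == val)
        {
            Info<< "value " << val << " maps back to " << e.keyword() << endl;
            break;
        }
    }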

View File

@ -259,11 +259,13 @@ Foam::layerParameters::layerParameters
const dictionary& layersDict = dict.subDict("layers");
forAllConstIter(dictionary, layersDict, iter)
for (const entry& dEntry : layersDict)
{
if (iter().isDict())
if (dEntry.isDict())
{
const keyType& key = iter().keyword();
const keyType& key = dEntry.keyword();
const dictionary& layerDict = dEntry.dict();
const labelHashSet patchIDs
(
boundaryMesh.patchSet(List<wordRe>(1, wordRe(key)))
@ -278,8 +280,6 @@ Foam::layerParameters::layerParameters
}
else
{
const dictionary& layerDict = iter().dict();
for (const label patchi : patchIDs)
{
numLayers_[patchi] =

View File

@ -187,13 +187,13 @@ Foam::searchableSurfaceCollection::searchableSurfaceCollection
label surfI = 0;
label startIndex = 0;
forAllConstIter(dictionary, dict, iter)
for (const entry& dEntry : dict)
{
if (dict.isDict(iter().keyword()))
if (dEntry.isDict())
{
instance_[surfI] = iter().keyword();
instance_[surfI] = dEntry.keyword();
const dictionary& subDict = dict.subDict(instance_[surfI]);
const dictionary& subDict = dEntry.dict();
subDict.readEntry("scale", scale_[surfI]);
transform_.set

View File

@ -119,14 +119,12 @@ Foam::searchableSurfaces::searchableSurfaces(const label size)
// {
// const dictionary& regionsDict = dict.subDict("regions");
//
// forAllConstIter(dictionary, regionsDict, iter)
// for (const entry& dEntry : regionsDict)
// {
// const word& key = iter().keyword();
//
// if (regionsDict.isDict(key))
// if (dEntry.isDict())
// {
// // Get the dictionary for region iter.key()
// const dictionary& regionDict = regionsDict.subDict(key);
// const word& key = dEntry.keyword();
// const dictionary& regionDict = dEntry.dict();
//
// label index = localNames.find(key);
//
@ -178,18 +176,18 @@ Foam::searchableSurfaces::searchableSurfaces
allSurfaces_(identity(topDict.size()))
{
label surfI = 0;
forAllConstIter(dictionary, topDict, iter)
{
const word& key = iter().keyword();
if (!topDict.isDict(key))
for (const entry& dEntry : topDict)
{
if (!dEntry.isDict())
{
FatalErrorInFunction
<< "Found non-dictionary entry " << iter()
<< "Found non-dictionary entry " << dEntry
<< " in top-level dictionary " << topDict
<< exit(FatalError);
}
const word& key = dEntry.keyword();
const dictionary& dict = topDict.subDict(key);
names_[surfI] = dict.lookupOrDefault<word>("name", key);
@ -240,14 +238,12 @@ Foam::searchableSurfaces::searchableSurfaces
{
const dictionary& regionsDict = dict.subDict("regions");
forAllConstIter(dictionary, regionsDict, iter)
for (const entry& dEntry : regionsDict)
{
const word& key = iter().keyword();
if (regionsDict.isDict(key))
if (dEntry.isDict())
{
// Get the dictionary for region iter.keyword()
const dictionary& regionDict = regionsDict.subDict(key);
const word& key = dEntry.keyword();
const dictionary& regionDict = dEntry.dict();
label index = localNames.find(key);
@ -255,8 +251,9 @@ Foam::searchableSurfaces::searchableSurfaces
{
FatalErrorInFunction
<< "Unknown region name " << key
<< " for surface " << s.name() << endl
<< " for surface " << s.name() << nl
<< "Valid region names are " << localNames
<< endl
<< exit(FatalError);
}

View File

@ -125,9 +125,9 @@ void Foam::decompositionMethod::readConstraints()
if (dictptr)
{
forAllConstIters(*dictptr, iter)
for (const entry& dEntry : *dictptr)
{
const dictionary& dict = iter().dict();
const dictionary& dict = dEntry.dict();
constraintTypes.append(dict.get<word>("type"));

View File

@ -161,26 +161,26 @@ void Foam::multiLevelDecomp::createMethodsDict()
// - Only consider sub-dictionaries with a "numberOfSubdomains" entry
// This automatically filters out any coeffs dictionaries
forAllConstIters(coeffsDict_, iter)
for (const entry& dEntry : coeffsDict_)
{
word methodName;
if
(
iter().isDict()
dEntry.isDict()
// non-recursive, no patterns
&& iter().dict().found("numberOfSubdomains", keyType::LITERAL)
&& dEntry.dict().found("numberOfSubdomains", keyType::LITERAL)
)
{
// No method specified? can use a default method?
const bool addDefaultMethod
(
!(iter().dict().found("method", keyType::LITERAL))
!(dEntry.dict().found("method", keyType::LITERAL))
&& !defaultMethod.empty()
);
entry* e = methodsDict_.add(iter());
entry* e = methodsDict_.add(dEntry);
if (addDefaultMethod && e && e->isDict())
{
@ -201,17 +201,17 @@ void Foam::multiLevelDecomp::setMethods()
methods_.clear();
methods_.setSize(methodsDict_.size());
forAllConstIters(methodsDict_, iter)
for (const entry& dEntry : methodsDict_)
{
// Dictionary entries only
// - these method dictionaries are non-regional
if (iter().isDict())
if (dEntry.isDict())
{
methods_.set
(
nLevels++,
// non-verbose would be nicer
decompositionMethod::New(iter().dict())
decompositionMethod::New(dEntry.dict())
);
}
}
@ -475,11 +475,11 @@ void Foam::multiLevelDecomp::decompose
// Get original level0 dictionary and modify numberOfSubdomains
dictionary level0Dict;
forAllConstIters(methodsDict_, iter)
for (const entry& dEntry : methodsDict_)
{
if (iter().isDict())
if (dEntry.isDict())
{
level0Dict = iter().dict();
level0Dict = dEntry.dict();
break;
}
}
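
A sketch of the non-recursive, literal-only filtering used in this file, with an invented coeffs dictionary; includes as in the first sketch:

    IStringStream is
    (
        "level0 { numberOfSubdomains 4; method scotch; }"
        " notes { author unknown; }"
    );
    dictionary coeffsDict(is);

    for (const entry& dEntry : coeffsDict)
    {
        if
        (
            dEntry.isDict()
            // non-recursive lookup, no pattern matching
         && dEntry.dict().found("numberOfSubdomains", keyType::LITERAL)
        )
        {
            Info<< "decomposition level: " << dEntry.keyword() << endl;
        }
    }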

View File

@ -300,12 +300,12 @@ Foam::labelList Foam::zoltanRenumber::renumber
polyMesh& mesh = const_cast<polyMesh&>(pMesh);
forAllConstIter(IDLList<entry>, coeffsDict_, iter)
for (const entry& dEntry : coeffsDict_)
{
if (!iter().isDict())
if (!dEntry.isDict())
{
const word& key = iter().keyword();
const word value(iter().stream());
const word& key = dEntry.keyword();
const word value(dEntry.get<word>());
Info<< typeName << " : setting parameter " << key
<< " to " << value << endl;

View File

@ -92,17 +92,17 @@ void Foam::RBD::rigidBodyModel::addRestraints
restraints_.setSize(restraintDict.size());
forAllConstIter(IDLList<entry>, restraintDict, iter)
for (const entry& dEntry : restraintDict)
{
if (iter().isDict())
if (dEntry.isDict())
{
restraints_.set
(
i++,
restraint::New
(
iter().keyword(),
iter().dict(),
dEntry.keyword(),
dEntry.dict(),
*this
)
);
@ -185,9 +185,10 @@ Foam::RBD::rigidBodyModel::rigidBodyModel
const dictionary& bodiesDict = dict.subDict("bodies");
forAllConstIter(IDLList<entry>, bodiesDict, iter)
for (const entry& dEntry : bodiesDict)
{
const dictionary& bodyDict = iter().dict();
const keyType& key = dEntry.keyword();
const dictionary& bodyDict = dEntry.dict();
if (bodyDict.found("mergeWith"))
{
@ -195,7 +196,7 @@ Foam::RBD::rigidBodyModel::rigidBodyModel
(
bodyID(bodyDict.lookup("mergeWith")),
bodyDict.lookup("transform"),
rigidBody::New(iter().keyword(), bodyDict)
rigidBody::New(key, bodyDict)
);
}
else
@ -205,7 +206,7 @@ Foam::RBD::rigidBodyModel::rigidBodyModel
bodyID(bodyDict.lookup("parent")),
bodyDict.lookup("transform"),
joint::New(bodyDict.subDict("joint")),
rigidBody::New(iter().keyword(), bodyDict)
rigidBody::New(key, bodyDict)
);
}
}

View File

@ -136,18 +136,19 @@ Foam::rigidBodyMeshMotion::rigidBodyMeshMotion
const dictionary& bodiesDict = coeffDict().subDict("bodies");
forAllConstIter(IDLList<entry>, bodiesDict, iter)
for (const entry& dEntry : bodiesDict)
{
const dictionary& bodyDict = iter().dict();
const keyType& bodyName = dEntry.keyword();
const dictionary& bodyDict = dEntry.dict();
if (bodyDict.found("patches"))
{
const label bodyID = model_.bodyID(iter().keyword());
const label bodyID = model_.bodyID(bodyName);
if (bodyID == -1)
{
FatalErrorInFunction
<< "Body " << iter().keyword()
<< "Body " << bodyName
<< " has been merged with another body"
" and cannot be assigned a set of patches"
<< exit(FatalError);
@ -158,7 +159,7 @@ Foam::rigidBodyMeshMotion::rigidBodyMeshMotion
new bodyMesh
(
mesh,
iter().keyword(),
bodyName,
bodyID,
bodyDict
)

View File

@ -127,18 +127,19 @@ Foam::rigidBodyMeshMotionSolver::rigidBodyMeshMotionSolver
const dictionary& bodiesDict = coeffDict().subDict("bodies");
forAllConstIter(IDLList<entry>, bodiesDict, iter)
for (const entry& dEntry : bodiesDict)
{
const dictionary& bodyDict = iter().dict();
const keyType& bodyName = dEntry.keyword();
const dictionary& bodyDict = dEntry.dict();
if (bodyDict.found("patches"))
{
const label bodyID = model_.bodyID(iter().keyword());
const label bodyID = model_.bodyID(bodyName);
if (bodyID == -1)
{
FatalErrorInFunction
<< "Body " << iter().keyword()
<< "Body " << bodyName
<< " has been merged with another body"
" and cannot be assigned a set of patches"
<< exit(FatalError);
@ -149,7 +150,7 @@ Foam::rigidBodyMeshMotionSolver::rigidBodyMeshMotionSolver
new bodyMesh
(
mesh,
iter().keyword(),
bodyName,
bodyID,
bodyDict
)

View File

@ -346,14 +346,15 @@ Foam::fileName Foam::ensightSurfaceWriter::writeCollated
<< "VARIABLE" << nl;
const dictionary& fieldsDict = dict.subDict("fields");
forAllConstIter(dictionary, fieldsDict, iter)
for (const entry& dEntry : fieldsDict)
{
const dictionary& subDict = iter().dict();
const dictionary& subDict = dEntry.dict();
const word fieldType(subDict.get<word>("type"));
const word varName = subDict.lookupOrDefault
(
"name",
iter().keyword() // fieldName as fallback
dEntry.keyword() // fieldName as fallback
);
osCase

View File

@ -196,17 +196,17 @@ void Foam::sixDoFRigidBodyMotion::addRestraints
restraints_.setSize(restraintDict.size());
forAllConstIter(IDLList<entry>, restraintDict, iter)
for (const entry& dEntry : restraintDict)
{
if (iter().isDict())
if (dEntry.isDict())
{
restraints_.set
(
i++,
sixDoFRigidBodyMotionRestraint::New
(
iter().keyword(),
iter().dict()
dEntry.keyword(),
dEntry.dict()
)
);
}
@ -233,17 +233,17 @@ void Foam::sixDoFRigidBodyMotion::addConstraints
pointConstraint pct;
pointConstraint pcr;
forAllConstIter(IDLList<entry>, constraintDict, iter)
for (const entry& dEntry : constraintDict)
{
if (iter().isDict())
if (dEntry.isDict())
{
constraints_.set
(
i,
sixDoFRigidBodyMotionConstraint::New
(
iter().keyword(),
iter().dict(),
dEntry.keyword(),
dEntry.dict(),
*this
)
);

View File

@ -75,16 +75,18 @@ Foam::radiation::greyMeanAbsorptionEmission::greyMeanAbsorptionEmission
label nFunc = 0;
const dictionary& functionDicts = dict.optionalSubDict(typeName + "Coeffs");
forAllConstIter(dictionary, functionDicts, iter)
for (const entry& dEntry : functionDicts)
{
// safety:
if (!iter().isDict())
if (!dEntry.isDict()) // safety
{
continue;
}
const word& key = iter().keyword();
const word& key = dEntry.keyword();
const dictionary& dict = dEntry.dict();
speciesNames_.insert(key, nFunc);
const dictionary& dict = iter().dict();
coeffs_[nFunc].initialise(dict);
nFunc++;
}
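
A sketch of the keyword-to-index map built above, skipping any stray non-dictionary entries; the species names are invented, and HashTable.H is assumed in addition to the earlier includes:

    IStringStream is("CO2 { Tcommon 300; } H2O { Tcommon 300; } banana 42;");
    dictionary functionDicts(is);

    HashTable<label> speciesNames;

    label nFunc = 0;
    for (const entry& dEntry : functionDicts)
    {
        if (!dEntry.isDict())  // safety
        {
            continue;
        }

        speciesNames.insert(dEntry.keyword(), nFunc);
        ++nFunc;
    }

    Info<< "species indices: " << speciesNames << endl;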

View File

@ -105,14 +105,16 @@ greyMeanSolidAbsorptionEmission
label nFunc = 0;
const dictionary& functionDicts = dict.optionalSubDict(typeName + "Coeffs");
forAllConstIter(dictionary, functionDicts, iter)
for (const entry& dEntry : functionDicts)
{
// safety:
if (!iter().isDict())
if (!dEntry.isDict()) // safety
{
continue;
}
const word& key = iter().keyword();
const word& key = dEntry.keyword();
const dictionary& dict = dEntry.dict();
if (!mixture_.contains(key))
{
WarningInFunction
@ -122,7 +124,7 @@ greyMeanSolidAbsorptionEmission
<< nl << endl;
}
speciesNames_.insert(key, nFunc);
const dictionary& dict = iter().dict();
dict.readEntry("absorptivity", solidData_[nFunc][absorptivity]);
dict.readEntry("emissivity", solidData_[nFunc][emissivity]);

View File

@ -65,36 +65,35 @@ Foam::radiation::wideBandAbsorptionEmission::wideBandAbsorptionEmission
{
label nBand = 0;
const dictionary& functionDicts = dict.optionalSubDict(typeName +"Coeffs");
forAllConstIter(dictionary, functionDicts, iter)
for (const entry& dEntry : functionDicts)
{
// safety:
if (!iter().isDict())
if (!dEntry.isDict()) // safety
{
continue;
}
const dictionary& dict = iter().dict();
const dictionary& dict = dEntry.dict();
dict.readEntry("bandLimits", iBands_[nBand]);
dict.readEntry("EhrrCoeff", iEhrrCoeffs_[nBand]);
totalWaveLength_ += iBands_[nBand][1] - iBands_[nBand][0];
label nSpec = 0;
const dictionary& specDicts = dict.subDict("species");
forAllConstIter(dictionary, specDicts, iter)
for (const entry& dEntry : specDicts)
{
const word& key = iter().keyword();
const word& key = dEntry.keyword();
if (nBand == 0)
{
speciesNames_.insert(key, nSpec);
}
else
else if (!speciesNames_.found(key))
{
if (!speciesNames_.found(key))
{
FatalErrorInFunction
<< "specie: " << key << " is not in all the bands"
<< nl << exit(FatalError);
}
FatalErrorInFunction
<< "specie: " << key << " is not in all the bands"
<< nl << exit(FatalError);
}
coeffs_[nBand][nSpec].initialise(specDicts.subDict(key));
nSpec++;

View File

@ -82,19 +82,15 @@ Foam::ReactionList<ThermoType>::~ReactionList()
template<class ThermoType>
bool Foam::ReactionList<ThermoType>::readReactionDict()
{
const dictionary& reactions(dict_.subDict("reactions"));
forAllConstIter(dictionary, reactions, iter)
for (const entry& dEntry : dict_.subDict("reactions"))
{
const word reactionName = iter().keyword();
this->append
(
Reaction<ThermoType>::New
(
species_,
thermoDb_,
reactions.subDict(reactionName)
dEntry.dict()
).ptr()
);
}
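
Finally, a sketch of taking the sub-dictionary straight from the entry (dEntry.dict()) rather than re-looking it up with subDict(keyword), as in the hunk above; the reaction names and their contents are invented, includes as in the first sketch:

    IStringStream is
    (
        "reactions"
        "{"
        "    reactionOne { type irreversible; rate fast; }"
        "    reactionTwo { type reversible; rate slow; }"
        "}"
    );
    dictionary dict(is);

    for (const entry& dEntry : dict.subDict("reactions"))
    {
        // The entry already holds its dictionary; no second lookup needed
        const dictionary& reactionDict = dEntry.dict();

        Info<< dEntry.keyword() << " : "
            << reactionDict.get<word>("type") << endl;
    }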