STYLE: use range-for when looping dictionary entries.

- as part of the cleanup of dictionary access methods (c6520033c9)
  made the dictionary class single inheritance from IDLList<entry>.

  This eliminates any ambiguities for iterators and allows
  for simple use of range-for looping.

  Eg,
      for (const entry& e : topDict)
      {
          Info<< "entry:" << e.keyword() << " is dict:" << e.isDict() << nl;
      }

   vs

      forAllConstIter(dictionary, topDict, iter)
      {
          Info<< "entry:" << iter().keyword()
              << " is dict:" << iter().isDict() << nl;
      }
This commit is contained in:
Mark Olesen
2018-10-19 13:08:24 +02:00
parent 4e04c1966f
commit 07dafe7b0b
60 changed files with 636 additions and 653 deletions

View File

@ -25,12 +25,7 @@
PtrList<fvScalarMatrix> p_rghEqnComps(mixture.phases().size()); PtrList<fvScalarMatrix> p_rghEqnComps(mixture.phases().size());
label phasei = 0; label phasei = 0;
forAllConstIter forAllConstIters(mixture.phases(), phase)
(
PtrDictionary<phaseModel>,
mixture.phases(),
phase
)
{ {
const rhoThermo& thermo = phase().thermo(); const rhoThermo& thermo = phase().thermo();
const volScalarField& rho = thermo.rho()(); const volScalarField& rho = thermo.rho()();
@ -61,12 +56,7 @@
tmp<fvScalarMatrix> p_rghEqnComp; tmp<fvScalarMatrix> p_rghEqnComp;
phasei = 0; phasei = 0;
forAllConstIter forAllConstIters(mixture.phases(), phase)
(
PtrDictionary<phaseModel>,
mixture.phases(),
phase
)
{ {
tmp<fvScalarMatrix> hmm tmp<fvScalarMatrix> hmm
( (

View File

@ -35,12 +35,7 @@
- fvm::laplacian(rAUf, p_rgh) - fvm::laplacian(rAUf, p_rgh)
); );
forAllConstIter forAllConstIters(fluid.totalPhasePairs(), iter)
(
phaseSystem::phasePairTable,
fluid.totalPhasePairs(),
iter
)
{ {
const phasePair& pair = iter()(); const phasePair& pair = iter()();

View File

@ -202,7 +202,7 @@ void Foam::MultiComponentPhaseModel<BasePhaseModel, phaseThermo>::solveYi
surfaceScalarField phir(0.0*phi); surfaceScalarField phir(0.0*phi);
forAllConstIter(phaseSystem::phaseModelTable,this->fluid().phases(),iter2) forAllConstIters(this->fluid().phases(),iter2)
{ {
const volScalarField& alpha2 = iter2(); const volScalarField& alpha2 = iter2();
if (&alpha2 == &alpha1) if (&alpha2 == &alpha1)
@ -251,10 +251,7 @@ void Foam::MultiComponentPhaseModel<BasePhaseModel, phaseThermo>::solveYi
surfaceScalarField& phiYiCorr = phiYiCorrs[i]; surfaceScalarField& phiYiCorr = phiYiCorrs[i];
forAllConstIter forAllConstIters(this->fluid().phases(), iter2)
(
phaseSystem::phaseModelTable, this->fluid().phases(), iter2
)
{ {
//const volScalarField& alpha2 = iter2()().oldTime(); //const volScalarField& alpha2 = iter2()().oldTime();
const volScalarField& alpha2 = iter2(); const volScalarField& alpha2 = iter2();

View File

@ -125,15 +125,8 @@ massTransfer() const
this->massTransferModels_[key][phase.name()]->K() this->massTransferModels_[key][phase.name()]->K()
); );
forAllConstIter for (const word& member : compositionModel.species())
(
hashedWordList,
compositionModel.species(),
memberIter
)
{ {
const word& member = *memberIter;
const word name const word name
( (
IOobject::groupName(member, phase.name()) IOobject::groupName(member, phase.name())

View File

@ -157,14 +157,14 @@ Foam::phaseSystem::phaseSystem
phi_.writeOpt() = IOobject::AUTO_WRITE; phi_.writeOpt() = IOobject::AUTO_WRITE;
// Blending methods // Blending methods
forAllConstIter(dictionary, subDict("blending"), iter) for (const entry& dEntry : subDict("blending"))
{ {
blendingMethods_.insert blendingMethods_.insert
( (
iter().dict().dictName(), dEntry.dict().dictName(),
blendingMethod::New blendingMethod::New
( (
iter().dict(), dEntry.dict(),
phaseModels_.toc() phaseModels_.toc()
) )
); );

View File

@ -113,14 +113,14 @@ Foam::twoPhaseSystem::twoPhaseSystem
// Blending // Blending
forAllConstIter(dictionary, subDict("blending"), iter) for (const entry& dEntry : subDict("blending"))
{ {
blendingMethods_.insert blendingMethods_.insert
( (
iter().dict().dictName(), dEntry.dict().dictName(),
blendingMethod::New blendingMethod::New
( (
iter().dict(), dEntry.dict(),
wordList(lookup("phases")) wordList(lookup("phases"))
) )
); );

View File

@ -182,11 +182,11 @@ int main(int argc, char *argv[])
dictionary inputDict(is); dictionary inputDict(is);
forAllConstIters(inputDict, iter) for (const entry& dEntry : inputDict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
doTest(runTime, iter().dict()); doTest(runTime, dEntry.dict());
} }
} }
} }
@ -204,11 +204,11 @@ int main(int argc, char *argv[])
dictionary inputDict(is); dictionary inputDict(is);
forAllConstIters(inputDict, iter) for (const entry& dEntry : inputDict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
doTest(iter().dict()); doTest(dEntry.dict());
} }
} }
} }

View File

@ -650,17 +650,19 @@ int main(int argc, char *argv[])
// Per faceSet the patch to put the coupled baffles into // Per faceSet the patch to put the coupled baffles into
DynamicList<FixedList<word, 3>> coupledAndPatches(10); DynamicList<FixedList<word, 3>> coupledAndPatches(10);
const dictionary& functionDicts = dict.subDict("coupledFaces"); const dictionary& functionDicts = dict.subDict("coupledFaces");
forAllConstIter(dictionary, functionDicts, iter)
for (const entry& dEntry : functionDicts)
{ {
// safety: if (!dEntry.isDict()) // Safety
if (!iter().isDict())
{ {
continue; continue;
} }
const word& key = iter().keyword();
const dictionary& dict = iter().dict(); const word& key = dEntry.keyword();
const dictionary& dict = dEntry.dict();
const word cyclicName = dict.get<word>("cyclicMasterPatch"); const word cyclicName = dict.get<word>("cyclicMasterPatch");
const word wallName = dict.get<word>("wallPatch"); const word wallName = dict.get<word>("wallPatch");
FixedList<word, 3> nameAndType; FixedList<word, 3> nameAndType;

View File

@ -93,14 +93,10 @@ Foam::cellSizeAndAlignmentControls::cellSizeAndAlignmentControls
{ {
label functionI = 0; label functionI = 0;
forAllConstIter(dictionary, shapeControlDict_, iter) for (const entry& dEntry : shapeControlDict_)
{ {
word shapeControlEntryName = iter().keyword(); const word& shapeControlEntryName = dEntry.keyword();
const dictionary& controlFunctionDict = dEntry.dict();
const dictionary& controlFunctionDict
(
shapeControlDict_.subDict(shapeControlEntryName)
);
Info<< nl << "Shape Control : " << shapeControlEntryName << endl; Info<< nl << "Shape Control : " << shapeControlEntryName << endl;
Info<< incrIndent; Info<< incrIndent;

View File

@ -535,16 +535,12 @@ Foam::conformationSurfaces::conformationSurfaces
Info<< nl << "Reading additionalFeatures" << endl; Info<< nl << "Reading additionalFeatures" << endl;
} }
forAllConstIter(dictionary, additionalFeaturesDict, iter) for (const entry& dEntry : additionalFeaturesDict)
{ {
word featureName = iter().keyword(); const word& featureName = dEntry.keyword();
const dictionary& featureSubDict = dEntry.dict();
Info<< nl << " " << iter().keyword() << endl; Info<< nl << " " << featureName << endl;
const dictionary& featureSubDict
(
additionalFeaturesDict.subDict(featureName)
);
readFeatures(featureSubDict, featureName, featureI); readFeatures(featureSubDict, featureName, featureI);
} }

View File

@ -474,27 +474,24 @@ int main(int argc, char *argv[])
const dictionary& selectionsDict = dict.subDict("baffles"); const dictionary& selectionsDict = dict.subDict("baffles");
label n = 0; selectors.resize(selectionsDict.size());
forAllConstIter(dictionary, selectionsDict, iter)
label nselect = 0;
for (const entry& dEntry : selectionsDict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{
n++;
}
}
selectors.setSize(n);
n = 0;
forAllConstIter(dictionary, selectionsDict, iter)
{
if (iter().isDict())
{ {
selectors.set selectors.set
( (
n++, nselect,
faceSelection::New(iter().keyword(), mesh, iter().dict()) faceSelection::New(dEntry.keyword(), mesh, dEntry.dict())
); );
++nselect;
} }
} }
selectors.resize(nselect);
} }
@ -641,10 +638,9 @@ int main(int argc, char *argv[])
if (dict.found("patches")) if (dict.found("patches"))
{ {
const dictionary& patchSources = dict.subDict("patches"); for (const entry& dEntry : dict.subDict("patches"))
forAllConstIter(dictionary, patchSources, iter)
{ {
const word patchName(iter().dict().get<word>("name")); const word patchName(dEntry.dict().get<word>("name"));
bafflePatches.insert(patchName); bafflePatches.insert(patchName);
} }
@ -687,14 +683,15 @@ int main(int argc, char *argv[])
if (dict.found("patches")) if (dict.found("patches"))
{ {
const dictionary& patchSources = dict.subDict("patches"); for (const entry& dEntry : dict.subDict("patches"))
forAllConstIter(dictionary, patchSources, iter)
{ {
const word patchName(iter().dict().get<word>("name")); const dictionary& dict = dEntry.dict();
const word patchName(dict.get<word>("name"));
if (pbm.findPatchID(patchName) == -1) if (pbm.findPatchID(patchName) == -1)
{ {
dictionary patchDict = iter().dict(); dictionary patchDict = dict;
patchDict.set("nFaces", 0); patchDict.set("nFaces", 0);
patchDict.set("startFace", 0); patchDict.set("startFace", 0);
@ -789,13 +786,14 @@ int main(int argc, char *argv[])
if (dict.found("patches")) if (dict.found("patches"))
{ {
const dictionary& patchSources = dict.subDict("patches");
bool master = true; bool master = true;
forAllConstIter(dictionary, patchSources, iter)
for (const entry& dEntry : dict.subDict("patches"))
{ {
const word patchName(iter().dict().get<word>("name")); const word patchName(dEntry.dict().get<word>("name"));
label patchi = pbm.findPatchID(patchName);
const label patchi = pbm.findPatchID(patchName);
if (master) if (master)
{ {
newMasterPatches.append(patchi); newMasterPatches.append(patchi);
@ -885,17 +883,18 @@ int main(int argc, char *argv[])
const dictionary& dict = selectors[selectorI].dict(); const dictionary& dict = selectors[selectorI].dict();
if (dict.found("patches")) if (dict.found("patches"))
{ {
const dictionary& patchSources = dict.subDict("patches"); for (const entry& dEntry : dict.subDict("patches"))
forAllConstIter(dictionary, patchSources, iter)
{ {
const word patchName(iter().dict().get<word>("name")); const dictionary& dict = dEntry.dict();
const word patchName(dict.get<word>("name"));
label patchi = pbm.findPatchID(patchName); label patchi = pbm.findPatchID(patchName);
if (iter().dict().found("patchFields")) if (dEntry.dict().found("patchFields"))
{ {
const dictionary& patchFieldsDict = const dictionary& patchFieldsDict =
iter().dict().subDict dEntry.dict().subDict
( (
"patchFields" "patchFields"
); );
@ -928,11 +927,11 @@ int main(int argc, char *argv[])
if (sameGroup) if (sameGroup)
{ {
// Add coupleGroup to all entries // Add coupleGroup to all entries
forAllIter(dictionary, patchFieldsDict, iter) for (entry& dEntry : patchFieldsDict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
dictionary& dict = iter().dict(); dictionary& dict = dEntry.dict();
dict.set("coupleGroup", groupName); dict.set("coupleGroup", groupName);
} }
} }

View File

@ -292,16 +292,17 @@ int main(int argc, char *argv[])
// Suppress duplicate names // Suppress duplicate names
wordHashSet requestedPatches; wordHashSet requestedPatches;
forAllConstIters(stitchDict, iter) for (const entry& dEntry : stitchDict)
{ {
if (!iter().isDict()) if (!dEntry.isDict())
{ {
Info<< "Ignoring non-dictionary entry: " Info<< "Ignoring non-dictionary entry: "
<< iter().keyword() << nl; << dEntry.keyword() << nl;
continue; continue;
} }
const dictionary& dict = iter().dict(); const keyType& key = dEntry.keyword();
const dictionary& dict = dEntry.dict();
// Match type // Match type
word matchName; word matchName;
@ -358,7 +359,7 @@ int main(int argc, char *argv[])
// Input was validated // Input was validated
validatedDict.add(iter().keyword(), iter().dict()); validatedDict.add(key, dict);
} }
} }
@ -433,9 +434,9 @@ int main(int argc, char *argv[])
// Step through the topology changes // Step through the topology changes
label actioni = 0; label actioni = 0;
forAllConstIters(validatedDict, iter) for (const entry& dEntry : validatedDict)
{ {
const dictionary& dict = iter().dict(); const dictionary& dict = dEntry.dict();
// Match type // Match type
bool perfect = false; bool perfect = false;

View File

@ -97,10 +97,10 @@ HashTable<wordList> extractPatchGroups(const dictionary& boundaryDict)
{ {
HashTable<wordList> groupToPatch; HashTable<wordList> groupToPatch;
forAllConstIter(dictionary, boundaryDict, iter) for (const entry& dEntry : boundaryDict)
{ {
const word& patchName = iter().keyword(); const word& patchName = dEntry.keyword();
const dictionary& patchDict = iter().dict(); const dictionary& patchDict = dEntry.dict();
wordList groups; wordList groups;
if (patchDict.readIfPresent("inGroups", groups)) if (patchDict.readIfPresent("inGroups", groups))
@ -243,9 +243,9 @@ bool merge
// Pass 1. All literal matches // Pass 1. All literal matches
forAllConstIter(IDLList<entry>, mergeDict, mergeIter) for (const entry& mergeEntry : mergeDict)
{ {
const keyType& key = mergeIter().keyword(); const keyType& key = mergeEntry.keyword();
if (key[0] == '~') if (key[0] == '~')
{ {
@ -274,7 +274,7 @@ bool merge
( (
thisDict, thisDict,
*eptr, *eptr,
mergeIter(), mergeEntry,
literalRE, literalRE,
shortcuts shortcuts
) )
@ -287,8 +287,8 @@ bool merge
{ {
if (addNonExisting) if (addNonExisting)
{ {
// not found - just add // Not found - just add
thisDict.add(mergeIter().clone(thisDict).ptr()); thisDict.add(mergeEntry.clone(thisDict).ptr());
changed = true; changed = true;
} }
else else
@ -309,9 +309,9 @@ bool merge
// Pick up remaining dictionary entries // Pick up remaining dictionary entries
wordList thisKeys(thisKeysSet.toc()); wordList thisKeys(thisKeysSet.toc());
forAllConstIter(IDLList<entry>, mergeDict, mergeIter) for (const entry& mergeEntry : mergeDict)
{ {
const keyType& key = mergeIter().keyword(); const keyType& key = mergeEntry.keyword();
if (key[0] == '~') if (key[0] == '~')
{ {
@ -364,7 +364,7 @@ bool merge
( (
thisDict, thisDict,
*eptr, *eptr,
mergeIter(), mergeEntry,
literalRE, literalRE,
HashTable<wordList>(0) // no shortcuts HashTable<wordList>(0) // no shortcuts
// at deeper levels // at deeper levels
@ -462,7 +462,7 @@ int main(int argc, char *argv[])
const bool enableEntries = args.found("enableFunctionEntries"); const bool enableEntries = args.found("enableFunctionEntries");
if (enableEntries) if (enableEntries)
{ {
Info<< "Allowing dictionary preprocessing ('#include', '#codeStream')." Info<< "Allowing dictionary preprocessing (#include, #codeStream)."
<< endl; << endl;
} }
@ -550,9 +550,9 @@ int main(int argc, char *argv[])
// Temporary convert to dictionary // Temporary convert to dictionary
dictionary fieldDict; dictionary fieldDict;
forAll(dictList, i) for (const entry& e : dictList)
{ {
fieldDict.add(dictList[i].keyword(), dictList[i].dict()); fieldDict.add(e.keyword(), e.dict());
} }
if (dictList.size()) if (dictList.size())
@ -582,9 +582,11 @@ int main(int argc, char *argv[])
// Every replacement is a dictionary name and a keyword in this // Every replacement is a dictionary name and a keyword in this
forAllConstIter(dictionary, replaceDicts, fieldIter) for (const entry& replaceEntry : replaceDicts)
{ {
const word& fieldName = fieldIter().keyword(); const word& fieldName = replaceEntry.keyword();
const dictionary& replaceDict = replaceEntry.dict();
Info<< "Replacing entries in dictionary " << fieldName << endl; Info<< "Replacing entries in dictionary " << fieldName << endl;
// Handle 'boundary' specially: // Handle 'boundary' specially:
@ -595,11 +597,8 @@ int main(int argc, char *argv[])
Info<< "Special handling of " << fieldName Info<< "Special handling of " << fieldName
<< " as polyMesh/boundary file." << endl; << " as polyMesh/boundary file." << endl;
// Get the replacement dictionary for the field
const dictionary& replaceDict = fieldIter().dict();
Info<< "Merging entries from " << replaceDict.toc() << endl;
// Merge the replacements in. Do not add non-existing entries. // Merge the replacements in. Do not add non-existing entries.
Info<< "Merging entries from " << replaceDict.toc() << endl;
merge(false, fieldDict, replaceDict, literalRE, patchGroups); merge(false, fieldDict, replaceDict, literalRE, patchGroups);
Info<< "fieldDict:" << fieldDict << endl; Info<< "fieldDict:" << fieldDict << endl;
@ -627,9 +626,9 @@ int main(int argc, char *argv[])
// Add remaining entries // Add remaining entries
label sz = dictList.size(); label sz = dictList.size();
dictList.setSize(nEntries); dictList.setSize(nEntries);
forAllConstIter(dictionary, fieldDict, iter) for (const entry& e : fieldDict)
{ {
dictList.set(sz++, iter().clone()); dictList.set(sz++, e.clone());
} }
Info<< "Writing modified " << fieldName << endl; Info<< "Writing modified " << fieldName << endl;
@ -672,11 +671,8 @@ int main(int argc, char *argv[])
Info<< "Loaded dictionary " << fieldName Info<< "Loaded dictionary " << fieldName
<< " with entries " << fieldDict.toc() << endl; << " with entries " << fieldDict.toc() << endl;
// Get the replacement dictionary for the field
const dictionary& replaceDict = fieldIter().dict();
Info<< "Merging entries from " << replaceDict.toc() << endl;
// Merge the replacements in (allow adding) // Merge the replacements in (allow adding)
Info<< "Merging entries from " << replaceDict.toc() << endl;
merge(true, fieldDict, replaceDict, literalRE, patchGroups); merge(true, fieldDict, replaceDict, literalRE, patchGroups);
Info<< "Writing modified fieldDict " << fieldName << endl; Info<< "Writing modified fieldDict " << fieldName << endl;

View File

@ -54,9 +54,9 @@ Foam::boundaryTemplates::boundaryTemplates
) )
); );
forAllConstIter(dictionary, regionBCs, iter) for (const entry& dEntry : regionBCs)
{ {
const word& regionType = iter().keyword(); const word& regionType = dEntry.keyword();
wordList patchTypes(regionBCs.lookup(regionType)); wordList patchTypes(regionBCs.lookup(regionType));
dictionary regionTemplate = dictionary::null; dictionary regionTemplate = dictionary::null;
@ -175,7 +175,7 @@ Foam::dictionary Foam::boundaryTemplates::generatePatchDict
// look for inlet, outlet, wall etc // look for inlet, outlet, wall etc
if (regionTemplates.found(category)) if (regionTemplates.found(category))
{ {
const dictionary& categoryDict(regionTemplates.subDict(category)); const dictionary& categoryDict = regionTemplates.subDict(category);
// look for subSonic, slip etc // look for subSonic, slip etc
if (categoryDict.found(patchType)) if (categoryDict.found(patchType))
@ -199,10 +199,8 @@ Foam::dictionary Foam::boundaryTemplates::generatePatchDict
const wordList requiredOptions(patchDict.lookup("OPTIONS")); const wordList requiredOptions(patchDict.lookup("OPTIONS"));
forAll(requiredOptions, i) for (const word& option : requiredOptions)
{ {
const word& option = requiredOptions[i];
word selected; word selected;
if (!conditionOptions.readIfPresent(option, selected)) if (!conditionOptions.readIfPresent(option, selected))
{ {
@ -252,18 +250,19 @@ Foam::dictionary Foam::boundaryTemplates::generatePatchDict
dictionary dict(dictionary::null); dictionary dict(dictionary::null);
const dictionary& fieldDict(patchDict.subDict(fieldName)); const dictionary& fieldDict(patchDict.subDict(fieldName));
forAllConstIter(IDLList<entry>, fieldDict, iter) for (const entry& dEntry : fieldDict)
{ {
OStringStream oss; OStringStream oss;
oss << iter(); oss << dEntry;
string s(oss.str()); string s(oss.str());
s.replace(iter().keyword(), ""); s.replace(dEntry.keyword(), "");
s.replace s.replace
( (
"VALUE", "VALUE",
"boundaryConditions." + condition + ".values" "boundaryConditions." + condition + ".values"
); );
dict.add(iter().keyword(), s.c_str()); dict.add(dEntry.keyword(), s.c_str());
} }
return dict; return dict;
@ -344,7 +343,7 @@ bool Foam::boundaryTemplates::optionsRequired
if (regionTemplates.found(category)) if (regionTemplates.found(category))
{ {
const dictionary& categoryDict(regionTemplates.subDict(category)); const dictionary& categoryDict = regionTemplates.subDict(category);
if (categoryDict.found(patchType)) if (categoryDict.found(patchType))
{ {

View File

@ -86,9 +86,10 @@ int main(int argc, char *argv[])
label nCoarseFaces = 0; label nCoarseFaces = 0;
forAllConstIter(dictionary, agglomDict, iter) for (const entry& dEntry : agglomDict)
{ {
labelList patchids = boundary.indices(iter().keyword()); labelList patchids = boundary.indices(dEntry.keyword());
for (const label patchi : patchids) for (const label patchi : patchids)
{ {
const polyPatch& pp = boundary[patchi]; const polyPatch& pp = boundary[patchi];

View File

@ -241,14 +241,15 @@ int main(int argc, char *argv[])
// Where to write VTK output files // Where to write VTK output files
const fileName vtkOutputDir = runTime.constantPath()/"triSurface"; const fileName vtkOutputDir = runTime.constantPath()/"triSurface";
forAllConstIters(dict, iter) for (const entry& dEntry : dict)
{ {
if (!iter().isDict() || iter().keyword().isPattern()) if (!dEntry.isDict() || dEntry.keyword().isPattern()) // safety
{ {
continue; continue;
} }
const dictionary& surfaceDict = iter().dict(); const word& dictName = dEntry.keyword();
const dictionary& surfaceDict = dEntry.dict();
if (!surfaceDict.found("extractionMethod")) if (!surfaceDict.found("extractionMethod"))
{ {
@ -257,7 +258,6 @@ int main(int argc, char *argv[])
} }
// The output name based in dictionary name (without extensions) // The output name based in dictionary name (without extensions)
const word& dictName = iter().keyword();
const word outputName = dictName.lessExt(); const word outputName = dictName.lessExt();
autoPtr<surfaceFeaturesExtraction::method> extractor = autoPtr<surfaceFeaturesExtraction::method> extractor =

View File

@ -84,12 +84,12 @@ int main(int argc, char *argv[])
const dictionary& surfacesDict = meshDict.subDict("surfaces"); const dictionary& surfacesDict = meshDict.subDict("surfaces");
forAllConstIter(dictionary, surfacesDict, surfacesIter) for (const entry& dEntry : surfacesDict)
{ {
if (surfacesIter().isDict()) if (dEntry.isDict())
{ {
const word& surfName = surfacesIter().keyword(); const word& surfName = dEntry.keyword();
const dictionary& surfDict = surfacesIter().dict(); const dictionary& surfDict = dEntry.dict();
// Look up surface // Look up surface
searchableSurface& surf = allGeometry[surfName]; searchableSurface& surf = allGeometry[surfName];
@ -120,10 +120,11 @@ int main(int argc, char *argv[])
if (surfDict.found("regions")) if (surfDict.found("regions"))
{ {
const dictionary& regionsDict = surfDict.subDict("regions"); const dictionary& regionsDict = surfDict.subDict("regions");
forAllConstIter(dictionary, regionsDict, regionsIter)
for (const entry& e : regionsDict)
{ {
const dictionary& regionDict = regionsIter().dict(); const keyType& regionName = e.keyword();
const keyType& regionName = regionsIter().keyword(); const dictionary& regionDict = e.dict();
autoPtr<searchableSurfaceModifier> modifier autoPtr<searchableSurfaceModifier> modifier
( (

View File

@ -147,11 +147,9 @@ void Foam::HashPtrTable<T, Key, Hash>::read
const INew& inew const INew& inew
) )
{ {
forAllConstIter(dictionary, dict, iter) for (const entry& e : dict)
{ {
const word& k = iter().keyword(); this->set(e.keyword(), inew(e.dict()).ptr());
this->set(k, inew(dict.subDict(k)).ptr());
} }
} }

View File

@ -109,9 +109,9 @@ void Foam::Time::readDict()
simpleObjectRegistry& objs = debug::debugObjects(); simpleObjectRegistry& objs = debug::debugObjects();
forAllConstIters(*localDict, iter) for (const entry& dEntry : *localDict)
{ {
const word& name = iter().keyword(); const word& name = dEntry.keyword();
simpleObjectRegistryEntry* objPtr = objs.lookupPtr(name); simpleObjectRegistryEntry* objPtr = objs.lookupPtr(name);
@ -119,14 +119,14 @@ void Foam::Time::readDict()
{ {
const List<simpleRegIOobject*>& objects = *objPtr; const List<simpleRegIOobject*>& objects = *objPtr;
DetailInfo << " " << iter() << nl; DetailInfo << " " << dEntry << nl;
if (iter().isDict()) if (dEntry.isDict())
{ {
for (simpleRegIOobject* obj : objects) for (simpleRegIOobject* obj : objects)
{ {
OStringStream os(IOstream::ASCII); OStringStream os(IOstream::ASCII);
os << iter().dict(); os << dEntry.dict();
IStringStream is(os.str()); IStringStream is(os.str());
obj->readData(is); obj->readData(is);
} }
@ -135,7 +135,7 @@ void Foam::Time::readDict()
{ {
for (simpleRegIOobject* obj : objects) for (simpleRegIOobject* obj : objects)
{ {
obj->readData(iter().stream()); obj->readData(dEntry.stream());
} }
} }
} }
@ -156,9 +156,9 @@ void Foam::Time::readDict()
simpleObjectRegistry& objs = debug::infoObjects(); simpleObjectRegistry& objs = debug::infoObjects();
forAllConstIters(*localDict, iter) for (const entry& dEntry : *localDict)
{ {
const word& name = iter().keyword(); const word& name = dEntry.keyword();
simpleObjectRegistryEntry* objPtr = objs.lookupPtr(name); simpleObjectRegistryEntry* objPtr = objs.lookupPtr(name);
@ -166,14 +166,14 @@ void Foam::Time::readDict()
{ {
const List<simpleRegIOobject*>& objects = *objPtr; const List<simpleRegIOobject*>& objects = *objPtr;
DetailInfo << " " << iter() << nl; DetailInfo << " " << dEntry << nl;
if (iter().isDict()) if (dEntry.isDict())
{ {
for (simpleRegIOobject* obj : objects) for (simpleRegIOobject* obj : objects)
{ {
OStringStream os(IOstream::ASCII); OStringStream os(IOstream::ASCII);
os << iter().dict(); os << dEntry.dict();
IStringStream is(os.str()); IStringStream is(os.str());
obj->readData(is); obj->readData(is);
} }
@ -182,7 +182,7 @@ void Foam::Time::readDict()
{ {
for (simpleRegIOobject* obj : objects) for (simpleRegIOobject* obj : objects)
{ {
obj->readData(iter().stream()); obj->readData(dEntry.stream());
} }
} }
} }
@ -202,24 +202,24 @@ void Foam::Time::readDict()
simpleObjectRegistry& objs = debug::optimisationObjects(); simpleObjectRegistry& objs = debug::optimisationObjects();
forAllConstIters(*localDict, iter) for (const entry& dEntry : *localDict)
{ {
const word& name = iter().keyword(); const word& name = dEntry.keyword();
simpleObjectRegistryEntry* objPtr = objs.lookupPtr(name); simpleObjectRegistryEntry* objPtr = objs.lookupPtr(name);
if (objPtr) if (objPtr)
{ {
DetailInfo << " " << iter() << nl; DetailInfo << " " << dEntry << nl;
const List<simpleRegIOobject*>& objects = *objPtr; const List<simpleRegIOobject*>& objects = *objPtr;
if (iter().isDict()) if (dEntry.isDict())
{ {
for (simpleRegIOobject* obj : objects) for (simpleRegIOobject* obj : objects)
{ {
OStringStream os(IOstream::ASCII); OStringStream os(IOstream::ASCII);
os << iter().dict(); os << dEntry.dict();
IStringStream is(os.str()); IStringStream is(os.str());
obj->readData(is); obj->readData(is);
} }
@ -228,7 +228,7 @@ void Foam::Time::readDict()
{ {
for (simpleRegIOobject* obj : objects) for (simpleRegIOobject* obj : objects)
{ {
obj->readData(iter().stream()); obj->readData(dEntry.stream());
} }
} }
} }

View File

@ -728,11 +728,11 @@ bool Foam::functionObjectList::read()
newPtrs.setSize(functionsDict.size()); newPtrs.setSize(functionsDict.size());
newDigs.setSize(functionsDict.size()); newDigs.setSize(functionsDict.size());
forAllConstIter(dictionary, functionsDict, iter) for (const entry& dEntry : functionsDict)
{ {
const word& key = iter().keyword(); const word& key = dEntry.keyword();
if (!iter().isDict()) if (!dEntry.isDict())
{ {
if (key != "libs") if (key != "libs")
{ {
@ -743,7 +743,8 @@ bool Foam::functionObjectList::read()
continue; continue;
} }
const dictionary& dict = iter().dict(); const dictionary& dict = dEntry.dict();
bool enabled = dict.lookupOrDefault("enabled", true); bool enabled = dict.lookupOrDefault("enabled", true);
newDigs[nFunc] = dict.digest(); newDigs[nFunc] = dict.digest();

View File

@ -154,9 +154,9 @@ Foam::word Foam::functionObjects::stateFunctionObject::objectResultType
{ {
const dictionary& objectDict = resultsDict.subDict(objectName); const dictionary& objectDict = resultsDict.subDict(objectName);
forAllConstIter(dictionary, objectDict, iter) for (const entry& dEntry : objectDict)
{ {
const dictionary& dict = iter().dict(); const dictionary& dict = dEntry.dict();
if (dict.found(entryName)) if (dict.found(entryName))
{ {
@ -195,9 +195,10 @@ objectResultEntries
{ {
const dictionary& objectDict = resultsDict.subDict(objectName); const dictionary& objectDict = resultsDict.subDict(objectName);
forAllConstIter(dictionary, objectDict, iter) for (const entry& dEntry : objectDict)
{ {
const dictionary& dict = iter().dict(); const dictionary& dict = dEntry.dict();
result.append(dict.toc()); result.append(dict.toc());
} }
} }

View File

@ -120,17 +120,18 @@ const HashTable<dimensionedScalar>& unitSet()
unitSetPtr_ = new HashTable<dimensionedScalar>(unitDict.size()); unitSetPtr_ = new HashTable<dimensionedScalar>(unitDict.size());
forAllConstIter(dictionary, unitDict, iter) for (const entry& dEntry : unitDict)
{ {
if (iter().keyword() != "writeUnits") if (dEntry.keyword() != "writeUnits")
{ {
dimensionedScalar dt; dimensionedScalar dt;
dt.read(iter().stream(), unitDict); dt.read(dEntry.stream(), unitDict);
bool ok = unitSetPtr_->insert(iter().keyword(), dt);
bool ok = unitSetPtr_->insert(dEntry.keyword(), dt);
if (!ok) if (!ok)
{ {
FatalIOErrorInFunction(dict) FatalIOErrorInFunction(dict)
<< "Duplicate unit " << iter().keyword() << "Duplicate unit " << dEntry.keyword()
<< " in DimensionSets dictionary" << " in DimensionSets dictionary"
<< exit(FatalIOError); << exit(FatalIOError);
} }

View File

@ -47,11 +47,12 @@ readField
// 1. Handle explicit patch names. Note that there can be only one explicit // 1. Handle explicit patch names. Note that there can be only one explicit
// patch name since is key of dictionary. // patch name since is key of dictionary.
forAllConstIter(dictionary, dict, iter)
for (const entry& dEntry : dict)
{ {
if (iter().isDict() && iter().keyword().isLiteral()) if (dEntry.isDict() && dEntry.keyword().isLiteral())
{ {
const label patchi = bmesh_.findPatchID(iter().keyword()); const label patchi = bmesh_.findPatchID(dEntry.keyword());
if (patchi != -1) if (patchi != -1)
{ {
@ -62,7 +63,7 @@ readField
( (
bmesh_[patchi], bmesh_[patchi],
field, field,
iter().dict() dEntry.dict()
) )
); );
nUnset--; nUnset--;
@ -81,21 +82,14 @@ readField
// Note: in reverse order of entries in the dictionary (last // Note: in reverse order of entries in the dictionary (last
// patchGroups wins). This is so it is consistent with dictionary wildcard // patchGroups wins). This is so it is consistent with dictionary wildcard
// behaviour // behaviour
if (dict.size()) for (auto iter = dict.crbegin(); iter != dict.crend(); ++iter)
{ {
for const entry& dEntry = *iter;
(
IDLList<entry>::const_reverse_iterator iter = dict.crbegin();
iter != dict.crend();
++iter
)
{
const entry& e = iter();
if (e.isDict() && e.keyword().isLiteral()) if (dEntry.isDict() && dEntry.keyword().isLiteral())
{ {
const labelList patchIds = const labelList patchIds =
bmesh_.indices(e.keyword(), true); // use patchGroups bmesh_.indices(dEntry.keyword(), true); // use patchGroups
for (const label patchi : patchIds) for (const label patchi : patchIds)
{ {
@ -108,14 +102,13 @@ readField
( (
bmesh_[patchi], bmesh_[patchi],
field, field,
e.dict() dEntry.dict()
) )
); );
} }
} }
} }
} }
}
// 3. Wildcard patch overrides // 3. Wildcard patch overrides

View File

@ -131,19 +131,21 @@ void Foam::genericPolyPatch::write(Ostream& os) const
os.writeEntry("nFaces", size()); os.writeEntry("nFaces", size());
os.writeEntry("startFace", start()); os.writeEntry("startFace", start());
forAllConstIter(dictionary, dict_, iter) for (const entry& e : dict_)
{ {
const word& key = e.keyword();
// Filter out any keywords already written by above // Filter out any keywords already written by above
if if
( (
iter().keyword() != "type" key != "type"
&& iter().keyword() != "nFaces" && key != "nFaces"
&& iter().keyword() != "startFace" && key != "startFace"
&& iter().keyword() != "physicalType" && key != "physicalType"
&& iter().keyword() != "inGroups" && key != "inGroups"
) )
{ {
iter().write(os); e.write(os);
} }
} }
} }

View File

@ -238,16 +238,19 @@ Foam::Map<Foam::word> Foam::cellTable::selectType(const word& matl) const
forAllConstIter(Map<dictionary>, *this, iter) forAllConstIter(Map<dictionary>, *this, iter)
{ {
const label index = iter.key();
const dictionary& dict = iter.object();
if if
( (
matl matl
== iter().lookupOrDefault<word>("MaterialType", defaultMaterial_) == dict.lookupOrDefault<word>("MaterialType", defaultMaterial_)
) )
{ {
lookup.insert lookup.insert
( (
iter.key(), index,
iter().lookupOrDefault<word> dict.lookupOrDefault<word>
( (
"Label", "Label",
"cellTable_" + Foam::name(iter.key()) "cellTable_" + Foam::name(iter.key())

View File

@ -71,11 +71,11 @@ Foam::dynamicMultiMotionSolverFvMesh::dynamicMultiMotionSolverFvMesh
pointIDs_.setSize(dynamicMeshCoeffs.size()); pointIDs_.setSize(dynamicMeshCoeffs.size());
label zoneI = 0; label zoneI = 0;
forAllConstIter(dictionary, dynamicMeshCoeffs, iter) for (const entry& dEntry : dynamicMeshCoeffs)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
const dictionary& subDict = iter().dict(); const dictionary& subDict = dEntry.dict();
const word zoneName(subDict.get<word>("cellZone")); const word zoneName(subDict.get<word>("cellZone"));

View File

@ -306,9 +306,9 @@ void Foam::displacementLayeredMotionMotionSolver::cellZoneSolve
// Allocate the fields // Allocate the fields
label patchi = 0; label patchi = 0;
forAllConstIter(dictionary, patchesDict, patchiter) for (const entry& dEntry : patchesDict)
{ {
const word& faceZoneName = patchiter().keyword(); const word& faceZoneName = dEntry.keyword();
label zoneI = mesh().faceZones().findZoneID(faceZoneName); label zoneI = mesh().faceZones().findZoneID(faceZoneName);
if (zoneI == -1) if (zoneI == -1)
{ {
@ -353,10 +353,10 @@ void Foam::displacementLayeredMotionMotionSolver::cellZoneSolve
pointDisplacement_.correctBoundaryConditions(); pointDisplacement_.correctBoundaryConditions();
patchi = 0; patchi = 0;
forAllConstIter(dictionary, patchesDict, patchiter) for (const entry& dEntry : patchesDict)
{ {
const word& faceZoneName = patchiter().keyword(); const word& faceZoneName = dEntry.keyword();
const dictionary& faceZoneDict = patchiter().dict(); const dictionary& faceZoneDict = dEntry.dict();
// Determine the points of the faceZone within the cellZone // Determine the points of the faceZone within the cellZone
const faceZone& fz = mesh().faceZones()[faceZoneName]; const faceZone& fz = mesh().faceZones()[faceZoneName];
@ -546,11 +546,10 @@ void Foam::displacementLayeredMotionMotionSolver::solve()
pointDisplacement_.boundaryFieldRef().updateCoeffs(); pointDisplacement_.boundaryFieldRef().updateCoeffs();
// Solve motion on all regions (=cellZones) // Solve motion on all regions (=cellZones)
const dictionary& regionDicts = coeffDict().subDict("regions"); for (const entry& dEntry : coeffDict().subDict("regions"))
forAllConstIter(dictionary, regionDicts, regionIter)
{ {
const word& cellZoneName = regionIter().keyword(); const word& cellZoneName = dEntry.keyword();
const dictionary& regionDict = regionIter().dict(); const dictionary& regionDict = dEntry.dict();
label zoneI = mesh().cellZones().findZoneID(cellZoneName); label zoneI = mesh().cellZones().findZoneID(cellZoneName);

View File

@ -59,24 +59,26 @@ Foam::multiSolidBodyMotionSolver::multiSolidBodyMotionSolver
pointIDs_.setSize(coeffDict().size()); pointIDs_.setSize(coeffDict().size());
label zonei = 0; label zonei = 0;
forAllConstIter(dictionary, coeffDict(), iter) for (const entry& dEntry : coeffDict())
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
zoneIDs_[zonei] = mesh.cellZones().findZoneID(iter().keyword()); const word& zoneName = dEntry.keyword();
const dictionary& subDict = dEntry.dict();
zoneIDs_[zonei] = mesh.cellZones().findZoneID(zoneName);
if (zoneIDs_[zonei] == -1) if (zoneIDs_[zonei] == -1)
{ {
FatalIOErrorInFunction FatalIOErrorInFunction
( (
coeffDict() coeffDict()
) << "Cannot find cellZone named " << iter().keyword() ) << "Cannot find cellZone named " << zoneName
<< ". Valid zones are " << mesh.cellZones().names() << ". Valid zones are "
<< flatOutput(mesh.cellZones().names())
<< exit(FatalIOError); << exit(FatalIOError);
} }
const dictionary& subDict = iter().dict();
SBMFs_.set SBMFs_.set
( (
zonei, zonei,
@ -119,7 +121,7 @@ Foam::multiSolidBodyMotionSolver::multiSolidBodyMotionSolver
Info<< "Applying solid body motion " << SBMFs_[zonei].type() Info<< "Applying solid body motion " << SBMFs_[zonei].type()
<< " to " << " to "
<< returnReduce(pointIDs_[zonei].size(), sumOp<label>()) << returnReduce(pointIDs_[zonei].size(), sumOp<label>())
<< " points of cellZone " << iter().keyword() << endl; << " points of cellZone " << zoneName << endl;
zonei++; zonei++;
} }

View File

@ -93,18 +93,18 @@ bool Foam::solidBodyMotionFunctions::multiMotion::read
label i = 0; label i = 0;
SBMFs_.setSize(SBMFCoeffs_.size()); SBMFs_.setSize(SBMFCoeffs_.size());
forAllConstIter(IDLList<entry>, SBMFCoeffs_, iter) for (const entry& dEntry : SBMFCoeffs_)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
SBMFs_.set SBMFs_.set
( (
i, i,
solidBodyMotionFunction::New(iter().dict(), time_) solidBodyMotionFunction::New(dEntry.dict(), time_)
); );
Info<< "Constructed SBMF " << i << " : " Info<< "Constructed SBMF " << i << " : "
<< iter().keyword() << " of type " << dEntry.keyword() << " of type "
<< SBMFs_[i].type() << endl; << SBMFs_[i].type() << endl;
i++; i++;

View File

@ -72,28 +72,29 @@ bool Foam::MRFZoneList::active(const bool warn) const
void Foam::MRFZoneList::reset(const dictionary& dict) void Foam::MRFZoneList::reset(const dictionary& dict)
{ {
label count = 0; label count = 0;
forAllConstIter(dictionary, dict, iter) for (const entry& dEntry : dict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
count++; ++count;
} }
} }
this->setSize(count); this->resize(count);
label i = 0;
forAllConstIter(dictionary, dict, iter) count = 0;
for (const entry& dEntry : dict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
const word& name = iter().keyword(); const word& name = dEntry.keyword();
const dictionary& modelDict = iter().dict(); const dictionary& modelDict = dEntry.dict();
Info<< " creating MRF zone: " << name << endl; Info<< " creating MRF zone: " << name << endl;
this->set this->set
( (
i++, count++,
new MRFZone(name, mesh_, modelDict) new MRFZone(name, mesh_, modelDict)
); );
} }

View File

@ -109,26 +109,27 @@ void Foam::fv::optionList::reset(const dictionary& dict)
{ {
// Count number of active fvOptions // Count number of active fvOptions
label count = 0; label count = 0;
forAllConstIter(dictionary, dict, iter) for (const entry& dEntry : dict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
count++; ++count;
} }
} }
this->setSize(count); this->resize(count);
label i = 0;
forAllConstIter(dictionary, dict, iter) count = 0;
for (const entry& dEntry : dict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
const word& name = iter().keyword(); const word& name = dEntry.keyword();
const dictionary& sourceDict = iter().dict(); const dictionary& sourceDict = dEntry.dict();
this->set this->set
( (
i++, count++,
option::New(name, sourceDict, mesh_) option::New(name, sourceDict, mesh_)
); );
} }

View File

@ -64,26 +64,27 @@ bool Foam::porosityModelList::active(const bool warn) const
void Foam::porosityModelList::reset(const dictionary& dict) void Foam::porosityModelList::reset(const dictionary& dict)
{ {
label count = 0; label count = 0;
forAllConstIter(dictionary, dict, iter) for (const entry& dEntry : dict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
count++; ++count;
} }
} }
this->setSize(count); this->resize(count);
label i = 0;
forAllConstIter(dictionary, dict, iter) count = 0;
for (const entry& dEntry : dict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
const word& name = iter().keyword(); const word& name = dEntry.keyword();
const dictionary& modelDict = iter().dict(); const dictionary& modelDict = dEntry.dict();
this->set this->set
( (
i++, count++,
porosityModel::New(name, mesh_, modelDict) porosityModel::New(name, mesh_, modelDict)
); );
} }

View File

@ -56,9 +56,9 @@ void Foam::solutionControl::read(const bool absTolOnly)
DynamicList<fieldData> data(residualControl_); DynamicList<fieldData> data(residualControl_);
forAllConstIter(dictionary, residualDict, iter) for (const entry& dEntry : residualDict)
{ {
const word& fName = iter().keyword(); const word& fName = dEntry.keyword();
const label fieldi = applyToField(fName, false); const label fieldi = applyToField(fName, false);
if (fieldi == -1) if (fieldi == -1)
{ {
@ -71,11 +71,9 @@ void Foam::solutionControl::read(const bool absTolOnly)
fd.relTol = -1; fd.relTol = -1;
fd.initialResidual = -1; fd.initialResidual = -1;
} }
else else if (dEntry.isDict())
{ {
if (iter().isDict()) const dictionary& fieldDict = dEntry.dict();
{
const dictionary& fieldDict(iter().dict());
fd.absTol = fieldDict.get<scalar>("tolerance"); fd.absTol = fieldDict.get<scalar>("tolerance");
fd.relTol = fieldDict.get<scalar>("relTol"); fd.relTol = fieldDict.get<scalar>("relTol");
fd.initialResidual = 0.0; fd.initialResidual = 0.0;
@ -83,11 +81,10 @@ void Foam::solutionControl::read(const bool absTolOnly)
else else
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Residual data for " << iter().keyword() << "Residual data for " << dEntry.keyword()
<< " must be specified as a dictionary" << " must be specified as a dictionary"
<< exit(FatalError); << exit(FatalError);
} }
}
data.append(fd); data.append(fd);
} }
@ -98,24 +95,21 @@ void Foam::solutionControl::read(const bool absTolOnly)
{ {
fd.absTol = residualDict.get<scalar>(fName); fd.absTol = residualDict.get<scalar>(fName);
} }
else else if (dEntry.isDict())
{ {
if (iter().isDict()) const dictionary& fieldDict = dEntry.dict();
{
const dictionary& fieldDict(iter().dict());
fd.absTol = fieldDict.get<scalar>("tolerance"); fd.absTol = fieldDict.get<scalar>("tolerance");
fd.relTol = fieldDict.get<scalar>("relTol"); fd.relTol = fieldDict.get<scalar>("relTol");
} }
else else
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Residual data for " << iter().keyword() << "Residual data for " << dEntry.keyword()
<< " must be specified as a dictionary" << " must be specified as a dictionary"
<< exit(FatalError); << exit(FatalError);
} }
} }
} }
}
residualControl_.transfer(data); residualControl_.transfer(data);

View File

@ -575,17 +575,17 @@ bool Foam::functionObjects::externalCoupled::read(const dictionary& dict)
wordList allRegionNames(time_.lookupClass<fvMesh>().sortedToc()); wordList allRegionNames(time_.lookupClass<fvMesh>().sortedToc());
const dictionary& allRegionsDict = dict.subDict("regions"); const dictionary& allRegionsDict = dict.subDict("regions");
forAllConstIters(allRegionsDict, iter) for (const entry& dEntry : allRegionsDict)
{ {
if (!iter().isDict()) if (!dEntry.isDict())
{ {
FatalIOErrorInFunction(allRegionsDict) FatalIOErrorInFunction(allRegionsDict)
<< "Regions must be specified in dictionary format" << "Regions must be specified in dictionary format"
<< exit(FatalIOError); << exit(FatalIOError);
} }
const wordRe regionGroupName(iter().keyword()); const wordRe regionGroupName(dEntry.keyword());
const dictionary& regionDict = iter().dict(); const dictionary& regionDict = dEntry.dict();
labelList regionIDs = findStrings(regionGroupName, allRegionNames); labelList regionIDs = findStrings(regionGroupName, allRegionNames);
@ -594,16 +594,17 @@ bool Foam::functionObjects::externalCoupled::read(const dictionary& dict)
regionGroupNames_.append(compositeName(regionNames)); regionGroupNames_.append(compositeName(regionNames));
regionGroupRegions_.append(regionNames); regionGroupRegions_.append(regionNames);
forAllConstIters(regionDict, regionIter) for (const entry& dEntry : regionDict)
{ {
if (!regionIter().isDict()) if (!dEntry.isDict())
{ {
FatalIOErrorInFunction(regionDict) FatalIOErrorInFunction(regionDict)
<< "Regions must be specified in dictionary format" << "Regions must be specified in dictionary format"
<< exit(FatalIOError); << exit(FatalIOError);
} }
const wordRe groupName(regionIter().keyword());
const dictionary& groupDict = regionIter().dict(); const wordRe groupName(dEntry.keyword());
const dictionary& groupDict = dEntry.dict();
const label nGroups = groupNames_.size(); const label nGroups = groupNames_.size();
const wordList readFields(groupDict.get<wordList>("readFields")); const wordList readFields(groupDict.get<wordList>("readFields"));
@ -619,7 +620,7 @@ bool Foam::functionObjects::externalCoupled::read(const dictionary& dict)
regionToGroups_.insert regionToGroups_.insert
( (
regionGroupNames_.last(), regionGroupNames_.last(),
labelList{nGroups} labelList(one(), nGroups)
); );
} }
groupNames_.append(groupName); groupNames_.append(groupName);

View File

@ -106,11 +106,11 @@ bool Foam::functionObjects::runTimePostProcessing::read(const dictionary& dict)
readObjects(dict.subOrEmptyDict("lines"), lines_); readObjects(dict.subOrEmptyDict("lines"), lines_);
readObjects(dict.subOrEmptyDict("surfaces"), surfaces_); readObjects(dict.subOrEmptyDict("surfaces"), surfaces_);
const dictionary& textDict = dict.subDict("text"); const dictionary& textDict = dict.subDict("text");
forAllConstIter(dictionary, textDict, iter)
for (const entry& dEntry : textDict)
{ {
if (!iter().isDict()) if (!dEntry.isDict())
{ {
FatalIOErrorInFunction(textDict) FatalIOErrorInFunction(textDict)
<< "text must be specified in dictionary format" << "text must be specified in dictionary format"
@ -122,7 +122,7 @@ bool Foam::functionObjects::runTimePostProcessing::read(const dictionary& dict)
new runTimePostPro::text new runTimePostPro::text
( (
*this, *this,
iter().dict(), dEntry.dict(),
scene_.colours() scene_.colours()
) )
); );

View File

@ -33,9 +33,10 @@ void Foam::functionObjects::runTimePostProcessing::readObjects
) const ) const
{ {
objects.clear(); objects.clear();
forAllConstIter(dictionary, dict, iter)
for (const entry& dEntry : dict)
{ {
if (!iter().isDict()) if (!dEntry.isDict())
{ {
FatalIOErrorInFunction(dict) FatalIOErrorInFunction(dict)
<< dict.dictName() << dict.dictName()
@ -43,12 +44,12 @@ void Foam::functionObjects::runTimePostProcessing::readObjects
<< exit(FatalIOError); << exit(FatalIOError);
} }
const dictionary& objectDict(iter().dict()); const dictionary& objectDict = dEntry.dict();
const word objectType = objectDict.get<word>("type"); const word objectType = objectDict.get<word>("type");
objects.append objects.append
( (
Type::New(*this, iter().dict(), scene_.colours(), objectType) Type::New(*this, objectDict, scene_.colours(), objectType)
); );
} }
} }

View File

@ -54,18 +54,20 @@ bool Foam::fv::FixedValueConstraint<Type>::read(const dictionary& dict)
{ {
const dictionary& fieldValuesDict = coeffs_.subDict("fieldValues"); const dictionary& fieldValuesDict = coeffs_.subDict("fieldValues");
fieldNames_.setSize(fieldValuesDict.size()); label count = fieldValuesDict.size();
fieldValues_.setSize(fieldNames_.size());
label i = 0; fieldNames_.setSize(count);
forAllConstIter(dictionary, fieldValuesDict, iter) fieldValues_.setSize(count);
applied_.setSize(count, false);
count = 0;
for (const entry& dEntry : fieldValuesDict)
{ {
fieldNames_[i] = iter().keyword(); fieldNames_[count] = dEntry.keyword();
fieldValuesDict.readEntry(iter().keyword(), fieldValues_[i]); dEntry.readEntry(fieldValues_[count]);
++i;
}
applied_.setSize(fieldNames_.size(), false); ++count;
}
return true; return true;
} }

View File

@ -83,17 +83,19 @@ Foam::word Foam::fv::SemiImplicitSource<Type>::volumeModeTypeToWord
template<class Type> template<class Type>
void Foam::fv::SemiImplicitSource<Type>::setFieldData(const dictionary& dict) void Foam::fv::SemiImplicitSource<Type>::setFieldData(const dictionary& dict)
{ {
fieldNames_.setSize(dict.toc().size()); label count = dict.size();
injectionRate_.setSize(fieldNames_.size());
applied_.setSize(fieldNames_.size(), false); fieldNames_.resize(count);
injectionRate_.resize(count);
applied_.resize(count, false);
label i = 0; count = 0;
forAllConstIter(dictionary, dict, iter) for (const entry& dEntry : dict)
{ {
fieldNames_[i] = iter().keyword(); fieldNames_[count] = dEntry.keyword();
dict.readEntry(iter().keyword(), injectionRate_[i]); dEntry.readEntry(injectionRate_[count]);
++i;
++count;
} }
// Set volume normalisation // Set volume normalisation

View File

@ -75,17 +75,19 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
forAllConstIter(dictionary, dict_, iter) for (const entry& dEntry : dict_)
{ {
if (iter().keyword() != "type" && iter().keyword() != "value") const keyType& key = dEntry.keyword();
if (key != "type" && key != "value")
{ {
if if
( (
iter().isStream() dEntry.isStream()
&& iter().stream().size() && dEntry.stream().size()
) )
{ {
ITstream& is = iter().stream(); ITstream& is = dEntry.stream();
// Read first token // Read first token
token firstToken(is); token firstToken(is);
@ -108,7 +110,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
{ {
scalarFields_.insert scalarFields_.insert
( (
iter().keyword(), key,
autoPtr<scalarField>::New() autoPtr<scalarField>::New()
); );
} }
@ -148,7 +150,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -160,7 +162,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
scalarFields_.insert(iter().keyword(), fPtr); scalarFields_.insert(key, fPtr);
} }
else if else if
( (
@ -183,7 +185,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -195,7 +197,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
vectorFields_.insert(iter().keyword(), fPtr); vectorFields_.insert(key, fPtr);
} }
else if else if
( (
@ -221,7 +223,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -233,7 +235,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
sphericalTensorFields_.insert(iter().keyword(), fPtr); sphericalTensorFields_.insert(key, fPtr);
} }
else if else if
( (
@ -259,7 +261,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -271,7 +273,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
symmTensorFields_.insert(iter().keyword(), fPtr); symmTensorFields_.insert(key, fPtr);
} }
else if else if
( (
@ -294,7 +296,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -306,7 +308,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
tensorFields_.insert(iter().keyword(), fPtr); tensorFields_.insert(key, fPtr);
} }
else else
{ {
@ -335,7 +337,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
{ {
scalarFields_.insert scalarFields_.insert
( (
iter().keyword(), key,
autoPtr<scalarField>::New autoPtr<scalarField>::New
( (
this->size(), this->size(),
@ -356,7 +358,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
vectorFields_.insert vectorFields_.insert
( (
iter().keyword(), key,
autoPtr<vectorField>::New autoPtr<vectorField>::New
( (
this->size(), this->size(),
@ -370,7 +372,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
sphericalTensorFields_.insert sphericalTensorFields_.insert
( (
iter().keyword(), key,
autoPtr<sphericalTensorField>::New autoPtr<sphericalTensorField>::New
( (
this->size(), this->size(),
@ -384,7 +386,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
symmTensorFields_.insert symmTensorFields_.insert
( (
iter().keyword(), key,
autoPtr<symmTensorField>::New autoPtr<symmTensorField>::New
( (
this->size(), this->size(),
@ -403,7 +405,7 @@ Foam::genericFaPatchField<Type>::genericFaPatchField
tensorFields_.insert tensorFields_.insert
( (
iter().keyword(), key,
autoPtr<tensorField>::New autoPtr<tensorField>::New
( (
this->size(), this->size(),
@ -797,47 +799,49 @@ void Foam::genericFaPatchField<Type>::write(Ostream& os) const
{ {
os.writeEntry("type", actualTypeName_); os.writeEntry("type", actualTypeName_);
forAllConstIter(dictionary, dict_, iter) for (const entry& dEntry : dict_)
{ {
if (iter().keyword() != "type" && iter().keyword() != "value") const keyType& key = dEntry.keyword();
if (key != "type" && key != "value")
{ {
if if
( (
iter().isStream() dEntry.isStream()
&& iter().stream().size() && dEntry.stream().size()
&& iter().stream()[0].isWord() && dEntry.stream()[0].isWord()
&& iter().stream()[0].wordToken() == "nonuniform" && dEntry.stream()[0].wordToken() == "nonuniform"
) )
{ {
if (scalarFields_.found(iter().keyword())) if (scalarFields_.found(key))
{ {
scalarFields_.find(iter().keyword())() scalarFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (vectorFields_.found(iter().keyword())) else if (vectorFields_.found(key))
{ {
vectorFields_.find(iter().keyword())() vectorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (sphericalTensorFields_.found(iter().keyword())) else if (sphericalTensorFields_.found(key))
{ {
sphericalTensorFields_.find(iter().keyword())() sphericalTensorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (symmTensorFields_.found(iter().keyword())) else if (symmTensorFields_.found(key))
{ {
symmTensorFields_.find(iter().keyword())() symmTensorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (tensorFields_.found(iter().keyword())) else if (tensorFields_.found(key))
{ {
tensorFields_.find(iter().keyword())() tensorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
} }
else else
{ {
iter().write(os); dEntry.write(os);
} }
} }
} }

View File

@ -75,17 +75,19 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
forAllConstIter(dictionary, dict_, iter) for (const entry& dEntry : dict_)
{ {
if (iter().keyword() != "type" && iter().keyword() != "value") const keyType& key = dEntry.keyword();
if (key != "type" && key != "value")
{ {
if if
( (
iter().isStream() dEntry.isStream()
&& iter().stream().size() && dEntry.stream().size()
) )
{ {
ITstream& is = iter().stream(); ITstream& is = dEntry.stream();
// Read first token // Read first token
token firstToken(is); token firstToken(is);
@ -108,7 +110,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
{ {
scalarFields_.insert scalarFields_.insert
( (
iter().keyword(), dEntry.keyword(),
autoPtr<scalarField>::New() autoPtr<scalarField>::New()
); );
} }
@ -148,7 +150,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -160,7 +162,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
scalarFields_.insert(iter().keyword(), fPtr); scalarFields_.insert(key, fPtr);
} }
else if else if
( (
@ -183,7 +185,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -195,7 +197,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
vectorFields_.insert(iter().keyword(), fPtr); vectorFields_.insert(key, fPtr);
} }
else if else if
( (
@ -221,7 +223,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -233,7 +235,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
sphericalTensorFields_.insert(iter().keyword(), fPtr); sphericalTensorFields_.insert(key, fPtr);
} }
else if else if
( (
@ -259,7 +261,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -271,7 +273,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
symmTensorFields_.insert(iter().keyword(), fPtr); symmTensorFields_.insert(key, fPtr);
} }
else if else if
( (
@ -294,7 +296,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -306,7 +308,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
tensorFields_.insert(iter().keyword(), fPtr); tensorFields_.insert(key, fPtr);
} }
else else
{ {
@ -335,7 +337,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
{ {
scalarFields_.insert scalarFields_.insert
( (
iter().keyword(), key,
autoPtr<scalarField>::New autoPtr<scalarField>::New
( (
this->size(), this->size(),
@ -356,7 +358,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
vectorFields_.insert vectorFields_.insert
( (
iter().keyword(), key,
autoPtr<vectorField>::New autoPtr<vectorField>::New
( (
this->size(), this->size(),
@ -370,7 +372,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
sphericalTensorFields_.insert sphericalTensorFields_.insert
( (
iter().keyword(), key,
autoPtr<sphericalTensorField>::New autoPtr<sphericalTensorField>::New
( (
this->size(), this->size(),
@ -384,7 +386,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
symmTensorFields_.insert symmTensorFields_.insert
( (
iter().keyword(), key,
autoPtr<symmTensorField>::New autoPtr<symmTensorField>::New
( (
this->size(), this->size(),
@ -403,7 +405,7 @@ Foam::genericFvPatchField<Type>::genericFvPatchField
tensorFields_.insert tensorFields_.insert
( (
iter().keyword(), key,
autoPtr<tensorField>::New autoPtr<tensorField>::New
( (
this->size(), this->size(),
@ -797,47 +799,49 @@ void Foam::genericFvPatchField<Type>::write(Ostream& os) const
{ {
os.writeEntry("type", actualTypeName_); os.writeEntry("type", actualTypeName_);
forAllConstIter(dictionary, dict_, iter) for (const entry& dEntry : dict_)
{ {
if (iter().keyword() != "type" && iter().keyword() != "value") const keyType& key = dEntry.keyword();
if (key != "type" && key != "value")
{ {
if if
( (
iter().isStream() dEntry.isStream()
&& iter().stream().size() && dEntry.stream().size()
&& iter().stream()[0].isWord() && dEntry.stream()[0].isWord()
&& iter().stream()[0].wordToken() == "nonuniform" && dEntry.stream()[0].wordToken() == "nonuniform"
) )
{ {
if (scalarFields_.found(iter().keyword())) if (scalarFields_.found(key))
{ {
scalarFields_.find(iter().keyword())() scalarFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (vectorFields_.found(iter().keyword())) else if (vectorFields_.found(key))
{ {
vectorFields_.find(iter().keyword())() vectorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (sphericalTensorFields_.found(iter().keyword())) else if (sphericalTensorFields_.found(key))
{ {
sphericalTensorFields_.find(iter().keyword())() sphericalTensorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (symmTensorFields_.found(iter().keyword())) else if (symmTensorFields_.found(key))
{ {
symmTensorFields_.find(iter().keyword())() symmTensorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (tensorFields_.found(iter().keyword())) else if (tensorFields_.found(key))
{ {
tensorFields_.find(iter().keyword())() tensorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
} }
else else
{ {
iter().write(os); dEntry.write(os);
} }
} }
} }

View File

@ -53,17 +53,19 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
actualTypeName_(dict.get<word>("type")), actualTypeName_(dict.get<word>("type")),
dict_(dict) dict_(dict)
{ {
forAllConstIter(dictionary, dict_, iter) for (const entry& dEntry : dict_)
{ {
if (iter().keyword() != "type") const keyType& key = dEntry.keyword();
if (key != "type")
{ {
if if
( (
iter().isStream() dEntry.isStream()
&& iter().stream().size() && dEntry.stream().size()
) )
{ {
ITstream& is = iter().stream(); ITstream& is = dEntry.stream();
// Read first token // Read first token
token firstToken(is); token firstToken(is);
@ -86,7 +88,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
{ {
scalarFields_.insert scalarFields_.insert
( (
iter().keyword(), key,
autoPtr<scalarField>::New() autoPtr<scalarField>::New()
); );
} }
@ -126,7 +128,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -138,7 +140,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
scalarFields_.insert(iter().keyword(), fPtr); scalarFields_.insert(key, fPtr);
} }
else if else if
( (
@ -161,7 +163,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -173,7 +175,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
vectorFields_.insert(iter().keyword(), fPtr); vectorFields_.insert(key, fPtr);
} }
else if else if
( (
@ -199,7 +201,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -211,7 +213,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
sphericalTensorFields_.insert(iter().keyword(), fPtr); sphericalTensorFields_.insert(key, fPtr);
} }
else if else if
( (
@ -237,7 +239,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -249,7 +251,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
symmTensorFields_.insert(iter().keyword(), fPtr); symmTensorFields_.insert(key, fPtr);
} }
else if else if
( (
@ -272,7 +274,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
FatalIOErrorInFunction FatalIOErrorInFunction
( (
dict dict
) << "\n size of field " << iter().keyword() ) << "\n size of field " << key
<< " (" << fPtr->size() << ')' << " (" << fPtr->size() << ')'
<< " is not the same size as the patch (" << " is not the same size as the patch ("
<< this->size() << ')' << this->size() << ')'
@ -284,7 +286,7 @@ Foam::genericPointPatchField<Type>::genericPointPatchField
<< exit(FatalIOError); << exit(FatalIOError);
} }
tensorFields_.insert(iter().keyword(), fPtr); tensorFields_.insert(key, fPtr);
} }
else else
{ {
@ -574,47 +576,49 @@ void Foam::genericPointPatchField<Type>::write(Ostream& os) const
{ {
os.writeEntry("type", actualTypeName_); os.writeEntry("type", actualTypeName_);
forAllConstIter(dictionary, dict_, iter) for (const entry& dEntry : dict_)
{ {
if (iter().keyword() != "type") const keyType& key = dEntry.keyword();
if (key != "type")
{ {
if if
( (
iter().isStream() dEntry.isStream()
&& iter().stream().size() && dEntry.stream().size()
&& iter().stream()[0].isWord() && dEntry.stream()[0].isWord()
&& iter().stream()[0].wordToken() == "nonuniform" && dEntry.stream()[0].wordToken() == "nonuniform"
) )
{ {
if (scalarFields_.found(iter().keyword())) if (scalarFields_.found(key))
{ {
scalarFields_.find(iter().keyword())() scalarFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (vectorFields_.found(iter().keyword())) else if (vectorFields_.found(key))
{ {
vectorFields_.find(iter().keyword())() vectorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (sphericalTensorFields_.found(iter().keyword())) else if (sphericalTensorFields_.found(key))
{ {
sphericalTensorFields_.find(iter().keyword())() sphericalTensorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (symmTensorFields_.found(iter().keyword())) else if (symmTensorFields_.found(key))
{ {
symmTensorFields_.find(iter().keyword())() symmTensorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
else if (tensorFields_.found(iter().keyword())) else if (tensorFields_.found(key))
{ {
tensorFields_.find(iter().keyword())() tensorFields_.find(key)()
->writeEntry(iter().keyword(), os); ->writeEntry(key, os);
} }
} }
else else
{ {
iter().write(os); dEntry.write(os);
} }
} }
} }

View File

@ -62,28 +62,24 @@ Foam::ParticleForceList<CloudType>::ParticleForceList
{ {
if (readFields) if (readFields)
{ {
wordList modelNames(dict.toc());
Info<< "Constructing particle forces" << endl; Info<< "Constructing particle forces" << endl;
if (modelNames.size() > 0) this->resize(dict.size());
{
this->setSize(modelNames.size());
label i = 0; label count = 0;
forAllConstIter(IDLList<entry>, dict, iter) for (const entry& dEntry : dict)
{ {
const word& model = iter().keyword(); const word& model = dEntry.keyword();
if (iter().isDict()) if (dEntry.isDict())
{ {
this->set this->set
( (
i++, count,
ParticleForce<CloudType>::New ParticleForce<CloudType>::New
( (
owner, owner,
mesh, mesh,
iter().dict(), dEntry.dict(),
model model
) )
); );
@ -92,7 +88,7 @@ Foam::ParticleForceList<CloudType>::ParticleForceList
{ {
this->set this->set
( (
i++, count,
ParticleForce<CloudType>::New ParticleForce<CloudType>::New
( (
owner, owner,
@ -102,9 +98,11 @@ Foam::ParticleForceList<CloudType>::ParticleForceList
) )
); );
} }
++count;
} }
}
else if (!count)
{ {
Info<< " none" << endl; Info<< " none" << endl;
} }

View File

@ -43,24 +43,25 @@ Foam::InjectionModelList<CloudType>::InjectionModelList
: :
PtrList<InjectionModel<CloudType>>() PtrList<InjectionModel<CloudType>>()
{ {
wordList modelNames(dict.toc());
Info<< "Constructing particle injection models" << endl; Info<< "Constructing particle injection models" << endl;
if (modelNames.size() > 0) label count = dict.size();
if (count)
{ {
this->setSize(modelNames.size()); this->resize(count);
}
label i = 0; count = 0;
forAllConstIter(IDLList<entry>, dict, iter) for (const entry& dEntry : dict)
{ {
const word& model = iter().keyword(); const word& model = dEntry.keyword();
const dictionary& props = dEntry.dict();
Info<< "Creating injector: " << model << endl; Info<< "Creating injector: " << model << endl;
const dictionary& props = iter().dict();
this->set this->set
( (
i++, count,
InjectionModel<CloudType>::New InjectionModel<CloudType>::New
( (
props, props,
@ -69,11 +70,13 @@ Foam::InjectionModelList<CloudType>::InjectionModelList
owner owner
) )
); );
++count;
} }
}
else if (!count)
{ {
this->setSize(1); this->resize(1);
this->set this->set
( (

View File

@ -35,29 +35,30 @@ bool Foam::MultiInteraction<CloudType>::read(const dictionary& dict)
Info<< "Patch interaction model " << typeName << nl Info<< "Patch interaction model " << typeName << nl
<< "Executing in turn " << endl; << "Executing in turn " << endl;
label nModels = 0; label count = 0;
forAllConstIter(dictionary, dict, iter) for (const entry& dEntry : dict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
Info<< " " << iter().name() << endl; Info<< " " << dEntry.name() << endl;
nModels++; ++count;
} }
} }
models_.setSize(nModels); models_.resize(count);
nModels = 0;
forAllConstIter(dictionary, dict, iter) count = 0;
for (const entry& dEntry : dict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
models_.set models_.set
( (
nModels++, count++,
PatchInteractionModel<CloudType>::New PatchInteractionModel<CloudType>::New
( (
iter().dict(), dEntry.dict(),
this->owner() this->owner()
) )
); );

View File

@ -89,14 +89,14 @@ void Foam::blockMeshTools::write
const dictionary& dict const dictionary& dict
) )
{ {
forAllConstIter(dictionary, dict, iter) for (const entry& e : dict)
{ {
if (iter().isStream()) if (e.isStream())
{ {
label keyVal(Foam::readLabel(iter().stream())); label keyVal(Foam::readLabel(e.stream()));
if (keyVal == val) if (keyVal == val)
{ {
os << iter().keyword(); os << e.keyword();
return; return;
} }
} }
@ -111,14 +111,14 @@ const Foam::keyType& Foam::blockMeshTools::findEntry
const label val const label val
) )
{ {
forAllConstIter(dictionary, dict, iter) for (const entry& e : dict)
{ {
if (iter().isStream()) if (e.isStream())
{ {
label keyVal(Foam::readLabel(iter().stream())); label keyVal(Foam::readLabel(e.stream()));
if (keyVal == val) if (keyVal == val)
{ {
return iter().keyword(); return e.keyword();
} }
} }
} }

View File

@ -259,11 +259,13 @@ Foam::layerParameters::layerParameters
const dictionary& layersDict = dict.subDict("layers"); const dictionary& layersDict = dict.subDict("layers");
forAllConstIter(dictionary, layersDict, iter) for (const entry& dEntry : layersDict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
const keyType& key = iter().keyword(); const keyType& key = dEntry.keyword();
const dictionary& layerDict = dEntry.dict();
const labelHashSet patchIDs const labelHashSet patchIDs
( (
boundaryMesh.patchSet(List<wordRe>(1, wordRe(key))) boundaryMesh.patchSet(List<wordRe>(1, wordRe(key)))
@ -278,8 +280,6 @@ Foam::layerParameters::layerParameters
} }
else else
{ {
const dictionary& layerDict = iter().dict();
for (const label patchi : patchIDs) for (const label patchi : patchIDs)
{ {
numLayers_[patchi] = numLayers_[patchi] =

View File

@ -187,13 +187,13 @@ Foam::searchableSurfaceCollection::searchableSurfaceCollection
label surfI = 0; label surfI = 0;
label startIndex = 0; label startIndex = 0;
forAllConstIter(dictionary, dict, iter) for (const entry& dEntry : dict)
{ {
if (dict.isDict(iter().keyword())) if (dEntry.isDict())
{ {
instance_[surfI] = iter().keyword(); instance_[surfI] = dEntry.keyword();
const dictionary& subDict = dict.subDict(instance_[surfI]); const dictionary& subDict = dEntry.dict();
subDict.readEntry("scale", scale_[surfI]); subDict.readEntry("scale", scale_[surfI]);
transform_.set transform_.set

View File

@ -119,14 +119,12 @@ Foam::searchableSurfaces::searchableSurfaces(const label size)
// { // {
// const dictionary& regionsDict = dict.subDict("regions"); // const dictionary& regionsDict = dict.subDict("regions");
// //
// forAllConstIter(dictionary, regionsDict, iter) // for (const entry& dEntry : regionsDict)
// { // {
// const word& key = iter().keyword(); // if (dEntry.isDict())
//
// if (regionsDict.isDict(key))
// { // {
// // Get the dictionary for region iter.key() // const word& key = dEntry.keyword();
// const dictionary& regionDict = regionsDict.subDict(key); // const dictionary& regionDict = dEntry.dict();
// //
// label index = localNames.find(key); // label index = localNames.find(key);
// //
@ -178,18 +176,18 @@ Foam::searchableSurfaces::searchableSurfaces
allSurfaces_(identity(topDict.size())) allSurfaces_(identity(topDict.size()))
{ {
label surfI = 0; label surfI = 0;
forAllConstIter(dictionary, topDict, iter)
{
const word& key = iter().keyword();
if (!topDict.isDict(key)) for (const entry& dEntry : topDict)
{
if (!dEntry.isDict())
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Found non-dictionary entry " << iter() << "Found non-dictionary entry " << dEntry
<< " in top-level dictionary " << topDict << " in top-level dictionary " << topDict
<< exit(FatalError); << exit(FatalError);
} }
const word& key = dEntry.keyword();
const dictionary& dict = topDict.subDict(key); const dictionary& dict = topDict.subDict(key);
names_[surfI] = dict.lookupOrDefault<word>("name", key); names_[surfI] = dict.lookupOrDefault<word>("name", key);
@ -240,14 +238,12 @@ Foam::searchableSurfaces::searchableSurfaces
{ {
const dictionary& regionsDict = dict.subDict("regions"); const dictionary& regionsDict = dict.subDict("regions");
forAllConstIter(dictionary, regionsDict, iter) for (const entry& dEntry : regionsDict)
{ {
const word& key = iter().keyword(); if (dEntry.isDict())
if (regionsDict.isDict(key))
{ {
// Get the dictionary for region iter.keyword() const word& key = dEntry.keyword();
const dictionary& regionDict = regionsDict.subDict(key); const dictionary& regionDict = dEntry.dict();
label index = localNames.find(key); label index = localNames.find(key);
@ -255,8 +251,9 @@ Foam::searchableSurfaces::searchableSurfaces
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Unknown region name " << key << "Unknown region name " << key
<< " for surface " << s.name() << endl << " for surface " << s.name() << nl
<< "Valid region names are " << localNames << "Valid region names are " << localNames
<< endl
<< exit(FatalError); << exit(FatalError);
} }

View File

@ -125,9 +125,9 @@ void Foam::decompositionMethod::readConstraints()
if (dictptr) if (dictptr)
{ {
forAllConstIters(*dictptr, iter) for (const entry& dEntry : *dictptr)
{ {
const dictionary& dict = iter().dict(); const dictionary& dict = dEntry.dict();
constraintTypes.append(dict.get<word>("type")); constraintTypes.append(dict.get<word>("type"));

View File

@ -161,26 +161,26 @@ void Foam::multiLevelDecomp::createMethodsDict()
// - Only consider sub-dictionaries with a "numberOfSubdomains" entry // - Only consider sub-dictionaries with a "numberOfSubdomains" entry
// This automatically filters out any coeffs dictionaries // This automatically filters out any coeffs dictionaries
forAllConstIters(coeffsDict_, iter) for (const entry& dEntry : coeffsDict_)
{ {
word methodName; word methodName;
if if
( (
iter().isDict() dEntry.isDict()
// non-recursive, no patterns // non-recursive, no patterns
&& iter().dict().found("numberOfSubdomains", keyType::LITERAL) && dEntry.dict().found("numberOfSubdomains", keyType::LITERAL)
) )
{ {
// No method specified? can use a default method? // No method specified? can use a default method?
const bool addDefaultMethod const bool addDefaultMethod
( (
!(iter().dict().found("method", keyType::LITERAL)) !(dEntry.dict().found("method", keyType::LITERAL))
&& !defaultMethod.empty() && !defaultMethod.empty()
); );
entry* e = methodsDict_.add(iter()); entry* e = methodsDict_.add(dEntry);
if (addDefaultMethod && e && e->isDict()) if (addDefaultMethod && e && e->isDict())
{ {
@ -201,17 +201,17 @@ void Foam::multiLevelDecomp::setMethods()
methods_.clear(); methods_.clear();
methods_.setSize(methodsDict_.size()); methods_.setSize(methodsDict_.size());
forAllConstIters(methodsDict_, iter) for (const entry& dEntry : methodsDict_)
{ {
// Dictionary entries only // Dictionary entries only
// - these method dictionaries are non-regional // - these method dictionaries are non-regional
if (iter().isDict()) if (dEntry.isDict())
{ {
methods_.set methods_.set
( (
nLevels++, nLevels++,
// non-verbose would be nicer // non-verbose would be nicer
decompositionMethod::New(iter().dict()) decompositionMethod::New(dEntry.dict())
); );
} }
} }
@ -475,11 +475,11 @@ void Foam::multiLevelDecomp::decompose
// Get original level0 dictionary and modify numberOfSubdomains // Get original level0 dictionary and modify numberOfSubdomains
dictionary level0Dict; dictionary level0Dict;
forAllConstIters(methodsDict_, iter) for (const entry& dEntry : methodsDict_)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
level0Dict = iter().dict(); level0Dict = dEntry.dict();
break; break;
} }
} }

View File

@ -300,12 +300,12 @@ Foam::labelList Foam::zoltanRenumber::renumber
polyMesh& mesh = const_cast<polyMesh&>(pMesh); polyMesh& mesh = const_cast<polyMesh&>(pMesh);
forAllConstIter(IDLList<entry>, coeffsDict_, iter) for (const entry& dEntry : coeffsDict_)
{ {
if (!iter().isDict()) if (!dEntry.isDict())
{ {
const word& key = iter().keyword(); const word& key = dEntry.keyword();
const word value(iter().stream()); const word value(dEntry.get<word>());
Info<< typeName << " : setting parameter " << key Info<< typeName << " : setting parameter " << key
<< " to " << value << endl; << " to " << value << endl;

View File

@ -92,17 +92,17 @@ void Foam::RBD::rigidBodyModel::addRestraints
restraints_.setSize(restraintDict.size()); restraints_.setSize(restraintDict.size());
forAllConstIter(IDLList<entry>, restraintDict, iter) for (const entry& dEntry : restraintDict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
restraints_.set restraints_.set
( (
i++, i++,
restraint::New restraint::New
( (
iter().keyword(), dEntry.keyword(),
iter().dict(), dEntry.dict(),
*this *this
) )
); );
@ -185,9 +185,10 @@ Foam::RBD::rigidBodyModel::rigidBodyModel
const dictionary& bodiesDict = dict.subDict("bodies"); const dictionary& bodiesDict = dict.subDict("bodies");
forAllConstIter(IDLList<entry>, bodiesDict, iter) for (const entry& dEntry : bodiesDict)
{ {
const dictionary& bodyDict = iter().dict(); const keyType& key = dEntry.keyword();
const dictionary& bodyDict = dEntry.dict();
if (bodyDict.found("mergeWith")) if (bodyDict.found("mergeWith"))
{ {
@ -195,7 +196,7 @@ Foam::RBD::rigidBodyModel::rigidBodyModel
( (
bodyID(bodyDict.lookup("mergeWith")), bodyID(bodyDict.lookup("mergeWith")),
bodyDict.lookup("transform"), bodyDict.lookup("transform"),
rigidBody::New(iter().keyword(), bodyDict) rigidBody::New(key, bodyDict)
); );
} }
else else
@ -205,7 +206,7 @@ Foam::RBD::rigidBodyModel::rigidBodyModel
bodyID(bodyDict.lookup("parent")), bodyID(bodyDict.lookup("parent")),
bodyDict.lookup("transform"), bodyDict.lookup("transform"),
joint::New(bodyDict.subDict("joint")), joint::New(bodyDict.subDict("joint")),
rigidBody::New(iter().keyword(), bodyDict) rigidBody::New(key, bodyDict)
); );
} }
} }

View File

@ -136,18 +136,19 @@ Foam::rigidBodyMeshMotion::rigidBodyMeshMotion
const dictionary& bodiesDict = coeffDict().subDict("bodies"); const dictionary& bodiesDict = coeffDict().subDict("bodies");
forAllConstIter(IDLList<entry>, bodiesDict, iter) for (const entry& dEntry : bodiesDict)
{ {
const dictionary& bodyDict = iter().dict(); const keyType& bodyName = dEntry.keyword();
const dictionary& bodyDict = dEntry.dict();
if (bodyDict.found("patches")) if (bodyDict.found("patches"))
{ {
const label bodyID = model_.bodyID(iter().keyword()); const label bodyID = model_.bodyID(bodyName);
if (bodyID == -1) if (bodyID == -1)
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Body " << iter().keyword() << "Body " << bodyName
<< " has been merged with another body" << " has been merged with another body"
" and cannot be assigned a set of patches" " and cannot be assigned a set of patches"
<< exit(FatalError); << exit(FatalError);
@ -158,7 +159,7 @@ Foam::rigidBodyMeshMotion::rigidBodyMeshMotion
new bodyMesh new bodyMesh
( (
mesh, mesh,
iter().keyword(), bodyName,
bodyID, bodyID,
bodyDict bodyDict
) )

View File

@ -127,18 +127,19 @@ Foam::rigidBodyMeshMotionSolver::rigidBodyMeshMotionSolver
const dictionary& bodiesDict = coeffDict().subDict("bodies"); const dictionary& bodiesDict = coeffDict().subDict("bodies");
forAllConstIter(IDLList<entry>, bodiesDict, iter) for (const entry& dEntry : bodiesDict)
{ {
const dictionary& bodyDict = iter().dict(); const keyType& bodyName = dEntry.keyword();
const dictionary& bodyDict = dEntry.dict();
if (bodyDict.found("patches")) if (bodyDict.found("patches"))
{ {
const label bodyID = model_.bodyID(iter().keyword()); const label bodyID = model_.bodyID(bodyName);
if (bodyID == -1) if (bodyID == -1)
{ {
FatalErrorInFunction FatalErrorInFunction
<< "Body " << iter().keyword() << "Body " << bodyName
<< " has been merged with another body" << " has been merged with another body"
" and cannot be assigned a set of patches" " and cannot be assigned a set of patches"
<< exit(FatalError); << exit(FatalError);
@ -149,7 +150,7 @@ Foam::rigidBodyMeshMotionSolver::rigidBodyMeshMotionSolver
new bodyMesh new bodyMesh
( (
mesh, mesh,
iter().keyword(), bodyName,
bodyID, bodyID,
bodyDict bodyDict
) )

View File

@ -346,14 +346,15 @@ Foam::fileName Foam::ensightSurfaceWriter::writeCollated
<< "VARIABLE" << nl; << "VARIABLE" << nl;
const dictionary& fieldsDict = dict.subDict("fields"); const dictionary& fieldsDict = dict.subDict("fields");
forAllConstIter(dictionary, fieldsDict, iter) for (const entry& dEntry : fieldsDict)
{ {
const dictionary& subDict = iter().dict(); const dictionary& subDict = dEntry.dict();
const word fieldType(subDict.get<word>("type")); const word fieldType(subDict.get<word>("type"));
const word varName = subDict.lookupOrDefault const word varName = subDict.lookupOrDefault
( (
"name", "name",
iter().keyword() // fieldName as fallback dEntry.keyword() // fieldName as fallback
); );
osCase osCase

View File

@ -196,17 +196,17 @@ void Foam::sixDoFRigidBodyMotion::addRestraints
restraints_.setSize(restraintDict.size()); restraints_.setSize(restraintDict.size());
forAllConstIter(IDLList<entry>, restraintDict, iter) for (const entry& dEntry : restraintDict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
restraints_.set restraints_.set
( (
i++, i++,
sixDoFRigidBodyMotionRestraint::New sixDoFRigidBodyMotionRestraint::New
( (
iter().keyword(), dEntry.keyword(),
iter().dict() dEntry.dict()
) )
); );
} }
@ -233,17 +233,17 @@ void Foam::sixDoFRigidBodyMotion::addConstraints
pointConstraint pct; pointConstraint pct;
pointConstraint pcr; pointConstraint pcr;
forAllConstIter(IDLList<entry>, constraintDict, iter) for (const entry& dEntry : constraintDict)
{ {
if (iter().isDict()) if (dEntry.isDict())
{ {
constraints_.set constraints_.set
( (
i, i,
sixDoFRigidBodyMotionConstraint::New sixDoFRigidBodyMotionConstraint::New
( (
iter().keyword(), dEntry.keyword(),
iter().dict(), dEntry.dict(),
*this *this
) )
); );

View File

@ -75,16 +75,18 @@ Foam::radiation::greyMeanAbsorptionEmission::greyMeanAbsorptionEmission
label nFunc = 0; label nFunc = 0;
const dictionary& functionDicts = dict.optionalSubDict(typeName + "Coeffs"); const dictionary& functionDicts = dict.optionalSubDict(typeName + "Coeffs");
forAllConstIter(dictionary, functionDicts, iter) for (const entry& dEntry : functionDicts)
{ {
// safety: if (!dEntry.isDict()) // safety
if (!iter().isDict())
{ {
continue; continue;
} }
const word& key = iter().keyword();
const word& key = dEntry.keyword();
const dictionary& dict = dEntry.dict();
speciesNames_.insert(key, nFunc); speciesNames_.insert(key, nFunc);
const dictionary& dict = iter().dict();
coeffs_[nFunc].initialise(dict); coeffs_[nFunc].initialise(dict);
nFunc++; nFunc++;
} }

View File

@ -105,14 +105,16 @@ greyMeanSolidAbsorptionEmission
label nFunc = 0; label nFunc = 0;
const dictionary& functionDicts = dict.optionalSubDict(typeName + "Coeffs"); const dictionary& functionDicts = dict.optionalSubDict(typeName + "Coeffs");
forAllConstIter(dictionary, functionDicts, iter) for (const entry& dEntry : functionDicts)
{ {
// safety: if (!dEntry.isDict()) // safety
if (!iter().isDict())
{ {
continue; continue;
} }
const word& key = iter().keyword();
const word& key = dEntry.keyword();
const dictionary& dict = dEntry.dict();
if (!mixture_.contains(key)) if (!mixture_.contains(key))
{ {
WarningInFunction WarningInFunction
@ -122,7 +124,7 @@ greyMeanSolidAbsorptionEmission
<< nl << endl; << nl << endl;
} }
speciesNames_.insert(key, nFunc); speciesNames_.insert(key, nFunc);
const dictionary& dict = iter().dict();
dict.readEntry("absorptivity", solidData_[nFunc][absorptivity]); dict.readEntry("absorptivity", solidData_[nFunc][absorptivity]);
dict.readEntry("emissivity", solidData_[nFunc][emissivity]); dict.readEntry("emissivity", solidData_[nFunc][emissivity]);

View File

@ -65,37 +65,36 @@ Foam::radiation::wideBandAbsorptionEmission::wideBandAbsorptionEmission
{ {
label nBand = 0; label nBand = 0;
const dictionary& functionDicts = dict.optionalSubDict(typeName +"Coeffs"); const dictionary& functionDicts = dict.optionalSubDict(typeName +"Coeffs");
forAllConstIter(dictionary, functionDicts, iter) for (const entry& dEntry : functionDicts)
{ {
// safety: if (!dEntry.isDict()) // safety
if (!iter().isDict())
{ {
continue; continue;
} }
const dictionary& dict = iter().dict(); const dictionary& dict = dEntry.dict();
dict.readEntry("bandLimits", iBands_[nBand]); dict.readEntry("bandLimits", iBands_[nBand]);
dict.readEntry("EhrrCoeff", iEhrrCoeffs_[nBand]); dict.readEntry("EhrrCoeff", iEhrrCoeffs_[nBand]);
totalWaveLength_ += iBands_[nBand][1] - iBands_[nBand][0]; totalWaveLength_ += iBands_[nBand][1] - iBands_[nBand][0];
label nSpec = 0; label nSpec = 0;
const dictionary& specDicts = dict.subDict("species"); const dictionary& specDicts = dict.subDict("species");
forAllConstIter(dictionary, specDicts, iter) for (const entry& dEntry : specDicts)
{ {
const word& key = iter().keyword(); const word& key = dEntry.keyword();
if (nBand == 0) if (nBand == 0)
{ {
speciesNames_.insert(key, nSpec); speciesNames_.insert(key, nSpec);
} }
else else if (!speciesNames_.found(key))
{
if (!speciesNames_.found(key))
{ {
FatalErrorInFunction FatalErrorInFunction
<< "specie: " << key << " is not in all the bands" << "specie: " << key << " is not in all the bands"
<< nl << exit(FatalError); << nl << exit(FatalError);
} }
}
coeffs_[nBand][nSpec].initialise(specDicts.subDict(key)); coeffs_[nBand][nSpec].initialise(specDicts.subDict(key));
nSpec++; nSpec++;
} }

View File

@ -82,19 +82,15 @@ Foam::ReactionList<ThermoType>::~ReactionList()
template<class ThermoType> template<class ThermoType>
bool Foam::ReactionList<ThermoType>::readReactionDict() bool Foam::ReactionList<ThermoType>::readReactionDict()
{ {
const dictionary& reactions(dict_.subDict("reactions")); for (const entry& dEntry : dict_.subDict("reactions"))
forAllConstIter(dictionary, reactions, iter)
{ {
const word reactionName = iter().keyword();
this->append this->append
( (
Reaction<ThermoType>::New Reaction<ThermoType>::New
( (
species_, species_,
thermoDb_, thermoDb_,
reactions.subDict(reactionName) dEntry.dict()
).ptr() ).ptr()
); );
} }