mirror of https://develop.openfoam.com/Development/openfoam.git
ENH: fail-safe access of ITstream tokens as front(), back()
- more standard naming than peekFirst() etc
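For orientation, a minimal usage sketch of the renamed accessors (annotation, not part of the commit). The reportEnds() helper below is illustrative only, but the fail-safe behaviour is taken from the diff: unlike plain list indexing, ITstream::front(), back() and peek() return token::undefinedToken on an empty or exhausted stream instead of reading out of range.

    // Sketch only: behaviour of the fail-safe accessors after this change.
    #include "ITstream.H"
    #include "IOstreams.H"

    using namespace Foam;

    void reportEnds(const ITstream& its)
    {
        const token& first = its.front();  // token 0, or undefinedToken if empty
        const token& last  = its.back();   // last token, or undefinedToken if empty
        const token& next  = its.peek();   // next read (incl. putback), or undefinedToken

        Info<< "front : " << first.info() << nl
            << "back  : " << last.info() << nl
            << "peek  : " << next.info() << nl;

        if (!first.good())
        {
            // undefinedToken reports as not good(): the stream has no tokens
            Info<< "(stream has no tokens)" << nl;
        }
    }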
@@ -126,8 +126,8 @@ void doTest
 
     if (testskip)
     {
-        Info<< " first : " << its.peekFirst().info() << nl
-            << " last : " << its.peekLast().info() << nl;
+        Info<< " front : " << its.front().info() << nl
+            << " back : " << its.back().info() << nl;
 
         Info<< "rewind():" << nl;
         reportPeek(its);
@@ -36,23 +36,6 @@ License
 namespace Foam
 {
 
-// Failsafe read-access.
-// Return the token at location, or undefinedToken.
-inline static const token& peekTokenAt
-(
-    const UList<token>& list,
-    const label i
-)
-{
-    return
-    (
-        i >= 0 && i < list.size()
-      ? list[i]
-      : token::undefinedToken
-    );
-}
-
-
 // Convert input sequence into a list of tokens.
 // Return the number of tokens in the resulting list.
 static label parseStream(ISstream& is, tokenList& tokens)
@@ -337,18 +320,6 @@ std::string Foam::ITstream::toString() const
 }
 
 
-const Foam::token& Foam::ITstream::peekFirst() const
-{
-    return peekTokenAt(*this, 0);
-}
-
-
-const Foam::token& Foam::ITstream::peekLast() const
-{
-    return peekTokenAt(*this, tokenList::size()-1);
-}
-
-
 const Foam::token& Foam::ITstream::peek() const
 {
     // Use putback token if it exists
@@ -357,7 +328,7 @@ const Foam::token& Foam::ITstream::peek() const
         return Istream::peekBack();
     }
 
-    return peekTokenAt(*this, tokenIndex_);
+    return peekAt(tokenIndex_);
 }
 
 
@@ -564,7 +535,7 @@ void Foam::ITstream::rewind()
 }
 
 
-void Foam::ITstream::append(const token& t, const bool lazy)
+void Foam::ITstream::push_back(const token& t, const bool lazy)
 {
     reserveCapacity(tokenIndex_ + 1, lazy);
     tokenList& toks = *this;
@@ -574,7 +545,7 @@ void Foam::ITstream::append(const token& t, const bool lazy)
 }
 
 
-void Foam::ITstream::append(token&& t, const bool lazy)
+void Foam::ITstream::push_back(token&& t, const bool lazy)
 {
     reserveCapacity(tokenIndex_ + 1, lazy);
     tokenList& toks = *this;
@@ -584,7 +555,7 @@ void Foam::ITstream::append(token&& t, const bool lazy)
 }
 
 
-void Foam::ITstream::append(const UList<token>& newTokens, const bool lazy)
+void Foam::ITstream::push_back(const UList<token>& newTokens, const bool lazy)
 {
     reserveCapacity(tokenIndex_ + newTokens.size(), lazy);
     tokenList& toks = *this;
@@ -597,7 +568,7 @@ void Foam::ITstream::append(const UList<token>& newTokens, const bool lazy)
 }
 
 
-void Foam::ITstream::append(List<token>&& newTokens, const bool lazy)
+void Foam::ITstream::push_back(List<token>&& newTokens, const bool lazy)
 {
     reserveCapacity(tokenIndex_ + newTokens.size(), lazy);
     tokenList& toks = *this;
@@ -76,6 +76,19 @@ class ITstream
         void reserveCapacity(const label nElem, const bool lazy);
 
 
+        //- Failsafe read-access to token at specified location
+        //- or undefinedToken
+        inline const token& peekAt(const label i) const
+        {
+            return
+            (
+                i >= 0 && i < tokenList::size()
+              ? tokenList::operator[](i)
+              : token::undefinedToken
+            );
+        }
+
+
 public:
 
     // Constructors
@@ -222,28 +235,22 @@ public:
 
         //- Failsafe peek at the \b first token in the list.
         //  \return \c undefinedToken if the list is empty.
-        const token& peekFirst() const;
+        const token& front() const { return peekAt(0); }
 
         //- Failsafe peek at the \b last token in the list.
         //  \return \c undefinedToken if the list is empty.
-        const token& peekLast() const;
+        const token& back() const { return peekAt(tokenList::size()-1); }
 
         //- Failsafe peek at what the next read would return,
-        //  including handling of any putback
+        //- including handling of any putback
         //  \return \c undefinedToken if list is exhausted
         const token& peek() const;
 
         //- The current token index when reading, or the insertion point.
-        label tokenIndex() const noexcept
-        {
-            return tokenIndex_;
-        }
+        label tokenIndex() const noexcept { return tokenIndex_; }
 
         //- Non-const access to the current token index
-        label& tokenIndex() noexcept
-        {
-            return tokenIndex_;
-        }
+        label& tokenIndex() noexcept { return tokenIndex_; }
 
         //- Number of tokens remaining
         label nRemainingTokens() const noexcept
@@ -268,11 +275,11 @@ public:
 
         //- Copy append a token at the current tokenIndex,
         //- incrementing the index.
-        void append(const token& t, const bool lazy);
+        void push_back(const token& t, const bool lazy);
 
         //- Move append a token at the current tokenIndex,
        //- incrementing the index.
-        void append(token&& t, const bool lazy);
+        void push_back(token&& t, const bool lazy);
 
         //- Copy append a list of tokens at the current tokenIndex,
         //- incrementing the index.
@@ -280,7 +287,7 @@
        //  \param newTokens the list of tokens to copy append
        //  \param lazy leaves any excess capacity for further appends.
        //      The caller will be responsible for resizing later.
-        void append(const UList<token>& newTokens, const bool lazy);
+        void push_back(const UList<token>& newTokens, const bool lazy);
 
         //- Move append a list of tokens at the current tokenIndex,
         //- incrementing the index.
@@ -288,7 +295,7 @@
        //  \param newTokens the list of tokens to move append
        //  \param lazy leaves any excess capacity for further appends.
        //      The caller will be responsible for resizing later.
-        void append(List<token>&& newTokens, const bool lazy);
+        void push_back(List<token>&& newTokens, const bool lazy);
 
 
     // Stream State Functions
@@ -404,6 +411,48 @@ public:
         {}
 
 #endif /* Foam_IOstream_extras */
+
+
+    // Housekeeping
+
+        //- Same as front()
+        const token& peekFirst() const { return front(); }
+
+        //- Copy append a token at the current tokenIndex,
+        //- incrementing the index.
+        void append(const token& t, const bool lazy)
+        {
+            this->push_back(t, lazy);
+        }
+
+        //- Move append a token at the current tokenIndex,
+        //- incrementing the index.
+        void append(token&& t, const bool lazy)
+        {
+            this->push_back(std::move(t), lazy);
+        }
+
+        //- Copy append a list of tokens at the current tokenIndex,
+        //- incrementing the index.
+        //
+        //  \param newTokens the list of tokens to copy append
+        //  \param lazy leaves any excess capacity for further appends.
+        //      The caller will be responsible for resizing later.
+        void append(const UList<token>& newTokens, const bool lazy)
+        {
+            this->push_back(newTokens, lazy);
+        }
+
+        //- Move append a list of tokens at the current tokenIndex,
+        //- incrementing the index.
+        //
+        //  \param newTokens the list of tokens to move append
+        //  \param lazy leaves any excess capacity for further appends.
+        //      The caller will be responsible for resizing later.
+        void append(List<token>&& newTokens, const bool lazy)
+        {
+            this->push_back(std::move(newTokens), lazy);
+        }
 };
 
 
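The housekeeping wrappers above keep the old append() spelling compiling by forwarding to the new push_back() overloads. A small sketch of the two spellings (annotation, not part of the commit); the addEndStatement() helper and its token payload are illustrative only.

    // Sketch: old and new spellings when adding tokens to an ITstream.
    #include "ITstream.H"
    #include "token.H"

    using namespace Foam;

    void addEndStatement(ITstream& its)
    {
        // Preferred, container-like spelling introduced by this commit
        // (lazy = false: resize exactly, keep no excess capacity)
        its.push_back(token(token::END_STATEMENT), false);

        // Old spelling still compiles via the housekeeping wrapper,
        // which simply forwards to push_back()
        its.append(token(token::END_STATEMENT), false);
    }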
@@ -56,10 +56,10 @@ bool Foam::functionEntries::ifEntry::isTrue(ITstream& its)
 {
     Switch logic;
 
-    if (its.peekFirst().isScalar())
+    if (its.front().isScalar())
     {
         // Use default rounding tolerance
-        logic = Switch(its.first().scalarToken());
+        logic = Switch(its.front().scalarToken());
     }
     else
     {
@@ -1014,7 +1014,7 @@ bool Foam::functionObjectList::read()
 
             if (key == "useNamePrefix")  // As per functionObject
             {
-                Switch sw(dEntry.stream().peekFirst());
+                Switch sw(dEntry.stream().front());
                 if (sw.good())
                 {
                     functionObject::defaultUseNamePrefix = sw;