diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 7d048d9e02..42ae45453d 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -17,6 +17,7 @@
 
 #include "tokenizer.h"
 #include "utils.h"
+#include <algorithm>
 
 using namespace LAMMPS_NS;
 
@@ -84,6 +85,10 @@ bool ValueTokenizer::has_next() const {
     return current != tokens.cend();
 }
 
+bool ValueTokenizer::contains(const std::string & value) const {
+    return std::find(tokens.cbegin(), tokens.cend(), value) != tokens.cend();
+}
+
 std::string ValueTokenizer::next_string() {
     if (has_next()) {
         std::string value = *current;
diff --git a/src/tokenizer.h b/src/tokenizer.h
index 89cb57b301..60dcf9bfd5 100644
--- a/src/tokenizer.h
+++ b/src/tokenizer.h
@@ -91,6 +91,7 @@ public:
     double next_double();
 
     bool has_next() const;
+    bool contains(const std::string & value) const;
     void skip(int ntokens);
 
     size_t count() const;
diff --git a/unittest/utils/test_tokenizer.cpp b/unittest/utils/test_tokenizer.cpp
index 08c71338be..02d2df9b38 100644
--- a/unittest/utils/test_tokenizer.cpp
+++ b/unittest/utils/test_tokenizer.cpp
@@ -112,3 +112,14 @@ TEST(ValueTokenizer, valid_double_with_exponential) {
     ValueTokenizer values("3.14e22");
     ASSERT_DOUBLE_EQ(values.next_double(), 3.14e22);
 }
+
+TEST(ValueTokenizer, contains) {
+    ValueTokenizer values("test word");
+    ASSERT_TRUE(values.contains("test"));
+    ASSERT_TRUE(values.contains("word"));
+}
+
+TEST(ValueTokenizer, not_contains) {
+    ValueTokenizer values("test word");
+    ASSERT_FALSE(values.contains("test2"));
+}
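
Usage note (not part of the patch): a minimal sketch of how a caller could use the new ValueTokenizer::contains() to test for a keyword before consuming any tokens. The main() driver and the example input line are hypothetical; the snippet assumes it is built inside the LAMMPS source tree so that "tokenizer.h" is on the include path.

// Illustrative only: exercises the contains() method added by this patch.
#include "tokenizer.h"

#include <iostream>
#include <string>

using namespace LAMMPS_NS;

int main()
{
    // Hypothetical input-script line used as example data.
    std::string line = "pair_style lj/cut 2.5";

    ValueTokenizer values(line);

    // contains() searches the full token list via std::find and does not
    // advance the tokenizer, so next_string()/next_double() still start
    // from the first token afterwards.
    if (values.contains("lj/cut")) {
        std::cout << "style:  " << values.next_string() << "\n";  // "pair_style"
        std::cout << "name:   " << values.next_string() << "\n";  // "lj/cut"
        std::cout << "cutoff: " << values.next_double() << "\n";  // 2.5
    }
    return 0;
}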