more unit tests for tokenizer functions contains() and matches()
This commit is contained in:
@ -178,6 +178,9 @@ TEST(Tokenizer, contains)
|
||||
Tokenizer values("test word");
|
||||
ASSERT_TRUE(values.contains("test"));
|
||||
ASSERT_TRUE(values.contains("word"));
|
||||
values = Tokenizer("Triangles");
|
||||
ASSERT_TRUE(values.contains("angles"));
|
||||
ASSERT_TRUE(values.contains("Triangles"));
|
||||
}
|
||||
|
||||
TEST(Tokenizer, not_contains)
|
||||
@ -190,13 +193,27 @@ TEST(Tokenizer, matches)
|
||||
{
|
||||
Tokenizer values("test word");
|
||||
ASSERT_TRUE(values.matches("test"));
|
||||
ASSERT_TRUE(values.matches("^test"));
|
||||
ASSERT_TRUE(values.matches("word"));
|
||||
ASSERT_TRUE(values.matches("word$"));
|
||||
ASSERT_TRUE(values.matches("^\\s*\\S+\\s+word"));
|
||||
values = Tokenizer("Triangles");
|
||||
ASSERT_TRUE(values.matches("^\\s*Triangles\\s*$"));
|
||||
values = Tokenizer("\t20\tatoms");
|
||||
ASSERT_TRUE(values.matches("^\\s*\\d+\\s+atoms\\s*$"));
|
||||
}
|
||||
|
||||
// Check that Tokenizer::matches() rejects patterns that do not occur in the
// tokenized text, including anchored patterns that occur only unanchored.
TEST(Tokenizer, not_matches)
{
    Tokenizer values("test word");
    ASSERT_FALSE(values.matches("test2"));
    // "word" is present but not at the start of the text
    ASSERT_FALSE(values.matches("^word"));
    // NOTE(review): these assume leading/trailing whitespace is not part of
    // the matched text (consumed by tokenization) -- confirm against class
    ASSERT_FALSE(values.matches("^ "));
    ASSERT_FALSE(values.matches(" $"));
    values = Tokenizer("Triangles");
    // only one token: there is no "<word> <whitespace> angles" sequence
    ASSERT_FALSE(values.matches("^\\s*\\S+\\s+angles"));
    values = Tokenizer("\t0x20\tatoms");
    // "0x20" is not a plain decimal integer, so \d+ followed by whitespace fails
    ASSERT_FALSE(values.matches("^\\s*\\d+\\s+atoms\\s*$"));
}
|
||||
|
||||
TEST(Tokenizer, as_vector1)
|
||||
@ -364,6 +381,9 @@ TEST(ValueTokenizer, contains)
|
||||
ValueTokenizer values("test word");
|
||||
ASSERT_TRUE(values.contains("test"));
|
||||
ASSERT_TRUE(values.contains("word"));
|
||||
values = ValueTokenizer("Triangles");
|
||||
ASSERT_TRUE(values.contains("angles"));
|
||||
ASSERT_TRUE(values.contains("Triangles"));
|
||||
}
|
||||
|
||||
TEST(ValueTokenizer, not_contains)
|
||||
@ -376,13 +396,27 @@ TEST(ValueTokenizer, matches)
|
||||
{
|
||||
ValueTokenizer values("test word");
|
||||
ASSERT_TRUE(values.matches("test"));
|
||||
ASSERT_TRUE(values.matches("^test"));
|
||||
ASSERT_TRUE(values.matches("word"));
|
||||
ASSERT_TRUE(values.matches("word$"));
|
||||
ASSERT_TRUE(values.matches("^\\s*\\S+\\s+word"));
|
||||
values = ValueTokenizer("Triangles");
|
||||
ASSERT_TRUE(values.matches("^\\s*Triangles\\s*$"));
|
||||
values = ValueTokenizer("\t20\tatoms");
|
||||
ASSERT_TRUE(values.matches("^\\s*\\d+\\s+atoms\\s*$"));
|
||||
}
|
||||
|
||||
// Check that ValueTokenizer::matches() rejects non-matching patterns;
// mirrors the Tokenizer.not_matches test above.
TEST(ValueTokenizer, not_matches)
{
    ValueTokenizer values("test word");
    ASSERT_FALSE(values.matches("test2"));
    // "word" is present but not at the start of the text
    ASSERT_FALSE(values.matches("^word"));
    // NOTE(review): these assume leading/trailing whitespace is not part of
    // the matched text (consumed by tokenization) -- confirm against class
    ASSERT_FALSE(values.matches("^ "));
    ASSERT_FALSE(values.matches(" $"));
    values = ValueTokenizer("Triangles");
    // only one token: there is no "<word> <whitespace> angles" sequence
    ASSERT_FALSE(values.matches("^\\s*\\S+\\s+angles"));
    values = ValueTokenizer("\t0x20\tatoms");
    // "0x20" is not a plain decimal integer, so \d+ followed by whitespace fails
    ASSERT_FALSE(values.matches("^\\s*\\d+\\s+atoms\\s*$"));
}
|
||||
|
||||
TEST(ValueTokenizer, missing_int)
|
||||
|
||||
Reference in New Issue
Block a user