add a few more tokenizer test cases for additional use cases.

commit a6a2879807
parent 63110fd588
Author: Axel Kohlmeyer
Date:   2021-03-15 12:02:58 -04:00

@@ -102,7 +102,7 @@ TEST(Tokenizer, default_separators)
     ASSERT_EQ(t.count(), 2);
 }

-TEST(Tokenizer, as_vector)
+TEST(Tokenizer, as_vector1)
 {
     Tokenizer t(" \r\n test \t word \f");
     std::vector<std::string> list = t.as_vector();
@@ -110,6 +110,29 @@ TEST(Tokenizer, as_vector)
     ASSERT_THAT(list[1], Eq("word"));
 }
+
+TEST(Tokenizer, as_vector2)
+{
+    auto list = Tokenizer("a\\b\\c","\\").as_vector();
+    ASSERT_THAT(list[0], Eq("a"));
+    ASSERT_THAT(list[1], Eq("b"));
+    ASSERT_THAT(list[2], Eq("c"));
+    ASSERT_EQ(list.size(), 3);
+}
+
+TEST(Tokenizer, as_vector3)
+{
+    auto list = Tokenizer("a\\","\\").as_vector();
+    ASSERT_THAT(list[0], Eq("a"));
+    ASSERT_EQ(list.size(), 1);
+}
+
+TEST(Tokenizer, as_vector4)
+{
+    auto list = Tokenizer("\\a","\\").as_vector();
+    ASSERT_THAT(list[0], Eq("a"));
+    ASSERT_EQ(list.size(), 1);
+}

 TEST(ValueTokenizer, empty_string)
 {
     ValueTokenizer values("");
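
For context, the new cases pin down how a custom separator behaves at string boundaries: leading and trailing separators are skipped rather than producing empty tokens. Below is a minimal standalone sketch of that behavior; the include path and namespace are assumptions about the usual LAMMPS source layout, while the tokenizer calls mirror the assertions above.

// Minimal sketch; header path and namespace are assumed from the LAMMPS tree.
#include "tokenizer.h"

#include <cstdio>

using LAMMPS_NS::Tokenizer;

int main()
{
    // "a\b\c" split on '\' yields {"a", "b", "c"} (as_vector2).
    auto abc = Tokenizer("a\\b\\c", "\\").as_vector();

    // A leading or trailing separator produces no empty token:
    // both "a\" and "\a" yield the single token {"a"} (as_vector3/4).
    auto trailing = Tokenizer("a\\", "\\").as_vector();
    auto leading  = Tokenizer("\\a", "\\").as_vector();

    std::printf("%zu %zu %zu\n", abc.size(), trailing.size(), leading.size()); // 3 1 1
    return 0;
}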