Performance optimization of Tokenizer

Reduces string allocations and removes std::vector from Tokenizer
Most processing now happens on demand.
This commit is contained in:
Richard Berger
2020-06-11 00:16:46 -04:00
parent 71d3827cce
commit 81d937ee97
9 changed files with 145 additions and 95 deletions

View File

@ -32,6 +32,10 @@ TEST(Utils, trim_and_count_words) {
ASSERT_EQ(utils::trim_and_count_words("some text # comment"), 2);
}
// count_words() must treat runs of whitespace (including leading/trailing
// spaces) as single separators: the 4 tokens here are "some", "text", "#",
// "comment". Unlike trim_and_count_words(), no comment stripping occurs,
// so "#" and "comment" are counted.
TEST(Utils, count_words_with_extra_spaces) {
ASSERT_EQ(utils::count_words("   some text # comment   "), 4);
}
// is_integer() should accept a plain unsigned decimal literal.
TEST(Utils, valid_integer1) {
ASSERT_TRUE(utils::is_integer("10"));
}