Merge pull request #2655 from akohlmey/collected-refactoring

Collected small code refactoring changes
Authored by Axel Kohlmeyer; committed by GitHub on 2021-03-16 18:39:30 -04:00
33 changed files with 503 additions and 559 deletions

@@ -192,6 +192,29 @@ TEST_F(SimpleCommandsTest, Newton)
     ASSERT_EQ(lmp->force->newton_bond, 1);
 }
 
+TEST_F(SimpleCommandsTest, Partition)
+{
+    if (!verbose) ::testing::internal::CaptureStdout();
+    lmp->input->one("echo none");
+    if (!verbose) ::testing::internal::GetCapturedStdout();
+    TEST_FAILURE(".*ERROR: Illegal partition command .*",
+                 lmp->input->one("partition xxx 1 echo none"););
+    TEST_FAILURE(".*ERROR: Numeric index 2 is out of bounds.*",
+                 lmp->input->one("partition yes 2 echo none"););
+
+    ::testing::internal::CaptureStdout();
+    lmp->input->one("partition yes 1 print 'test'");
+    auto text = ::testing::internal::GetCapturedStdout();
+    if (verbose) std::cout << text;
+    ASSERT_THAT(text, StrEq("test\n"));
+
+    ::testing::internal::CaptureStdout();
+    lmp->input->one("partition no 1 print 'test'");
+    text = ::testing::internal::GetCapturedStdout();
+    if (verbose) std::cout << text;
+    ASSERT_THAT(text, StrEq(""));
+}
+
 TEST_F(SimpleCommandsTest, Quit)
 {
     ::testing::internal::CaptureStdout();
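
The new Partition test exercises the LAMMPS partition command, which runs the rest of its input line only on the listed partitions ("yes 1") or only on all other partitions ("no 1"); on a single-partition run the second print therefore produces no output. The test verifies this via GoogleTest's stdout capture. Below is a minimal self-contained sketch of that capture idiom; note that ::testing::internal::CaptureStdout() is an internal, undocumented GoogleTest API, used here exactly as the test above uses it.

// Sketch of the stdout-capture idiom from the Partition test above.
// CaptureStdout()/GetCapturedStdout() are GoogleTest internals, not a
// documented public API; the printf() stands in for lmp->input->one().
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include <cstdio>
#include <string>

using ::testing::StrEq;

TEST(CaptureDemo, Stdout)
{
    ::testing::internal::CaptureStdout();
    printf("test\n"); // stand-in for lmp->input->one("print 'test'")
    std::string text = ::testing::internal::GetCapturedStdout();
    ASSERT_THAT(text, StrEq("test\n"));
}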

@@ -102,7 +102,7 @@ TEST(Tokenizer, default_separators)
     ASSERT_EQ(t.count(), 2);
 }
 
-TEST(Tokenizer, as_vector)
+TEST(Tokenizer, as_vector1)
 {
     Tokenizer t(" \r\n test \t word \f");
     std::vector<std::string> list = t.as_vector();
@@ -110,6 +110,29 @@ TEST(Tokenizer, as_vector)
     ASSERT_THAT(list[1], Eq("word"));
 }
 
+TEST(Tokenizer, as_vector2)
+{
+    auto list = Tokenizer("a\\b\\c", "\\").as_vector();
+    ASSERT_THAT(list[0], Eq("a"));
+    ASSERT_THAT(list[1], Eq("b"));
+    ASSERT_THAT(list[2], Eq("c"));
+    ASSERT_EQ(list.size(), 3);
+}
+
+TEST(Tokenizer, as_vector3)
+{
+    auto list = Tokenizer("a\\", "\\").as_vector();
+    ASSERT_THAT(list[0], Eq("a"));
+    ASSERT_EQ(list.size(), 1);
+}
+
+TEST(Tokenizer, as_vector4)
+{
+    auto list = Tokenizer("\\a", "\\").as_vector();
+    ASSERT_THAT(list[0], Eq("a"));
+    ASSERT_EQ(list.size(), 1);
+}
+
 TEST(ValueTokenizer, empty_string)
 {
     ValueTokenizer values("");
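
The as_vector2 through as_vector4 cases added above pin down a useful Tokenizer property: leading and trailing separators do not produce empty tokens. A usage sketch of that behavior, under the assumption that LAMMPS' tokenizer.h header and the LAMMPS_NS namespace are available:

// Usage sketch for the behavior the tests above document; assumes the
// LAMMPS "tokenizer.h" header and the LAMMPS_NS namespace.
#include "tokenizer.h"
#include <cstdio>

using namespace LAMMPS_NS;

int main()
{
    // The C++ literal "\\a\\b\\" is the string \a\b\ ; with "\\" as the
    // separator it tokenizes to {"a", "b"}: the leading and trailing
    // separators are skipped, matching as_vector3 and as_vector4.
    for (const auto &word : Tokenizer("\\a\\b\\", "\\").as_vector())
        printf("token: %s\n", word.c_str());
    return 0;
}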
@@ -170,3 +193,31 @@ TEST(ValueTokenizer, not_contains)
     ValueTokenizer values("test word");
     ASSERT_FALSE(values.contains("test2"));
 }
+
+TEST(ValueTokenizer, missing_int)
+{
+    ValueTokenizer values("10");
+    ASSERT_EQ(values.next_int(), 10);
+    ASSERT_THROW(values.next_int(), TokenizerException);
+}
+
+TEST(ValueTokenizer, missing_tagint)
+{
+    ValueTokenizer values("42");
+    ASSERT_EQ(values.next_tagint(), 42);
+    ASSERT_THROW(values.next_tagint(), TokenizerException);
+}
+
+TEST(ValueTokenizer, missing_bigint)
+{
+    ValueTokenizer values("42");
+    ASSERT_EQ(values.next_bigint(), 42);
+    ASSERT_THROW(values.next_bigint(), TokenizerException);
+}
+
+TEST(ValueTokenizer, missing_double)
+{
+    ValueTokenizer values("3.14");
+    ASSERT_DOUBLE_EQ(values.next_double(), 3.14);
+    ASSERT_THROW(values.next_double(), TokenizerException);
+}
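
The four missing_* tests added above all document the same contract: each typed next_*() accessor consumes one token, and calling it again when no token remains throws TokenizerException. A consumer-side sketch, again assuming tokenizer.h and the LAMMPS_NS namespace:

// Consumer-side sketch of the typed accessors exercised above; assumes
// the LAMMPS "tokenizer.h" header and the LAMMPS_NS namespace.
#include "tokenizer.h"
#include <cstdio>

using namespace LAMMPS_NS;

int main()
{
    ValueTokenizer values("42 3.14");
    try {
        int step     = values.next_int();    // consumes "42"
        double cut   = values.next_double(); // consumes "3.14"
        double extra = values.next_double(); // no token left: throws
        printf("%d %g %g\n", step, cut, extra);
    } catch (TokenizerException &e) {
        // e.what() is assumed to follow the std::exception convention
        printf("parse error: %s\n", e.what());
    }
    return 0;
}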