awtk/tests/tokenizer_test.cc

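/* Unit tests for tkc/tokenizer.h, written with GoogleTest. */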

#include "tkc/tokenizer.h"
#include "gtest/gtest.h"
#include <string>

using std::string;
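
// Basic usage: split a space-separated string and read the tokens back as
// strings, ints and floats.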
TEST(Tokenizer, basic) {
  tokenizer_t tokenizer;
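  // 0xffffff is just a size larger than the input, so the whole string is
  // tokenized.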
  tokenizer_t* t = tokenizer_init(&tokenizer, "w10 h10 10 3 123.1", 0xffffff, " ");

  ASSERT_EQ(t, &tokenizer);
  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(string("w10"), tokenizer_next(t));

  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(string("h10"), tokenizer_next(t));

  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(10, tokenizer_next_int(t, 0));

  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(3, tokenizer_next_int(t, 0));

  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(123.1f, tokenizer_next_float(t, 0));
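
  // Once the input is exhausted, tokenizer_next_float() returns the supplied
  // default value (1 here) rather than a parsed token.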
  ASSERT_EQ(tokenizer_has_more(t), FALSE);
  ASSERT_EQ(1.0f, tokenizer_next_float(t, 1));

  tokenizer_deinit(t);
}
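
// Multiple separator characters: space and comma both split tokens, and runs
// of consecutive separators produce no empty tokens.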
TEST(Tokenizer, separators) {
  tokenizer_t tokenizer;
  tokenizer_t* t = tokenizer_init(&tokenizer, "w10, h10, 10 , 3, 123.1", 0xffffff, " ,");

  ASSERT_EQ(t, &tokenizer);
  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(string("w10"), tokenizer_next(t));

  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(string("h10"), tokenizer_next(t));

  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(10, tokenizer_next_int(t, 0));

  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(3, tokenizer_next_int(t, 0));

  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(123.1f, tokenizer_next_float(t, 0));

  tokenizer_deinit(t);
}
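
// tokenizer_init_ex() additionally takes a set of single-character tokens:
// '=' and ',' are returned as standalone tokens instead of being skipped.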
TEST(Tokenizer, single_char_token) {
  tokenizer_t tokenizer;
  tokenizer_t* t = tokenizer_init_ex(&tokenizer, "{a, b=2}", 100, "{ }", "=,");

  ASSERT_EQ(t, &tokenizer);
  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(string("a"), tokenizer_next(t));
  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(string(","), tokenizer_next(t));
  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(string("b"), tokenizer_next(t));
  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(string("="), tokenizer_next(t));
  ASSERT_EQ(tokenizer_has_more(t), TRUE);
  ASSERT_EQ(string("2"), tokenizer_next(t));

  tokenizer_deinit(t);
}
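
// tokenizer_next_until() returns everything up to (but not including) the
// first character from the given set; note that it does not split on the
// single-char token '='.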
TEST(Tokenizer, until1) {
  tokenizer_t tokenizer;
  tokenizer_t* t = tokenizer_init_ex(&tokenizer, "{a==1}", 100, "{ }", "=,");

  ASSERT_EQ(t, &tokenizer);
  ASSERT_EQ(string(tokenizer_next_until(t, ",}")), string("a==1"));

  tokenizer_deinit(t);
}
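
// As above, but the stop character is the ',' in the middle of the input.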
TEST(Tokenizer, until2) {
  tokenizer_t tokenizer;
  tokenizer_t* t = tokenizer_init_ex(&tokenizer, "{a==1, a}", 100, "{ }", "=,");

  ASSERT_EQ(t, &tokenizer);
  ASSERT_EQ(string(tokenizer_next_until(t, ",}")), string("a==1"));

  tokenizer_deinit(t);
}