add tokenizer_next_str_until

This commit is contained in:
lixianjing 2024-01-02 09:26:25 +08:00
parent 359a076180
commit aacad31a20
4 changed files with 77 additions and 4 deletions

View File

@ -1,5 +1,8 @@
# 最新动态
2024/01/02
* 增加函数 tokenizer\_next\_str\_until
2024/01/01
* 文件浏览支持变量。
* 修复多行编辑器的软键盘action为空的问题。

View File

@ -87,13 +87,14 @@ bool_t tokenizer_has_more(tokenizer_t* tokenizer) {
static ret_t tokenizer_skip_quoted_str(tokenizer_t* tokenizer) {
bool_t escaped = FALSE;
char c = tokenizer->str[tokenizer->cursor];
return_value_if_fail(c == '\"', RET_BAD_PARAMS);
char quot_c = c;
return_value_if_fail(c == '\"' || c == '\'', RET_BAD_PARAMS);
tokenizer->cursor++;
while (tokenizer->str[tokenizer->cursor]) {
c = tokenizer->str[tokenizer->cursor++];
if (c == '\"') {
if (c == quot_c) {
if (!escaped) {
break;
}
@ -116,7 +117,7 @@ static ret_t tokenizer_closing_bracket_until(tokenizer_t* tokenizer, char openin
tokenizer->cursor++;
while (tokenizer->str[tokenizer->cursor]) {
char c = tokenizer->str[tokenizer->cursor];
if (c == '\"') {
if (c == '\"' || c == '\'') {
tokenizer_skip_quoted_str(tokenizer);
continue;
} else if (c == opening_bracket) {
@ -134,6 +135,38 @@ static ret_t tokenizer_closing_bracket_until(tokenizer_t* tokenizer, char openin
return RET_OK;
}
/**
 * Return the next token, scanning up to (but not consuming) the first
 * character found in the terminator set |str|.
 *
 * If the token starts with a double or single quote, the whole quoted
 * string is taken verbatim via tokenizer_next_str(), so terminator
 * characters inside quotes do not split the token.
 *
 * @param tokenizer tokenizer object (must be positioned on valid input).
 * @param str       set of terminator characters (must not be NULL).
 * @return the token text (owned by tokenizer->token) on success, NULL if
 *         there is no more input or the parameters are invalid.
 */
const char* tokenizer_next_str_until(tokenizer_t* tokenizer, const char* str) {
  return_value_if_fail(tokenizer_skip_separator(tokenizer) == RET_OK && str != NULL, NULL);

  if (!tokenizer_has_more(tokenizer)) {
    return NULL;
  }

  char first = tokenizer->str[tokenizer->cursor];
  if (first == '\"' || first == '\'') {
    /* Quoted token: delegate, so terminators inside the quotes are ignored. */
    return tokenizer_next_str(tokenizer);
  }

  /* Unquoted: advance the cursor until a terminator (or end of input). */
  uint32_t start = tokenizer->cursor;
  for (; tokenizer->str[tokenizer->cursor] != '\0'; tokenizer->cursor++) {
    if (strchr(str, tokenizer->str[tokenizer->cursor]) != NULL) {
      break;
    }
  }

  str_t* token = &(tokenizer->token);
  str_set_with_len(token, tokenizer->str + start, tokenizer->cursor - start);
  tokenizer_skip_separator(tokenizer);

  return token->str;
}
const char* tokenizer_next_expr_until(tokenizer_t* tokenizer, const char* str) {
return_value_if_fail(tokenizer_skip_separator(tokenizer) == RET_OK && str != NULL, NULL);
@ -144,7 +177,7 @@ const char* tokenizer_next_expr_until(tokenizer_t* tokenizer, const char* str) {
while (tokenizer->str[tokenizer->cursor]) {
char c = tokenizer->str[tokenizer->cursor];
if (c == '\"') {
if (c == '\"' || c == '\'') {
tokenizer_skip_quoted_str(tokenizer);
continue;
} else if (c == '(') {

View File

@ -155,6 +155,16 @@ const char* tokenizer_next_until(tokenizer_t* tokenizer, const char* str);
*/
const char* tokenizer_next_expr_until(tokenizer_t* tokenizer, const char* str);
/**
* @method tokenizer_next_str_until
* 获取下一个str，直到遇到指定的char。
* @param {tokenizer_t*} tokenizer tokenizer对象
* @param {const char*} str 字符集。
*
* @return {const char*} 成功返回token，失败返回NULL。
*/
const char* tokenizer_next_str_until(tokenizer_t* tokenizer, const char* str);
/**
* @method tokenizer_next_int
* 获取下一个token，并转换成int。

View File

@ -101,6 +101,33 @@ TEST(Tokenizer, until2) {
tokenizer_deinit(t);
}
/* A double-quoted token: the ',' inside "a,a" is part of the terminator set
 * but must NOT split the token, because quoted strings are taken verbatim. */
TEST(Tokenizer, str1) {
tokenizer_t tokenizer;
tokenizer_t* t = tokenizer_init_ex(&tokenizer, "{\"a,a\"}", 100, "{}", "=,");
ASSERT_EQ(string(tokenizer_next_str_until(t, ",}")), string("a,a"));
tokenizer_deinit(t);
}
/* Same as str1 but with single quotes: 'a,a' must also be kept whole,
 * exercising the new quot_c handling in tokenizer_skip_quoted_str. */
TEST(Tokenizer, str2) {
tokenizer_t tokenizer;
tokenizer_t* t = tokenizer_init_ex(&tokenizer, "{'a,a'}", 100, "{}", "=,");
ASSERT_EQ(string(tokenizer_next_str_until(t, ",}")), string("a,a"));
tokenizer_deinit(t);
}
/* Unquoted token: scanning stops at the first terminator (','),
 * so only the leading "a" is returned. */
TEST(Tokenizer, str3) {
tokenizer_t tokenizer;
tokenizer_t* t = tokenizer_init_ex(&tokenizer, "{a,a}", 100, "{}", "=,");
ASSERT_EQ(string(tokenizer_next_str_until(t, ",}")), string("a"));
tokenizer_deinit(t);
}
TEST(Tokenizer, expr1) {
tokenizer_t tokenizer;