Add tokenizer test case
During development of the parser I got a "malloc(): corrupted top size" error in the tokenizer when parsing `a=a`. I wrote this test to see if it was really a problem with the tokenizer. It wasn't, but let's keep the test nonetheless.
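For reference, glibc prints "malloc(): corrupted top size" when the size field of the heap's top chunk has been overwritten, which almost always means some earlier code wrote past the end of a heap allocation; the malloc() call that aborts is usually not the buggy one. A minimal standalone sketch of that failure mode (hypothetical example, not code from this repository; exact behaviour depends on the glibc version):

/* Hypothetical repro, not project code: an out-of-bounds heap write
 * that clobbers the top chunk's size field, the usual cause of
 * glibc's "malloc(): corrupted top size" abort. */
#include <stdlib.h>
#include <string.h>

int main(void)
{
	char *buf = malloc(8);
	if (buf == NULL)
		return 1;

	/* Out-of-bounds write: spills well past the 8-byte allocation
	 * and over the malloc metadata that follows it on the heap. */
	memset(buf, 'A', 64);

	/* glibc typically detects the corruption here, on the next
	 * allocation that consults the top chunk, and aborts. */
	char *next = malloc(8);

	free(next);
	free(buf);
	return 0;
}

Building with -fsanitize=address, or running under Valgrind, reports the out-of-bounds write at its source, which complements a narrowing-down test like the one added below.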
parent 808219f9c7
commit c0e47b14e6
1 changed file with 12 additions and 0 deletions
@@ -220,6 +220,17 @@ TEST(names, t) {
 	destroy_tokenizer_test(tt);
 }
 
+TEST(assignment, t) {
+	struct tokenizer_test *tt = new_tokenizer_test(t, "a=a");
+	expect_text_token (tt, 1, 1, APFL_TOK_NAME, "a");
+	expect_simple_token(tt, 1, 2, APFL_TOK_ASSIGN);
+	expect_text_token (tt, 1, 3, APFL_TOK_NAME, "a");
+
+	expect_eof(tt);
+
+	destroy_tokenizer_test(tt);
+}
+
 TEST(all_tokens, t) {
 	struct tokenizer_test *tt = new_tokenizer_test(t,
 		// 1234567
@@ -280,5 +291,6 @@ TESTS_BEGIN
 	ADDTEST(simple_variable),
 	ADDTEST(numbers),
 	ADDTEST(names),
+	ADDTEST(assignment),
 	ADDTEST(all_tokens),
 TESTS_END