From c0e47b14e6b06123da0c1131690d2c670fe92648 Mon Sep 17 00:00:00 2001 From: Laria Carolin Chabowski Date: Wed, 15 Dec 2021 23:17:46 +0100 Subject: [PATCH] Add tokenizer test case During development on the parser I got a "malloc(): corrupted top size" error in the tokenizer when parsing `a=a`. I wrote this test to see if it was really a problem with the tokenizer. It wasn't; let's keep the test nonetheless. --- src/tokenizer_test.c | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/tokenizer_test.c b/src/tokenizer_test.c index 2e7e8f6..39c53f6 100644 --- a/src/tokenizer_test.c +++ b/src/tokenizer_test.c @@ -220,6 +220,17 @@ TEST(names, t) { destroy_tokenizer_test(tt); } +TEST(assignment, t) { + struct tokenizer_test *tt = new_tokenizer_test(t, "a=a"); + expect_text_token (tt, 1, 1, APFL_TOK_NAME, "a"); + expect_simple_token(tt, 1, 2, APFL_TOK_ASSIGN); + expect_text_token (tt, 1, 3, APFL_TOK_NAME, "a"); + + expect_eof(tt); + + destroy_tokenizer_test(tt); +} + TEST(all_tokens, t) { struct tokenizer_test *tt = new_tokenizer_test(t, // 1234567 @@ -280,5 +291,6 @@ TESTS_BEGIN ADDTEST(simple_variable), ADDTEST(numbers), ADDTEST(names), + ADDTEST(assignment), ADDTEST(all_tokens), TESTS_END