From 09b9a73c2d8abcd8d459a0e412fa4bef5f4ded95 Mon Sep 17 00:00:00 2001
From: nsfisis
Date: Thu, 8 Jan 2026 02:44:13 +0900
Subject: fix: infinite loop when tokenizing "#include HEADER_MACRO"

---
 src/tokenize.c | 10 ++++++++++
 1 file changed, 10 insertions(+)

(limited to 'src/tokenize.c')

diff --git a/src/tokenize.c b/src/tokenize.c
index 1e7a8df..26c6e13 100644
--- a/src/tokenize.c
+++ b/src/tokenize.c
@@ -361,6 +361,16 @@ static void do_tokenize_all(Lexer* l) {
         } else if (c == '\n') {
             infile_next_char(l->src);
             tok->kind = TokenKind_newline;
+
+            // Reset expect_header_name at the end of a line. This handles cases like:
+            //
+            //     #ifdef ADDITIONAL_HEADER
+            //     #include ADDITIONAL_HEADER
+            //     #endif
+            //
+            // Even if ADDITIONAL_HEADER is undefined, the include directive line is still tokenized. If the flag
+            // were not reset, the next occurrence of '<' or '"' would be recognized as part of a header name.
+            l->expect_header_name = false;
         } else if (isspace(c)) {
             while (isspace((c = infile_peek_char(l->src)))) {
                 if (c == '\n')
--
cgit v1.2.3-70-g09d2
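
For context, here is a minimal, self-contained sketch of the mechanism this patch adjusts. It is not code from this repository: the toy scanner, its input, and all names other than expect_header_name are invented for illustration. It shows why a header-name flag must be cleared at every newline: after a line such as "#include FOO" that carries no literal header name, a stale flag would otherwise make a later '<' or '"' look like the start of a header name.

/* Illustrative sketch only: a toy scanner with an expect_header_name flag.
 * It is not the project's tokenizer; the input and helper logic are made up.
 * Clearing the flag at '\n' is what keeps the '<' in "1 < 2" from being
 * scanned as the start of a header name after "#include FOO". */
#include <stdio.h>
#include <string.h>
#include <stdbool.h>

int main(void) {
    const char* src =
        "#include <stdio.h>\n"   /* literal header name: consumed below   */
        "#include FOO\n"         /* macro operand: no literal header name */
        "int a = 1 < 2;\n";      /* '<' must remain an ordinary character */

    bool expect_header_name = false;
    const char* p = src;
    while (*p) {
        if (strncmp(p, "#include", 8) == 0) {
            expect_header_name = true;    /* a header name may follow */
            p += 8;
        } else if (*p == '\n') {
            expect_header_name = false;   /* the fix: reset once the line ends */
            p++;
        } else if (expect_header_name && (*p == '<' || *p == '"')) {
            char close = (*p == '<') ? '>' : '"';
            const char* end = strchr(p + 1, close);
            if (!end) { fprintf(stderr, "unterminated header name\n"); return 1; }
            printf("header name: %.*s\n", (int)(end - p + 1), p);
            p = end + 1;
            expect_header_name = false;
        } else {
            p++;                          /* everything else is skipped here */
        }
    }
    return 0;
}

Running the sketch prints only "header name: <stdio.h>". Without the reset at '\n', the '<' on the third line would be taken as the opening of a header name, which is analogous to the misbehavior this commit fixes.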