diff options
| author | nsfisis <nsfisis@gmail.com> | 2025-08-17 08:05:54 +0900 |
|---|---|---|
| committer | nsfisis <nsfisis@gmail.com> | 2025-08-17 08:05:54 +0900 |
| commit | 89a723320a8b7f387948f3edd9c1ce12824424d6 (patch) | |
| tree | c83691af082fe9e0c32834154f513516d4f7aa11 | |
| parent | 48d5ca9bcf08a0995008759bf5bb9fec5a1631bc (diff) | |
| download | ducc-89a723320a8b7f387948f3edd9c1ce12824424d6.tar.gz ducc-89a723320a8b7f387948f3edd9c1ce12824424d6.tar.zst ducc-89a723320a8b7f387948f3edd9c1ce12824424d6.zip | |
refactor: define TokenValue to store semantic value
| -rw-r--r-- | common.c | 9 | ||||
| -rw-r--r-- | parse.c | 28 | ||||
| -rw-r--r-- | preprocess.c | 98 | ||||
| -rw-r--r-- | std.h | 5 | ||||
| -rw-r--r-- | tokenize.c | 95 |
5 files changed, 120 insertions, 115 deletions
@@ -12,7 +12,7 @@ void fatal_error(const char* msg, ...) { #define unimplemented() fatal_error("%s:%d: unimplemented", __FILE__, __LINE__) struct String { - char* data; + const char* data; size_t len; }; typedef struct String String; @@ -23,6 +23,13 @@ char* string_to_cstr(const String* s) { return buf; } +String* cstr_to_new_string(const char* cstr) { + String* s = calloc(1, sizeof(String)); + s->len = strlen(cstr); + s->data = strndup(cstr, s->len); + return s; +} + BOOL string_equals(const String* s1, const String* s2) { return s1->len == s2->len && strncmp(s1->data, s2->data, s1->len) == 0; } @@ -60,8 +60,7 @@ Parser* parser_new(TokenArray* tokens) { p->typedefs = calloc(64, sizeof(AstNode)); p->str_literals = calloc(1024, sizeof(char*)); - p->funcs[p->n_funcs].name.data = "va_start"; - p->funcs[p->n_funcs].name.len = strlen("va_start"); + p->funcs[p->n_funcs].name = *cstr_to_new_string("va_start"); p->funcs[p->n_funcs].ty = calloc(1, sizeof(Type)); p->funcs[p->n_funcs].ty->kind = TypeKind_void; ++p->n_funcs; @@ -154,9 +153,10 @@ int add_lvar(Parser* p, String* name, Type* ty, BOOL is_param) { String* generate_temporary_lvar_name(Parser* p) { String* ret = calloc(1, sizeof(String)); - ret->data = calloc(256, sizeof(char)); + char* buf = calloc(256, sizeof(char)); for (int i = 1;; ++i) { - ret->len = sprintf(ret->data, "__%d", i); + ret->len = sprintf(buf, "__%d", i); + ret->data = buf; if (find_lvar(p, ret) == -1) { return ret; } @@ -265,7 +265,7 @@ AstNode* parse_expr(Parser* p); AstNode* parse_stmt(Parser* p); String* parse_ident(Parser* p) { - return &expect(p, TokenKind_ident)->raw; + return &expect(p, TokenKind_ident)->value.string; } int register_str_literal(Parser* p, char* s) { @@ -277,18 +277,18 @@ int register_str_literal(Parser* p, char* s) { AstNode* parse_primary_expr(Parser* p) { Token* t = next_token(p); if (t->kind == TokenKind_literal_int) { - return ast_new_int(atoi(string_to_cstr(&t->raw))); + return ast_new_int(t->value.integer); } else if 
(t->kind == TokenKind_literal_str) { AstNode* e = ast_new(AstNodeKind_str_expr); - e->node_idx = register_str_literal(p, string_to_cstr(&t->raw)); - e->ty = type_new_static_string(t->raw.len); + e->node_idx = register_str_literal(p, string_to_cstr(&t->value.string)); + e->ty = type_new_static_string(t->value.string.len); return e; } else if (t->kind == TokenKind_paren_l) { AstNode* e = parse_expr(p); expect(p, TokenKind_paren_r); return e; } else if (t->kind == TokenKind_ident || t->kind == TokenKind_va_start) { - String* name = &t->raw; + String* name = &t->value.string; if (peek_token(p)->kind == TokenKind_paren_l) { AstNode* e = ast_new(AstNodeKind_func_call); @@ -423,7 +423,7 @@ BOOL is_type_token(Parser* p, Token* token) { if (token->kind != TokenKind_ident) { return FALSE; } - return find_typedef(p, &token->raw) != -1; + return find_typedef(p, &token->value.string) != -1; } Type* parse_type(Parser* p) { @@ -436,9 +436,9 @@ Type* parse_type(Parser* p) { } Type* ty; if (t->kind == TokenKind_ident) { - int typedef_idx = find_typedef(p, &t->raw); + int typedef_idx = find_typedef(p, &t->value.string); if (typedef_idx == -1) { - fatal_error("parse_type: unknown typedef, %.*s", t->raw.len, t->raw.data); + fatal_error("parse_type: unknown typedef, %.*s", t->value.string.len, t->value.string.data); } ty = p->typedefs[typedef_idx].ty; } else { @@ -524,12 +524,12 @@ AstNode* parse_prefix_expr(Parser* p) { Token* next_tok = peek_token(p); Type* ty = NULL; if (next_tok->kind == TokenKind_ident) { - int lvar_idx = find_lvar(p, &next_tok->raw); + int lvar_idx = find_lvar(p, &next_tok->value.string); if (lvar_idx != -1) { next_token(p); ty = p->lvars[lvar_idx].ty; } - int gvar_idx = find_gvar(p, &next_tok->raw); + int gvar_idx = find_gvar(p, &next_tok->value.string); if (gvar_idx != -1) { next_token(p); ty = p->gvars[gvar_idx].ty; diff --git a/preprocess.c b/preprocess.c index fde107e..a2c1429 100644 --- a/preprocess.c +++ b/preprocess.c @@ -339,20 +339,32 @@ struct 
SourceLocation { }; typedef struct SourceLocation SourceLocation; +// TokenValue is externally tagged by Token's kind. +union TokenValue { + String string; + int integer; +}; +typedef union TokenValue TokenValue; + struct Token { TokenKind kind; - String raw; + TokenValue value; SourceLocation loc; }; typedef struct Token Token; const char* token_stringify(Token* t) { TokenKind k = t->kind; - if (k == TokenKind_other || k == TokenKind_character_constant || k == TokenKind_ident || - k == TokenKind_literal_int || k == TokenKind_literal_str) { + if (k == TokenKind_literal_int) { + const char* kind_str = token_kind_stringify(k); + char* buf = calloc(10 + strlen(kind_str) + 3 + 1, sizeof(char)); + sprintf(buf, "%d (%s)", t->value.integer, kind_str); + return buf; + } else if (k == TokenKind_other || k == TokenKind_character_constant || k == TokenKind_ident || + k == TokenKind_literal_int || k == TokenKind_literal_str) { const char* kind_str = token_kind_stringify(k); - char* buf = calloc(t->raw.len + strlen(kind_str) + 3 + 1, sizeof(char)); - sprintf(buf, "%.*s (%s)", t->raw.len, t->raw.data, kind_str); + char* buf = calloc(t->value.string.len + strlen(kind_str) + 3 + 1, sizeof(char)); + sprintf(buf, "%.*s (%s)", t->value.string.len, t->value.string.data, kind_str); return buf; } else { return token_kind_stringify(k); @@ -424,7 +436,7 @@ int macro_find_param(Macro* macro, Token* tok) { return -1; for (int i = 0; i < macro->parameters.len; ++i) { - if (string_equals(&macro->parameters.data[i].raw, &tok->raw)) { + if (string_equals(&macro->parameters.data[i].value.string, &tok->value.string)) { return i; } } @@ -480,23 +492,19 @@ void add_predefined_macros(MacroArray* macros) { m = macros_push_new(macros); m->kind = MacroKind_obj; - m->name.len = strlen("__ducc__"); - m->name.data = "__ducc__"; + m->name = *cstr_to_new_string("__ducc__"); tokens_init(&m->replacements, 1); Token* tok = tokens_push_new(&m->replacements); tok->kind = TokenKind_literal_int; - tok->raw.len = 
strlen("1"); - tok->raw.data = "1"; + tok->value.integer = 1; m = macros_push_new(macros); m->kind = MacroKind_builtin_file; - m->name.len = strlen("__FILE__"); - m->name.data = "__FILE__"; + m->name = *cstr_to_new_string("__FILE__"); m = macros_push_new(macros); m->kind = MacroKind_builtin_line; - m->name.len = strlen("__LINE__"); - m->name.data = "__LINE__"; + m->name = *cstr_to_new_string("__LINE__"); } struct MacroArg { @@ -629,8 +637,8 @@ void pplexer_tokenize_all(PpLexer* ppl) { } ++ppl->pos; tok->kind = TokenKind_header_name; - tok->raw.data = ppl->src + start; - tok->raw.len = ppl->pos - start; + tok->value.string.data = ppl->src + start; + tok->value.string.len = ppl->pos - start; ppl->expect_header_name = FALSE; } else if (ppl->expect_header_name && c == '<') { int start = ppl->pos - 1; @@ -643,8 +651,8 @@ void pplexer_tokenize_all(PpLexer* ppl) { } ++ppl->pos; tok->kind = TokenKind_header_name; - tok->raw.data = ppl->src + start; - tok->raw.len = ppl->pos - start; + tok->value.string.data = ppl->src + start; + tok->value.string.len = ppl->pos - start; ppl->expect_header_name = FALSE; } else if (c == '(') { tok->kind = TokenKind_paren_l; @@ -768,13 +776,11 @@ void pplexer_tokenize_all(PpLexer* ppl) { tok->kind = TokenKind_ellipsis; } else { tok->kind = TokenKind_other; - tok->raw.len = 2; - tok->raw.data = ppl->src + ppl->pos - tok->raw.len; + tok->value.string.len = 2; + tok->value.string.data = ppl->src + ppl->pos - tok->value.string.len; } } else { tok->kind = TokenKind_dot; - tok->raw.len = 1; - tok->raw.data = ppl->src + ppl->pos - tok->raw.len; } } else if (c == '!') { if (ppl->src[ppl->pos] == '=') { @@ -834,8 +840,8 @@ void pplexer_tokenize_all(PpLexer* ppl) { } ppl->pos += 2; tok->kind = TokenKind_character_constant; - tok->raw.data = ppl->src + start; - tok->raw.len = ppl->pos - start; + tok->value.string.data = ppl->src + start; + tok->value.string.len = ppl->pos - start; } else if (c == '"') { int start = ppl->pos - 1; while (1) { @@ -849,8 
+855,8 @@ void pplexer_tokenize_all(PpLexer* ppl) { } ++ppl->pos; tok->kind = TokenKind_literal_str; - tok->raw.data = ppl->src + start; - tok->raw.len = ppl->pos - start; + tok->value.string.data = ppl->src + start + 1; + tok->value.string.len = ppl->pos - start - 2; } else if (isdigit(c)) { --ppl->pos; int start = ppl->pos; @@ -858,16 +864,18 @@ void pplexer_tokenize_all(PpLexer* ppl) { ++ppl->pos; } tok->kind = TokenKind_literal_int; - tok->raw.data = ppl->src + start; - tok->raw.len = ppl->pos - start; + String n; + n.data = ppl->src + start; + n.len = ppl->pos - start; + tok->value.integer = atoi(string_to_cstr(&n)); } else if (isalpha(c) || c == '_') { --ppl->pos; int start = ppl->pos; while (isalnum(ppl->src[ppl->pos]) || ppl->src[ppl->pos] == '_') { ++ppl->pos; } - tok->raw.data = ppl->src + start; - tok->raw.len = ppl->pos - start; + tok->value.string.data = ppl->src + start; + tok->value.string.len = ppl->pos - start; tok->kind = TokenKind_ident; } else if (c == '\n' || c == '\r') { ++ppl->line; @@ -888,8 +896,8 @@ void pplexer_tokenize_all(PpLexer* ppl) { } } else { tok->kind = TokenKind_other; - tok->raw.len = 1; - tok->raw.data = ppl->src + ppl->pos - tok->raw.len; + tok->value.string.len = 1; + tok->value.string.data = ppl->src + ppl->pos - tok->value.string.len; } ppl->at_bol = tok->kind == TokenKind_newline; } @@ -991,8 +999,8 @@ void seek_to_next_newline(Preprocessor* pp) { void make_token_whitespace(Token* tok) { tok->kind = TokenKind_whitespace; - tok->raw.len = 0; - tok->raw.data = NULL; + tok->value.string.len = 0; + tok->value.string.data = NULL; } void remove_directive_tokens(Preprocessor* pp, int start, int end) { @@ -1027,7 +1035,7 @@ void process_ifdef_directive(Preprocessor* pp, int directive_token_pos) { Token* macro_name = peek_pp_token(pp); if (macro_name->kind == TokenKind_ident) { next_pp_token(pp); - pp->skip_pp_tokens = find_macro(pp, &macro_name->raw) == -1; + pp->skip_pp_tokens = find_macro(pp, &macro_name->value.string) == -1; }
remove_directive_tokens(pp, directive_token_pos, pp->pos); } @@ -1038,7 +1046,7 @@ void process_ifndef_directive(Preprocessor* pp, int directive_token_pos) { Token* macro_name = peek_pp_token(pp); if (macro_name->kind == TokenKind_ident) { next_pp_token(pp); - pp->skip_pp_tokens = find_macro(pp, &macro_name->raw) != -1; + pp->skip_pp_tokens = find_macro(pp, &macro_name->value.string) != -1; } remove_directive_tokens(pp, directive_token_pos, pp->pos); } @@ -1049,7 +1057,7 @@ String* read_include_header_name(Preprocessor* pp) { fatal_error("%s:%d: invalid #include", tok->loc.filename, tok->loc.line); } - return &tok->raw; + return &tok->value.string; } const char* resolve_include_name(Preprocessor* pp, String* include_name) { @@ -1173,7 +1181,7 @@ void process_define_directive(Preprocessor* pp, int directive_token_pos) { } Macro* macro = macros_push_new(pp->macros); macro->kind = MacroKind_func; - macro->name = macro_name->raw; + macro->name = macro_name->value.string; macro->parameters = *parameters; int n_replacements = pp->pos - replacements_start_pos; tokens_init(&macro->replacements, n_replacements); @@ -1188,7 +1196,7 @@ void process_define_directive(Preprocessor* pp, int directive_token_pos) { } Macro* macro = macros_push_new(pp->macros); macro->kind = MacroKind_obj; - macro->name = macro_name->raw; + macro->name = macro_name->value.string; int n_replacements = pp->pos - replacements_start_pos; tokens_init(&macro->replacements, n_replacements); for (int i = 0; i < n_replacements; ++i) { @@ -1204,7 +1212,7 @@ void process_undef_directive(Preprocessor* pp, int directive_token_pos) { Token* macro_name = peek_pp_token(pp); if (macro_name->kind == TokenKind_ident) { next_pp_token(pp); - int macro_idx = find_macro(pp, &macro_name->raw); + int macro_idx = find_macro(pp, &macro_name->value.string); if (macro_idx != -1) { undef_macro(pp, macro_idx); } @@ -1260,7 +1268,7 @@ MacroArgArray* pp_parse_macro_arguments(Preprocessor* pp) { BOOL expand_macro(Preprocessor* pp) { int macro_name_pos = 
pp->pos; Token* macro_name = next_pp_token(pp); - int macro_idx = find_macro(pp, &macro_name->raw); + int macro_idx = find_macro(pp, &macro_name->value.string); if (macro_idx == -1) { return FALSE; } @@ -1290,18 +1298,14 @@ BOOL expand_macro(Preprocessor* pp) { } else if (macro->kind == MacroKind_builtin_file) { Token file_tok; file_tok.kind = TokenKind_literal_str; - file_tok.raw.len = strlen(macro_name->loc.filename) + 2; - file_tok.raw.data = calloc(file_tok.raw.len, sizeof(char)); - sprintf(file_tok.raw.data, "\"%s\"", macro_name->loc.filename); + file_tok.value.string = *cstr_to_new_string(macro_name->loc.filename); file_tok.loc.filename = NULL; file_tok.loc.line = 0; replace_single_pp_token(pp, macro_name_pos, &file_tok); } else if (macro->kind == MacroKind_builtin_line) { Token line_tok; line_tok.kind = TokenKind_literal_int; - line_tok.raw.data = calloc(10, sizeof(char)); - sprintf(line_tok.raw.data, "%d", macro_name->loc.line); - line_tok.raw.len = strlen(line_tok.raw.data); + line_tok.value.integer = macro_name->loc.line; line_tok.loc.filename = NULL; line_tok.loc.line = 0; replace_single_pp_token(pp, macro_name_pos, &line_tok); @@ -19,8 +19,8 @@ int isalnum(int); int isalpha(int); int isdigit(int); int isspace(int); -void* memcpy(void*, void*, size_t); -void* memmove(void*, void*, size_t); +void* memcpy(void*, const void*, size_t); +void* memmove(void*, const void*, size_t); void* memset(void*, int, size_t); int printf(const char*, ...); void* realloc(void*, size_t); @@ -28,6 +28,7 @@ int sprintf(char*, const char*, ...); int strcmp(const char*, const char*); size_t strlen(const char*); int strncmp(const char*, const char*, size_t); +char* strndup(const char*, size_t); char* strstr(const char*, const char*); #include <stdarg.h> @@ -24,9 +24,9 @@ void tokenize_all(Lexer* l) { tok->loc = pp_tok->loc; if (k == TokenKind_character_constant) { tok->kind = TokenKind_literal_int; - int ch = pp_tok->raw.data[1]; + int ch = pp_tok->value.string.data[1]; if (ch == '\\') { 
- ch = pp_tok->raw.data[2]; + ch = pp_tok->value.string.data[2]; if (ch == 'a') { ch = '\a'; } else if (ch == 'b') { @@ -45,101 +45,94 @@ void tokenize_all(Lexer* l) { ch = '\0'; } } - char* buf = calloc(4, sizeof(char)); - sprintf(buf, "%d", ch); - tok->raw.data = buf; - tok->raw.len = strlen(buf); - } else if (k == TokenKind_literal_str) { - tok->kind = TokenKind_literal_str; - tok->raw.data = pp_tok->raw.data + 1; - tok->raw.len = pp_tok->raw.len - 2; + tok->value.integer = ch; } else if (k == TokenKind_ident) { - if (string_equals_cstr(&pp_tok->raw, "auto")) { + if (string_equals_cstr(&pp_tok->value.string, "auto")) { tok->kind = TokenKind_keyword_auto; - } else if (string_equals_cstr(&pp_tok->raw, "break")) { + } else if (string_equals_cstr(&pp_tok->value.string, "break")) { tok->kind = TokenKind_keyword_break; - } else if (string_equals_cstr(&pp_tok->raw, "case")) { + } else if (string_equals_cstr(&pp_tok->value.string, "case")) { tok->kind = TokenKind_keyword_case; - } else if (string_equals_cstr(&pp_tok->raw, "char")) { + } else if (string_equals_cstr(&pp_tok->value.string, "char")) { tok->kind = TokenKind_keyword_char; - } else if (string_equals_cstr(&pp_tok->raw, "const")) { + } else if (string_equals_cstr(&pp_tok->value.string, "const")) { tok->kind = TokenKind_keyword_const; - } else if (string_equals_cstr(&pp_tok->raw, "continue")) { + } else if (string_equals_cstr(&pp_tok->value.string, "continue")) { tok->kind = TokenKind_keyword_continue; - } else if (string_equals_cstr(&pp_tok->raw, "default")) { + } else if (string_equals_cstr(&pp_tok->value.string, "default")) { tok->kind = TokenKind_keyword_default; - } else if (string_equals_cstr(&pp_tok->raw, "do")) { + } else if (string_equals_cstr(&pp_tok->value.string, "do")) { tok->kind = TokenKind_keyword_do; - } else if (string_equals_cstr(&pp_tok->raw, "double")) { + } else if (string_equals_cstr(&pp_tok->value.string, "double")) { tok->kind = TokenKind_keyword_double; - } else if 
(string_equals_cstr(&pp_tok->raw, "else")) { + } else if (string_equals_cstr(&pp_tok->value.string, "else")) { tok->kind = TokenKind_keyword_else; - } else if (string_equals_cstr(&pp_tok->raw, "enum")) { + } else if (string_equals_cstr(&pp_tok->value.string, "enum")) { tok->kind = TokenKind_keyword_enum; - } else if (string_equals_cstr(&pp_tok->raw, "extern")) { + } else if (string_equals_cstr(&pp_tok->value.string, "extern")) { tok->kind = TokenKind_keyword_extern; - } else if (string_equals_cstr(&pp_tok->raw, "float")) { + } else if (string_equals_cstr(&pp_tok->value.string, "float")) { tok->kind = TokenKind_keyword_float; - } else if (string_equals_cstr(&pp_tok->raw, "for")) { + } else if (string_equals_cstr(&pp_tok->value.string, "for")) { tok->kind = TokenKind_keyword_for; - } else if (string_equals_cstr(&pp_tok->raw, "goto")) { + } else if (string_equals_cstr(&pp_tok->value.string, "goto")) { tok->kind = TokenKind_keyword_goto; - } else if (string_equals_cstr(&pp_tok->raw, "if")) { + } else if (string_equals_cstr(&pp_tok->value.string, "if")) { tok->kind = TokenKind_keyword_if; - } else if (string_equals_cstr(&pp_tok->raw, "inline")) { + } else if (string_equals_cstr(&pp_tok->value.string, "inline")) { tok->kind = TokenKind_keyword_inline; - } else if (string_equals_cstr(&pp_tok->raw, "int")) { + } else if (string_equals_cstr(&pp_tok->value.string, "int")) { tok->kind = TokenKind_keyword_int; - } else if (string_equals_cstr(&pp_tok->raw, "long")) { + } else if (string_equals_cstr(&pp_tok->value.string, "long")) { tok->kind = TokenKind_keyword_long; - } else if (string_equals_cstr(&pp_tok->raw, "register")) { + } else if (string_equals_cstr(&pp_tok->value.string, "register")) { tok->kind = TokenKind_keyword_register; - } else if (string_equals_cstr(&pp_tok->raw, "restrict")) { + } else if (string_equals_cstr(&pp_tok->value.string, "restrict")) { tok->kind = TokenKind_keyword_restrict; - } else if (string_equals_cstr(&pp_tok->raw, "return")) { + } else if 
(string_equals_cstr(&pp_tok->value.string, "return")) { tok->kind = TokenKind_keyword_return; - } else if (string_equals_cstr(&pp_tok->raw, "short")) { + } else if (string_equals_cstr(&pp_tok->value.string, "short")) { tok->kind = TokenKind_keyword_short; - } else if (string_equals_cstr(&pp_tok->raw, "signed")) { + } else if (string_equals_cstr(&pp_tok->value.string, "signed")) { tok->kind = TokenKind_keyword_signed; - } else if (string_equals_cstr(&pp_tok->raw, "sizeof")) { + } else if (string_equals_cstr(&pp_tok->value.string, "sizeof")) { tok->kind = TokenKind_keyword_sizeof; - } else if (string_equals_cstr(&pp_tok->raw, "static")) { + } else if (string_equals_cstr(&pp_tok->value.string, "static")) { tok->kind = TokenKind_keyword_static; - } else if (string_equals_cstr(&pp_tok->raw, "struct")) { + } else if (string_equals_cstr(&pp_tok->value.string, "struct")) { tok->kind = TokenKind_keyword_struct; - } else if (string_equals_cstr(&pp_tok->raw, "switch")) { + } else if (string_equals_cstr(&pp_tok->value.string, "switch")) { tok->kind = TokenKind_keyword_switch; - } else if (string_equals_cstr(&pp_tok->raw, "typedef")) { + } else if (string_equals_cstr(&pp_tok->value.string, "typedef")) { tok->kind = TokenKind_keyword_typedef; - } else if (string_equals_cstr(&pp_tok->raw, "union")) { + } else if (string_equals_cstr(&pp_tok->value.string, "union")) { tok->kind = TokenKind_keyword_union; - } else if (string_equals_cstr(&pp_tok->raw, "unsigned")) { + } else if (string_equals_cstr(&pp_tok->value.string, "unsigned")) { tok->kind = TokenKind_keyword_unsigned; - } else if (string_equals_cstr(&pp_tok->raw, "void")) { + } else if (string_equals_cstr(&pp_tok->value.string, "void")) { tok->kind = TokenKind_keyword_void; - } else if (string_equals_cstr(&pp_tok->raw, "volatile")) { + } else if (string_equals_cstr(&pp_tok->value.string, "volatile")) { tok->kind = TokenKind_keyword_volatile; - } else if (string_equals_cstr(&pp_tok->raw, "while")) { + } else if 
(string_equals_cstr(&pp_tok->value.string, "while")) { tok->kind = TokenKind_keyword_while; - } else if (string_equals_cstr(&pp_tok->raw, "_Bool")) { + } else if (string_equals_cstr(&pp_tok->value.string, "_Bool")) { tok->kind = TokenKind_keyword__Bool; - } else if (string_equals_cstr(&pp_tok->raw, "_Complex")) { + } else if (string_equals_cstr(&pp_tok->value.string, "_Complex")) { tok->kind = TokenKind_keyword__Complex; - } else if (string_equals_cstr(&pp_tok->raw, "_Imaginary")) { + } else if (string_equals_cstr(&pp_tok->value.string, "_Imaginary")) { tok->kind = TokenKind_keyword__Imaginary; - } else if (string_equals_cstr(&pp_tok->raw, "va_start")) { + } else if (string_equals_cstr(&pp_tok->value.string, "va_start")) { tok->kind = TokenKind_va_start; - tok->raw = pp_tok->raw; + tok->value = pp_tok->value; } else { tok->kind = TokenKind_ident; - tok->raw = pp_tok->raw; + tok->value = pp_tok->value; } } else if (k == TokenKind_other) { unreachable(); } else { tok->kind = pp_tok->kind; - tok->raw = pp_tok->raw; + tok->value = pp_tok->value; } } } |
