diff options
| author | nsfisis <nsfisis@gmail.com> | 2025-08-29 21:59:03 +0900 |
|---|---|---|
| committer | nsfisis <nsfisis@gmail.com> | 2025-08-29 21:59:25 +0900 |
| commit | c56290f726cb1cf9b88e5e0d4a35d0673b41e27c (patch) | |
| tree | ac65b8d2fd2c4c9e8954b7266badd9d860c98c0c /src | |
| parent | 3244eb68cb12aacb5f262223d2c7b01e5f638e59 (diff) | |
| download | ducc-c56290f726cb1cf9b88e5e0d4a35d0673b41e27c.tar.gz ducc-c56290f726cb1cf9b88e5e0d4a35d0673b41e27c.tar.zst ducc-c56290f726cb1cf9b88e5e0d4a35d0673b41e27c.zip | |
refactor: add stricter types
Diffstat (limited to 'src')
| -rw-r--r-- | src/parse.c | 6 | ||||
| -rw-r--r-- | src/preprocess.c | 18 | ||||
| -rw-r--r-- | src/token.c | 7 | ||||
| -rw-r--r-- | src/tokenize.c | 2 |
4 files changed, 18 insertions, 15 deletions
diff --git a/src/parse.c b/src/parse.c index 381d7e1..b33c180 100644 --- a/src/parse.c +++ b/src/parse.c @@ -209,7 +209,7 @@ static Token* expect(Parser* p, TokenKind expected) { } static int find_lvar_in_scope(Parser* p, Scope* scope, const char* name) { - for (int i = 0; i < scope->syms.len; ++i) { + for (size_t i = 0; i < scope->syms.len; ++i) { ScopedSymbol* sym = &scope->syms.data[i]; if (sym->name && strcmp(sym->name, name) == 0) { return sym->index; @@ -277,7 +277,7 @@ static AstNode* generate_temporary_lvar(Parser* p, Type* ty) { } static int find_gvar(Parser* p, const char* name) { - for (int i = 0; i < p->gvars.len; ++i) { + for (size_t i = 0; i < p->gvars.len; ++i) { if (strcmp(p->gvars.data[i].name, name) == 0) { return i; } @@ -286,7 +286,7 @@ static int find_gvar(Parser* p, const char* name) { } static int find_func(Parser* p, const char* name) { - for (int i = 0; i < p->funcs.len; ++i) { + for (size_t i = 0; i < p->funcs.len; ++i) { if (strcmp(p->funcs.data[i].name, name) == 0) { return i; } diff --git a/src/preprocess.c b/src/preprocess.c index 5c8d888..2a2bf57 100644 --- a/src/preprocess.c +++ b/src/preprocess.c @@ -39,7 +39,7 @@ static int macro_find_param(Macro* macro, Token* tok) { if (tok->kind != TokenKind_ident) return -1; - for (int i = 0; i < macro->parameters.len; ++i) { + for (size_t i = 0; i < macro->parameters.len; ++i) { if (strcmp(macro->parameters.data[i].value.string, tok->value.string) == 0) { return i; } @@ -599,7 +599,7 @@ static BOOL pp_eof(Preprocessor* pp) { } static int find_macro(Preprocessor* pp, const char* name) { - for (int i = 0; i < pp->macros->len; ++i) { + for (size_t i = 0; i < pp->macros->len; ++i) { if (pp->macros->data[i].kind == MacroKind_undef) continue; if (strcmp(pp->macros->data[i].name, name) == 0) { @@ -667,9 +667,9 @@ static const char* resolve_include_name(Preprocessor* pp, const Token* include_n } static int replace_pp_tokens(Preprocessor* pp, int dest_start, int dest_end, TokenArray* source_tokens) { - 
int n_tokens_to_remove = dest_end - dest_start; - int n_tokens_after_dest = pp->pp_tokens->len - dest_end; - int shift_amount; + size_t n_tokens_to_remove = dest_end - dest_start; + size_t n_tokens_after_dest = pp->pp_tokens->len - dest_end; + size_t shift_amount; if (n_tokens_to_remove < source_tokens->len) { // Move existing tokens backward to make room. @@ -692,7 +692,7 @@ static int replace_pp_tokens(Preprocessor* pp, int dest_start, int dest_end, Tok return dest_start + source_tokens->len; } -static int replace_single_pp_token(Preprocessor* pp, int dest, Token* source_tok) { +static void replace_single_pp_token(Preprocessor* pp, int dest, Token* source_tok) { TokenArray tokens; tokens_init(&tokens, 1); *tokens_push_new(&tokens) = *source_tok; @@ -775,7 +775,7 @@ static BOOL expand_macro(Preprocessor* pp) { if (macro->kind == MacroKind_func) { MacroArgArray* args = pp_parse_macro_arguments(pp); replace_pp_tokens(pp, macro_name_pos, pp->pos, &macro->replacements); - for (int i = 0; i < macro->replacements.len; ++i) { + for (size_t i = 0; i < macro->replacements.len; ++i) { Token* tok = pp_token_at(pp, macro_name_pos + i); int macro_param_idx = macro_find_param(macro, tok); if (macro_param_idx != -1) { @@ -783,13 +783,13 @@ } } // Inherit a source location from the original macro token. - for (int i = 0; i < macro->replacements.len; ++i) { + for (size_t i = 0; i < macro->replacements.len; ++i) { pp_token_at(pp, macro_name_pos + i)->loc = original_loc; } } else if (macro->kind == MacroKind_obj) { replace_pp_tokens(pp, macro_name_pos, macro_name_pos + 1, &macro->replacements); // Inherit a source location from the original macro token. 
- for (int i = 0; i < macro->replacements.len; ++i) { + for (size_t i = 0; i < macro->replacements.len; ++i) { pp_token_at(pp, macro_name_pos + i)->loc = original_loc; } } else if (macro->kind == MacroKind_builtin_file) { diff --git a/src/token.c b/src/token.c index 74105f3..21f9274 100644 --- a/src/token.c +++ b/src/token.c @@ -353,8 +353,11 @@ Token* tokens_push_new(TokenArray* tokens) { } Token* tokens_pop(TokenArray* tokens) { - if (tokens->len != 0) - tokens->len--; + if (tokens->len == 0) { + return NULL; + } else { + return &tokens->data[--tokens->len]; + } } void tokens_build_json(JsonBuilder* builder, TokenArray* tokens) { diff --git a/src/tokenize.c b/src/tokenize.c index 352755b..b0e1447 100644 --- a/src/tokenize.c +++ b/src/tokenize.c @@ -17,7 +17,7 @@ static Lexer* lexer_new(TokenArray* pp_tokens) { } static void tokenize_all(Lexer* l) { - for (int pos = 0; pos < l->src->len; ++pos) { + for (size_t pos = 0; pos < l->src->len; ++pos) { Token* pp_tok = &l->src->data[pos]; TokenKind k = pp_tok->kind; if (k == TokenKind_whitespace || k == TokenKind_newline) { |
