about summary refs log tree commit diff homepage
diff options
context:
space:
mode:
author    nsfisis <nsfisis@gmail.com> 2025-08-13 04:17:46 +0900
committer nsfisis <nsfisis@gmail.com> 2025-08-15 10:06:21 +0900
commit    80e1d0fc957a2f254ca94232cca0269567888ddb (patch)
tree      92ba2a4de23a9978c55fc375df739c4ac736cacd
parent    15c8a2f45b3916f840665b317afb344cbc08f5a4 (diff)
download  ducc-80e1d0fc957a2f254ca94232cca0269567888ddb.tar.gz
          ducc-80e1d0fc957a2f254ca94232cca0269567888ddb.tar.zst
          ducc-80e1d0fc957a2f254ca94232cca0269567888ddb.zip
feat: implement block-based variable scope
-rw-r--r--  parse.c       57
-rw-r--r--  preprocess.c  27
-rw-r--r--  tokenize.c     4
3 files changed, 37 insertions(+), 51 deletions(-)
diff --git a/parse.c b/parse.c
index a1d4aa8..c902952 100644
--- a/parse.c
+++ b/parse.c
@@ -276,23 +276,22 @@ int register_str_literal(Parser* p, char* s) {
AstNode* parse_primary_expr(Parser* p) {
Token* t = next_token(p);
- AstNode* e;
if (t->kind == TokenKind_literal_int) {
return ast_new_int(atoi(string_to_cstr(&t->raw)));
} else if (t->kind == TokenKind_literal_str) {
- e = ast_new(AstNodeKind_str_expr);
+ AstNode* e = ast_new(AstNodeKind_str_expr);
e->node_idx = register_str_literal(p, string_to_cstr(&t->raw));
e->ty = type_new_static_string(t->raw.len);
return e;
} else if (t->kind == TokenKind_paren_l) {
- e = parse_expr(p);
+ AstNode* e = parse_expr(p);
expect(p, TokenKind_paren_r);
return e;
} else if (t->kind == TokenKind_ident || t->kind == TokenKind_va_start) {
String* name = &t->raw;
if (peek_token(p)->kind == TokenKind_paren_l) {
- e = ast_new(AstNodeKind_func_call);
+ AstNode* e = ast_new(AstNodeKind_func_call);
int func_idx = find_func(p, name);
if (func_idx == -1) {
fatal_error("undefined function: %.*s", name->len, name->data);
@@ -312,18 +311,18 @@ AstNode* parse_primary_expr(Parser* p) {
}
int enum_idx = enum_member_idx / 1000;
int n = enum_member_idx % 1000;
- e = ast_new_int(p->enums[enum_idx].node_members->node_items[n].node_int_value);
+ AstNode* e = ast_new_int(p->enums[enum_idx].node_members->node_items[n].node_int_value);
e->ty = type_new(TypeKind_enum);
e->ty->def = p->enums + enum_idx;
return e;
}
- e = ast_new(AstNodeKind_gvar);
+ AstNode* e = ast_new(AstNodeKind_gvar);
e->name = *name;
e->ty = p->gvars[gvar_idx].ty;
return e;
}
- e = ast_new(AstNodeKind_lvar);
+ AstNode* e = ast_new(AstNodeKind_lvar);
e->name = *name;
e->node_stack_offset = p->lvars[lvar_idx].stack_offset;
e->ty = p->lvars[lvar_idx].ty;
@@ -379,7 +378,6 @@ AstNode* create_new_postfix_inc_or_dec(Parser* p, AstNode* e, TokenKind op) {
AstNode* parse_postfix_expr(Parser* p) {
AstNode* ret = parse_primary_expr(p);
- String* name;
while (1) {
TokenKind tk = peek_token(p)->kind;
if (tk == TokenKind_paren_l) {
@@ -395,11 +393,11 @@ AstNode* parse_postfix_expr(Parser* p) {
ret = ast_new_deref_expr(ast_new_binary_expr(TokenKind_plus, ret, idx));
} else if (tk == TokenKind_dot) {
next_token(p);
- name = parse_ident(p);
+ String* name = parse_ident(p);
ret = ast_new_member_access_expr(ast_new_ref_expr(ret), name);
} else if (tk == TokenKind_arrow) {
next_token(p);
- name = parse_ident(p);
+ String* name = parse_ident(p);
ret = ast_new_member_access_expr(ret, name);
} else if (tk == TokenKind_plusplus) {
next_token(p);
@@ -430,7 +428,6 @@ BOOL is_type_token(Parser* p, Token* token) {
Type* parse_type(Parser* p) {
Token* t = next_token(p);
- String* name;
if (t->kind == TokenKind_keyword_const) {
t = next_token(p);
}
@@ -458,7 +455,7 @@ Type* parse_type(Parser* p) {
ty->kind = TypeKind_void;
} else if (t->kind == TokenKind_keyword_enum) {
ty->kind = TypeKind_enum;
- name = parse_ident(p);
+ String* name = parse_ident(p);
int enum_idx = find_enum(p, name);
if (enum_idx == -1) {
fatal_error("parse_type: unknown enum, %.*s", name->len, name->data);
@@ -466,7 +463,7 @@ Type* parse_type(Parser* p) {
ty->def = p->enums + enum_idx;
} else if (t->kind == TokenKind_keyword_struct) {
ty->kind = TypeKind_struct;
- name = parse_ident(p);
+ String* name = parse_ident(p);
int struct_idx = find_struct(p, name);
if (struct_idx == -1) {
fatal_error("parse_type: unknown struct, %.*s", name->len, name->data);
@@ -474,7 +471,7 @@ Type* parse_type(Parser* p) {
ty->def = p->structs + struct_idx;
} else if (t->kind == TokenKind_keyword_union) {
ty->kind = TypeKind_union;
- name = parse_ident(p);
+ String* name = parse_ident(p);
int union_idx = find_union(p, name);
if (union_idx == -1) {
fatal_error("parse_type: unknown union, %.*s", name->len, name->data);
@@ -496,31 +493,30 @@ Type* parse_type(Parser* p) {
}
AstNode* parse_prefix_expr(Parser* p) {
- AstNode* operand;
TokenKind op = peek_token(p)->kind;
if (op == TokenKind_minus) {
next_token(p);
- operand = parse_prefix_expr(p);
+ AstNode* operand = parse_prefix_expr(p);
return ast_new_binary_expr(op, ast_new_int(0), operand);
} else if (op == TokenKind_not) {
next_token(p);
- operand = parse_prefix_expr(p);
+ AstNode* operand = parse_prefix_expr(p);
return ast_new_unary_expr(op, operand);
} else if (op == TokenKind_and) {
next_token(p);
- operand = parse_prefix_expr(p);
+ AstNode* operand = parse_prefix_expr(p);
return ast_new_ref_expr(operand);
} else if (op == TokenKind_star) {
next_token(p);
- operand = parse_prefix_expr(p);
+ AstNode* operand = parse_prefix_expr(p);
return ast_new_deref_expr(operand);
} else if (op == TokenKind_plusplus) {
next_token(p);
- operand = parse_prefix_expr(p);
+ AstNode* operand = parse_prefix_expr(p);
return ast_new_assign_add_expr(operand, ast_new_int(1));
} else if (op == TokenKind_minusminus) {
next_token(p);
- operand = parse_prefix_expr(p);
+ AstNode* operand = parse_prefix_expr(p);
return ast_new_assign_sub_expr(operand, ast_new_int(1));
} else if (op == TokenKind_keyword_sizeof) {
next_token(p);
@@ -565,12 +561,11 @@ AstNode* parse_multiplicative_expr(Parser* p) {
AstNode* parse_additive_expr(Parser* p) {
AstNode* lhs = parse_multiplicative_expr(p);
- AstNode* rhs;
while (1) {
TokenKind op = peek_token(p)->kind;
if (op == TokenKind_plus) {
next_token(p);
- rhs = parse_multiplicative_expr(p);
+ AstNode* rhs = parse_multiplicative_expr(p);
if (lhs->ty->base) {
lhs = ast_new_binary_expr(
op, lhs, ast_new_binary_expr(TokenKind_star, rhs, ast_new_int(type_sizeof(lhs->ty->base))));
@@ -582,7 +577,7 @@ AstNode* parse_additive_expr(Parser* p) {
}
} else if (op == TokenKind_minus) {
next_token(p);
- rhs = parse_multiplicative_expr(p);
+ AstNode* rhs = parse_multiplicative_expr(p);
if (lhs->ty->base) {
if (rhs->ty->base) {
// (a - b) / sizeof(a)
@@ -605,20 +600,19 @@ AstNode* parse_additive_expr(Parser* p) {
AstNode* parse_relational_expr(Parser* p) {
AstNode* lhs = parse_additive_expr(p);
- AstNode* rhs;
while (1) {
TokenKind op = peek_token(p)->kind;
if (op == TokenKind_lt || op == TokenKind_le) {
next_token(p);
- rhs = parse_additive_expr(p);
+ AstNode* rhs = parse_additive_expr(p);
lhs = ast_new_binary_expr(op, lhs, rhs);
} else if (op == TokenKind_gt) {
next_token(p);
- rhs = parse_additive_expr(p);
+ AstNode* rhs = parse_additive_expr(p);
lhs = ast_new_binary_expr(TokenKind_lt, rhs, lhs);
} else if (op == TokenKind_ge) {
next_token(p);
- rhs = parse_additive_expr(p);
+ AstNode* rhs = parse_additive_expr(p);
lhs = ast_new_binary_expr(TokenKind_le, rhs, lhs);
} else {
break;
@@ -700,21 +694,20 @@ AstNode* parse_logical_or_expr(Parser* p) {
AstNode* parse_assignment_expr(Parser* p) {
AstNode* lhs = parse_logical_or_expr(p);
- AstNode* rhs;
while (1) {
TokenKind op = peek_token(p)->kind;
if (op == TokenKind_assign || op == TokenKind_assign_mul || op == TokenKind_assign_div ||
op == TokenKind_assign_mod) {
next_token(p);
- rhs = parse_logical_or_expr(p);
+ AstNode* rhs = parse_logical_or_expr(p);
lhs = ast_new_assign_expr(op, lhs, rhs);
} else if (op == TokenKind_assign_add) {
next_token(p);
- rhs = parse_logical_or_expr(p);
+ AstNode* rhs = parse_logical_or_expr(p);
lhs = ast_new_assign_add_expr(lhs, rhs);
} else if (op == TokenKind_assign_sub) {
next_token(p);
- rhs = parse_logical_or_expr(p);
+ AstNode* rhs = parse_logical_or_expr(p);
lhs = ast_new_assign_sub_expr(lhs, rhs);
} else {
break;
diff --git a/preprocess.c b/preprocess.c
index f11bde2..d375430 100644
--- a/preprocess.c
+++ b/preprocess.c
@@ -448,9 +448,6 @@ BOOL skip_pp_tokens(Preprocessor* pp) {
}
void pp_tokenize_all(Preprocessor* pp) {
- char* buf;
- int ch;
- int start;
while (pp->src[pp->pos]) {
Token* tok = pp->pp_tokens + pp->n_pp_tokens;
tok->loc.filename = pp->filename;
@@ -522,7 +519,7 @@ void pp_tokenize_all(Preprocessor* pp) {
++pp->pos;
tok->kind = TokenKind_assign_div;
} else if (pp->src[pp->pos] == '/') {
- start = pp->pos - 1;
+ int start = pp->pos - 1;
++pp->pos;
while (pp->src[pp->pos] && pp->src[pp->pos] != '\n' && pp->src[pp->pos] != '\r') {
++pp->pos;
@@ -531,7 +528,7 @@ void pp_tokenize_all(Preprocessor* pp) {
tok->raw.len = pp->pos - start;
tok->raw.data = pp->src + pp->pos - tok->raw.len;
} else if (pp->src[pp->pos] == '*') {
- start = pp->pos - 1;
+ int start = pp->pos - 1;
++pp->pos;
while (pp->src[pp->pos]) {
if (pp->src[pp->pos] == '*' && pp->src[pp->pos + 1] == '/') {
@@ -608,7 +605,7 @@ void pp_tokenize_all(Preprocessor* pp) {
tok->kind = TokenKind_hash;
}
} else if (c == '\'') {
- start = pp->pos - 1;
+ int start = pp->pos - 1;
if (pp->src[pp->pos] == '\\') {
++pp->pos;
}
@@ -617,9 +614,9 @@ void pp_tokenize_all(Preprocessor* pp) {
tok->raw.data = pp->src + start;
tok->raw.len = pp->pos - start;
} else if (c == '"') {
- start = pp->pos - 1;
+ int start = pp->pos - 1;
while (1) {
- ch = pp->src[pp->pos];
+ char ch = pp->src[pp->pos];
if (ch == '\\') {
++pp->pos;
} else if (ch == '"') {
@@ -633,7 +630,7 @@ void pp_tokenize_all(Preprocessor* pp) {
tok->raw.len = pp->pos - start;
} else if (isdigit(c)) {
--pp->pos;
- start = pp->pos;
+ int start = pp->pos;
while (isdigit(pp->src[pp->pos])) {
++pp->pos;
}
@@ -642,7 +639,7 @@ void pp_tokenize_all(Preprocessor* pp) {
tok->raw.len = pp->pos - start;
} else if (isalpha(c) || c == '_') {
--pp->pos;
- start = pp->pos;
+ int start = pp->pos;
while (isalnum(pp->src[pp->pos]) || pp->src[pp->pos] == '_') {
++pp->pos;
}
@@ -856,14 +853,13 @@ Token* read_include_header_name(Token* tok2, String* include_name) {
}
const char* resolve_include_name(Preprocessor* pp, String* include_name) {
- char* buf;
if (include_name->data[0] == '"') {
- buf = calloc(include_name->len - 2 + 1, sizeof(char));
+ char* buf = calloc(include_name->len - 2 + 1, sizeof(char));
sprintf(buf, "%.*s", include_name->len - 2, include_name->data + 1);
return buf;
} else {
for (int i = 0; i < pp->n_include_paths; ++i) {
- buf = calloc(include_name->len + 1 + pp->include_paths[i].len, sizeof(char));
+ char* buf = calloc(include_name->len + 1 + pp->include_paths[i].len, sizeof(char));
sprintf(buf, "%s/%.*s", pp->include_paths[i].data, include_name->len, include_name->data);
if (access(buf, F_OK | R_OK) == 0) {
return buf;
@@ -921,7 +917,6 @@ Token* process_include_directive(Preprocessor* pp, Token* tok, Token* tok2) {
Token* process_define_directive(Preprocessor* pp, Token* tok, Token* tok2) {
Token* tok3 = NULL;
- PpMacro* pp_macro;
++tok2;
tok2 = skip_whitespace(tok2);
if (tok2->kind != TokenKind_ident) {
@@ -942,7 +937,7 @@ Token* process_define_directive(Preprocessor* pp, Token* tok, Token* tok2) {
if (!tok3) {
fatal_error("%s:%s: invalid #define syntax", tok3->loc.filename, tok3->loc.line);
}
- pp_macro = pp_macros_push_new(pp->pp_macros);
+ PpMacro* pp_macro = pp_macros_push_new(pp->pp_macros);
pp_macro->kind = PpMacroKind_func;
pp_macro->name = macro_name->raw;
pp_macro->n_replacements = tok3 - tok2;
@@ -955,7 +950,7 @@ Token* process_define_directive(Preprocessor* pp, Token* tok, Token* tok2) {
if (!tok3) {
fatal_error("%s:%s: invalid #define syntax", tok3->loc.filename, tok3->loc.line);
}
- pp_macro = pp_macros_push_new(pp->pp_macros);
+ PpMacro* pp_macro = pp_macros_push_new(pp->pp_macros);
pp_macro->kind = PpMacroKind_obj;
pp_macro->name = macro_name->raw;
pp_macro->n_replacements = tok3 - tok2;
diff --git a/tokenize.c b/tokenize.c
index e6c61c0..de4488e 100644
--- a/tokenize.c
+++ b/tokenize.c
@@ -14,8 +14,6 @@ Lexer* lexer_new(Token* pp_tokens) {
}
void tokenize_all(Lexer* l) {
- int ch;
- int start;
while (l->src[l->pos].kind != TokenKind_eof) {
Token* pp_tok = l->src + l->pos;
Token* tok = l->tokens + l->n_tokens;
@@ -24,7 +22,7 @@ void tokenize_all(Lexer* l) {
++l->pos;
if (k == TokenKind_character_constant) {
tok->kind = TokenKind_literal_int;
- ch = pp_tok->raw.data[1];
+ int ch = pp_tok->raw.data[1];
if (ch == '\\') {
ch = pp_tok->raw.data[2];
if (ch == 'a') {