author    nsfisis <nsfisis@gmail.com>  2025-08-04 06:10:33 +0900
committer nsfisis <nsfisis@gmail.com>  2025-08-15 10:06:21 +0900
commit    1edc3a61992e24ab95306ad5506e6b33cbcf765a
tree      447dd7722fc784c03aa59159d69c609bef4c7a98
parent    6f880873fbc49f44df6d2d19a76ceeb4052890c0
refactor: define BOOL/TRUE/FALSE macros
 ast.c        |  2
 common.c     |  4
 parse.c      | 22
 preprocess.c | 31
 std.h        |  4
 5 files changed, 36 insertions, 27 deletions
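The change is mechanical: predicate functions that previously returned a bare int now return BOOL, and literal 0/1 flags become FALSE/TRUE. Below is a minimal standalone sketch of the resulting convention, adapted from the string_equals_cstr hunk in common.c; the String definition here is a simplified stand-in, not ducc's actual struct.

```c
#include <stddef.h>
#include <string.h>

/* Same definitions the commit adds to std.h. */
#define BOOL int
#define TRUE 1
#define FALSE 0

/* Simplified stand-in for ducc's String type (field layout assumed). */
typedef struct {
    const char* data;
    size_t len;
} String;

/* Predicate in the new style: BOOL return type, TRUE/FALSE results. */
BOOL string_equals_cstr(const String* s1, const char* s2) {
    size_t s2_len = strlen(s2);
    if (s1->len != s2_len) {
        return FALSE;
    }
    return strncmp(s1->data, s2, s1->len) == 0 ? TRUE : FALSE;
}
```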
diff --git a/ast.c b/ast.c
index 3eafa94..8f474ac 100644
--- a/ast.c
+++ b/ast.c
@@ -87,7 +87,7 @@ Type* type_array_to_ptr(Type* ty) {
return type_new_ptr(ty->base);
}
-int type_is_unsized(Type* ty) {
+BOOL type_is_unsized(Type* ty) {
return ty->kind == TypeKind_void;
}
diff --git a/common.c b/common.c
index 6b1cb8f..badb1ed 100644
--- a/common.c
+++ b/common.c
@@ -23,11 +23,11 @@ char* string_to_cstr(const String* s) {
return buf;
}
-int string_equals(const String* s1, const String* s2) {
+BOOL string_equals(const String* s1, const String* s2) {
return s1->len == s2->len && strncmp(s1->data, s2->data, s1->len) == 0;
}
-int string_equals_cstr(const String* s1, const char* s2) {
+BOOL string_equals_cstr(const String* s1, const char* s2) {
size_t s2_len = strlen(s2);
return s1->len == s2_len && strncmp(s1->data, s2, s1->len) == 0;
}
diff --git a/parse.c b/parse.c
index 2f856bd..f8ad4f1 100644
--- a/parse.c
+++ b/parse.c
@@ -70,11 +70,11 @@ Token* next_token(Parser* p) {
return p->tokens + p->pos - 1;
}
-int eof(Parser* p) {
+BOOL eof(Parser* p) {
return peek_token(p)->kind != TokenKind_eof;
}
-Token* expect(Parser* p, int expected) {
+Token* expect(Parser* p, TokenKind expected) {
Token* t = next_token(p);
if (t->kind == expected) {
return t;
@@ -92,7 +92,7 @@ int find_lvar(Parser* p, const String* name) {
return -1;
}
-int calc_stack_offset(Parser* p, Type* ty, int is_param) {
+int calc_stack_offset(Parser* p, Type* ty, BOOL is_param) {
int align;
if (is_param) {
if (8 < type_sizeof(ty) || 8 < type_alignof(ty)) {
@@ -114,7 +114,7 @@ int calc_stack_offset(Parser* p, Type* ty, int is_param) {
return to_aligned(offset, align);
}
-int add_lvar(Parser* p, String* name, Type* ty, int is_param) {
+int add_lvar(Parser* p, String* name, Type* ty, BOOL is_param) {
int stack_offset = calc_stack_offset(p, ty, is_param);
p->lvars[p->n_lvars].name = *name;
p->lvars[p->n_lvars].ty = ty;
@@ -137,7 +137,7 @@ String* generate_temporary_lvar_name(Parser* p) {
AstNode* generate_temporary_lvar(Parser* p, Type* ty) {
String* name = generate_temporary_lvar_name(p);
- int stack_offset = add_lvar(p, name, ty, 0);
+ int stack_offset = add_lvar(p, name, ty, FALSE);
AstNode* lvar = ast_new(AstNodeKind_lvar);
lvar->name = *name;
lvar->node_stack_offset = stack_offset;
@@ -370,16 +370,16 @@ AstNode* parse_postfix_expr(Parser* p) {
return ret;
}
-int is_type_token(Parser* p, Token* token) {
+BOOL is_type_token(Parser* p, Token* token) {
if (token->kind == TokenKind_keyword_int || token->kind == TokenKind_keyword_short ||
token->kind == TokenKind_keyword_long || token->kind == TokenKind_keyword_char ||
token->kind == TokenKind_keyword_void || token->kind == TokenKind_keyword_enum ||
token->kind == TokenKind_keyword_struct || token->kind == TokenKind_keyword_union ||
token->kind == TokenKind_keyword_const) {
- return 1;
+ return TRUE;
}
if (token->kind != TokenKind_ident) {
- return 0;
+ return FALSE;
}
return find_typedef(p, &token->raw) != -1;
}
@@ -809,7 +809,7 @@ AstNode* parse_var_decl(Parser* p) {
if (find_lvar(p, name) != -1 || find_gvar(p, name) != -1) {
fatal_error("parse_var_decl: %.*s redeclared", name->len, name->data);
}
- int stack_offset = add_lvar(p, name, ty, 0);
+ int stack_offset = add_lvar(p, name, ty, FALSE);
AstNode* ret;
if (init) {
@@ -886,7 +886,7 @@ void register_params(Parser* p, AstNode* params) {
int i;
for (i = 0; i < params->node_len; ++i) {
AstNode* param = params->node_items + i;
- add_lvar(p, &param->name, param->ty, 1);
+ add_lvar(p, &param->name, param->ty, TRUE);
}
}
@@ -912,7 +912,7 @@ AstNode* parse_param(Parser* p) {
}
AstNode* parse_param_list(Parser* p) {
- int has_void = 0;
+ BOOL has_void = FALSE;
AstNode* list = ast_new_list(6);
while (peek_token(p)->kind != TokenKind_paren_r) {
if (peek_token(p)->kind == TokenKind_ellipsis) {
diff --git a/preprocess.c b/preprocess.c
index ea900fe..066a77f 100644
--- a/preprocess.c
+++ b/preprocess.c
@@ -268,7 +268,7 @@ struct Preprocessor {
int n_pp_tokens;
PpMacros* pp_macros;
int include_depth;
- int skip_pp_tokens;
+ BOOL skip_pp_tokens;
String* include_paths;
int n_include_paths;
};
@@ -368,7 +368,7 @@ void add_include_path(Preprocessor* pp, char* include_path) {
++pp->n_include_paths;
}
-int skip_pp_tokens(Preprocessor* pp) {
+BOOL skip_pp_tokens(Preprocessor* pp) {
// TODO: support nested #if
return pp->skip_pp_tokens;
}
@@ -629,14 +629,14 @@ Token* skip_whitespace(Token* tok) {
return tok;
}
-int string_contains_newline(String* s) {
+BOOL string_contains_newline(String* s) {
int i;
for (i = 0; i < s->len; ++i) {
if (s->data[i] == '\n') {
- return 1;
+ return TRUE;
}
}
- return 0;
+ return FALSE;
}
Token* find_next_newline(Token* tok) {
@@ -667,7 +667,7 @@ Token* process_endif_directive(Preprocessor* pp, Token* tok) {
Token* tok2 = skip_whitespace(tok + 1);
if (tok2->kind == TokenKind_ident && string_equals_cstr(&tok2->raw, "endif")) {
++tok2;
- pp->skip_pp_tokens = 0;
+ pp->skip_pp_tokens = FALSE;
remove_directive_tokens(tok, tok2);
return tok2;
}
@@ -678,7 +678,7 @@ Token* process_else_directive(Preprocessor* pp, Token* tok) {
Token* tok2 = skip_whitespace(tok + 1);
if (tok2->kind == TokenKind_keyword_else) {
++tok2;
- pp->skip_pp_tokens = 1 - pp->skip_pp_tokens;
+ pp->skip_pp_tokens = !pp->skip_pp_tokens;
remove_directive_tokens(tok, tok2);
return tok2;
}
@@ -867,10 +867,10 @@ Token* process_define_directive(Preprocessor* pp, Token* tok) {
return NULL;
}
-int expand_macro(Preprocessor* pp, Token* tok) {
+BOOL expand_macro(Preprocessor* pp, Token* tok) {
int pp_macro_idx = find_pp_macro(pp, &tok->raw);
if (pp_macro_idx == -1) {
- return 0;
+ return FALSE;
}
int i;
@@ -906,14 +906,19 @@ int expand_macro(Preprocessor* pp, Token* tok) {
} else {
unreachable();
}
- return 1;
+ return TRUE;
+}
+
+BOOL is_pp_hash(Token* t) {
+ // TODO: '#' must be at the beginning of the line.
+ return t->kind == TokenKind_hash;
}
void process_pp_directives(Preprocessor* pp) {
Token* tok = pp->pp_tokens;
while (tok->kind != TokenKind_eof) {
- if (tok->kind == TokenKind_hash) {
+ if (is_pp_hash(tok)) {
Token* next_tok;
if ((next_tok = process_endif_directive(pp, tok)) != NULL) {
@@ -944,7 +949,7 @@ void process_pp_directives(Preprocessor* pp) {
} else if (skip_pp_tokens(pp)) {
make_token_whitespace(tok);
} else if (tok->kind == TokenKind_ident) {
- int expanded = expand_macro(pp, tok);
+ BOOL expanded = expand_macro(pp, tok);
if (expanded) {
// A macro may expand to another macro. Re-scan the expanded tokens.
// TODO: if the macro is defined recursively, it causes infinite loop.
@@ -955,7 +960,7 @@ void process_pp_directives(Preprocessor* pp) {
}
}
-void pp_dump(Token* t, int include_whitespace) {
+void pp_dump(Token* t, BOOL include_whitespace) {
for (; t->kind != TokenKind_eof; ++t) {
if (t->kind == TokenKind_whitespace && !include_whitespace) {
continue;
diff --git a/std.h b/std.h
index 80eaacd..c625e18 100644
--- a/std.h
+++ b/std.h
@@ -42,3 +42,7 @@ int access(const char*, int);
typedef long ssize_t;
ssize_t readlink(const char*, char*, size_t);
char* dirname(char*);
+
+#define BOOL int
+#define TRUE 1
+#define FALSE 0
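
For comparison only: a hosted C99 program would usually reach for <stdbool.h> instead. The sketch below is a hypothetical equivalent of string_contains_newline and is not part of the commit; defining BOOL/TRUE/FALSE as plain macros over int presumably keeps ducc self-hostable without requiring _Bool, and leaves every existing int-typed caller type-compatible.

```c
#include <stdbool.h>
#include <stddef.h>

/* Hypothetical hosted-C99 version (not from the commit): the same predicate
 * written against <stdbool.h>. ducc instead defines BOOL as a macro for int,
 * so no new type is introduced anywhere in the compiler. */
bool contains_newline(const char* data, size_t len) {
    size_t i;
    for (i = 0; i < len; ++i) {
        if (data[i] == '\n') {
            return true;
        }
    }
    return false;
}
```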