about summary refs log tree commit diff homepage
diff options
context:
space:
mode:
authornsfisis <nsfisis@gmail.com>2025-08-17 08:42:44 +0900
committernsfisis <nsfisis@gmail.com>2025-08-17 08:42:44 +0900
commit41705febbd65aff0e2ef5967f7a9d7c65a0de339 (patch)
tree89a91615f4b106269d37af67711cf5bf81e57fad
parent89a723320a8b7f387948f3edd9c1ce12824424d6 (diff)
downloadducc-41705febbd65aff0e2ef5967f7a9d7c65a0de339.tar.gz
ducc-41705febbd65aff0e2ef5967f7a9d7c65a0de339.tar.zst
ducc-41705febbd65aff0e2ef5967f7a9d7c65a0de339.zip
refactor: duplicate source text in tokenization phase
-rw-r--r--preprocess.c14
1 file changed, 7 insertions, 7 deletions
diff --git a/preprocess.c b/preprocess.c
index a2c1429..5dfb0d5 100644
--- a/preprocess.c
+++ b/preprocess.c
@@ -637,7 +637,7 @@ void pplexer_tokenize_all(PpLexer* ppl) {
}
++ppl->pos;
tok->kind = TokenKind_header_name;
- tok->value.string.data = ppl->src + start;
+ tok->value.string.data = strndup(ppl->src + start, ppl->pos - start);
tok->value.string.len = ppl->pos - start;
ppl->expect_header_name = FALSE;
} else if (ppl->expect_header_name && c == '<') {
@@ -651,7 +651,7 @@ void pplexer_tokenize_all(PpLexer* ppl) {
}
++ppl->pos;
tok->kind = TokenKind_header_name;
- tok->value.string.data = ppl->src + start;
+ tok->value.string.data = strndup(ppl->src + start, ppl->pos - start);
tok->value.string.len = ppl->pos - start;
ppl->expect_header_name = FALSE;
} else if (c == '(') {
@@ -777,7 +777,7 @@ void pplexer_tokenize_all(PpLexer* ppl) {
} else {
tok->kind = TokenKind_other;
tok->value.string.len = 2;
- tok->value.string.data = ppl->src + ppl->pos - tok->value.string.len;
+ tok->value.string.data = strndup(ppl->src + ppl->pos - tok->value.string.len, tok->value.string.len);
}
} else {
tok->kind = TokenKind_dot;
@@ -840,7 +840,7 @@ void pplexer_tokenize_all(PpLexer* ppl) {
}
ppl->pos += 2;
tok->kind = TokenKind_character_constant;
- tok->value.string.data = ppl->src + start;
+ tok->value.string.data = strndup(ppl->src + start, ppl->pos - start);
tok->value.string.len = ppl->pos - start;
} else if (c == '"') {
int start = ppl->pos - 1;
@@ -855,7 +855,7 @@ void pplexer_tokenize_all(PpLexer* ppl) {
}
++ppl->pos;
tok->kind = TokenKind_literal_str;
- tok->value.string.data = ppl->src + start + 1;
+ tok->value.string.data = strndup(ppl->src + start + 1, ppl->pos - start - 2);
tok->value.string.len = ppl->pos - start - 2;
} else if (isdigit(c)) {
--ppl->pos;
@@ -874,7 +874,7 @@ void pplexer_tokenize_all(PpLexer* ppl) {
while (isalnum(ppl->src[ppl->pos]) || ppl->src[ppl->pos] == '_') {
++ppl->pos;
}
- tok->value.string.data = ppl->src + start;
+ tok->value.string.data = strndup(ppl->src + start, ppl->pos - start);
tok->value.string.len = ppl->pos - start;
tok->kind = TokenKind_ident;
} else if (c == '\n' || c == '\r') {
@@ -897,7 +897,7 @@ void pplexer_tokenize_all(PpLexer* ppl) {
} else {
tok->kind = TokenKind_other;
tok->value.string.len = 1;
- tok->value.string.data = ppl->src + ppl->pos - tok->value.string.len;
+ tok->value.string.data = strndup(ppl->src + ppl->pos - tok->value.string.len, tok->value.string.len);
}
ppl->at_bol = tok->kind == TokenKind_newline;
}