Managing bigint memory

This commit is contained in:
Krzosa Karol
2022-06-06 16:49:41 +02:00
parent 06d6ec7525
commit dc56bd54f3
6 changed files with 165 additions and 62 deletions

View File

@@ -86,7 +86,7 @@ enum Token_Kind{
TK_Pointer = TK_Mul,
TK_Dereference = TK_BitAnd,
OPEN_SCOPE = 128,
CLOSE_SCOPE,
SAME_SCOPE,
@@ -125,6 +125,7 @@ struct Lex_Stream{
};
struct Lexer{
Allocator *arena;
Lex_Stream stream;
Array<Token> tokens;
Intern_Table interns;
@@ -205,17 +206,23 @@ token_error(Token *t, String error_val){
}
function void
lex_parse_u64(Lexer *lexer, Token *t){
    // Parse the decimal digits of `t` (t->str, t->len) into an arbitrary-
    // precision integer and store it in t->int_val.
    //
    // Memory strategy: all intermediate BigInt values are allocated on a
    // local Scratch arena (installed via Set_BigInt_Allocator), so the
    // per-iteration results that accumulate in the loop are reclaimed in one
    // shot when `scratch` goes out of scope. Only the final value is copied
    // into the lexer's long-lived arena.
    Scratch scratch;
    Set_BigInt_Allocator(scratch);
    t->kind = TK_Integer;
    BigInt m      = bigint_u64(1);   // place value: 1, 10, 100, ...
    BigInt val10  = bigint_u64(10);
    BigInt result = bigint_u64(0);
    // Walk digits from least significant to most significant.
    for(S64 i = t->len - 1; i >= 0; --i){
        BigInt val     = bigint_u64(t->str[i] - '0');
        BigInt new_val = bigint_mul(&val, &m);       // digit * place value (scratch)
        result = bigint_add(&result, &new_val);      // accumulate (scratch)
        m      = bigint_mul(&m, &val10);             // advance place value (scratch)
    }
    // Persist the result past the Scratch lifetime.
    t->int_val = bigint_copy(lexer->arena, &result);
}
function void
@@ -332,7 +339,10 @@ lex_unwind_indent_stack(Token *t, Lex_Stream *s, Array<Token> *array){
}
function void
lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
lex__stream(Lexer *lexer, Lex_Stream *s){
Intern_Table *table = &lexer->interns;
Array<Token> *array = &lexer->tokens;
B32 beginning = true;
for(;;){
if(lexc(s) == 0 || s->iter >= s->stream.len){
@@ -594,7 +604,7 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
}
lex_set_len(s, &t);
if(found_dot) lex_parse_f64(&t);
else lex_parse_u64(&t);
else lex_parse_u64(lexer, &t);
} break;
@@ -633,6 +643,7 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
function void
lex_init(Allocator *token_string_arena, Allocator *map_allocator, Lexer *l){
    // Wire the lexer to its long-lived storage: the intern table and token
    // array both live on the token/string arena; the map allocator backs the
    // intern table's hash map. The arena is also kept on the Lexer so later
    // stages (e.g. bigint parsing) can allocate persistent values from it.
    l->interns = intern_table_make(token_string_arena, map_allocator, 1024);
    l->tokens  = array_make<Token>(token_string_arena, 1024*2);
    l->arena   = token_string_arena;
}
@@ -657,7 +668,7 @@ lex_restream(Lexer *lexer, String istream, String file){
Scratch scratch;
lexer->stream.indent_stack.allocator = scratch;
lexer->stream.indent_stack.add(&token_null);
lex__stream(&lexer->interns, &lexer->tokens, &lexer->stream);
lex__stream(lexer, &lexer->stream);
}
function Lexer