Compile string, working on scopes

This commit is contained in:
Krzosa Karol
2022-05-25 15:17:08 +02:00
parent b945f3affd
commit f4c05923c9
9 changed files with 125 additions and 136 deletions

View File

@@ -1,6 +1,6 @@
#define genln(...) do{printf("\n"); gen_indent(); gen(__VA_ARGS__); }while(0)
#define gen(...) printf(__VA_ARGS__)
#define gen(...) pctx->gen.addf(__VA_ARGS__)
#define genln(...) do{gen("\n"); gen_indent(); gen(__VA_ARGS__); }while(0)
global S32 global_indent;
function void
@@ -34,7 +34,7 @@ gen_simple_decl_prefix(Ast_Resolved_Type *ast){
function void
gen_simple_decl_postfix(Ast_Resolved_Type *ast){
switch(ast->kind){
switch(ast->kind){
case TYPE_Int: break;
case TYPE_Bool: break;
case TYPE_Unsigned: break;
@@ -195,7 +195,7 @@ gen_ast(Ast *ast){
}
Ast_Begin(AST_CONST, Ast_Decl){
Sym *sym = sym_get(node->name);
Sym *sym = resolved_get(node);
if(sym->type->kind == TYPE_Lambda){
if(node->var.expr->kind == AST_LAMBDA){
@@ -246,19 +246,36 @@ gen_ast(Ast *ast){
}
}
function void
test_gen(){
TEST_PARSER();
String filename = "globals.kl"_s;
String file_content = os_read_file(scratch, filename);
lex_restream(&ctx, file_content, filename);
function String
compile_string(String filecontent, String filename = "default_name"_s){
Scratch scratch(thread_ctx.scratch);
OS_Heap heap = win32_os_heap_create(false, mib(4), 0);
Parse_Ctx ctx = {};
ctx.init(scratch, &heap);
pctx = &ctx;
lex_restream(&ctx, filecontent, filename);
Ast_Package *result = parse_file();
sym_insert_builtins();
eval_decl(result);
gen(R"==(
gen(R"==(//-------------------------------
#define NULL_POINTER 0
#define NULL_LAMBDA 0
)==");
//-------------------------------)==");
eval_decl(result);
gen_ast(result);
__debugbreak();
exp_destroy(&heap);
String string_result = string_flatten(scratch, &pctx->gen);
return string_result;
}
function String
compile_file(String filename){
Scratch scratch;
String filecontent = os_read_file(scratch, filename);
String result = compile_string(filecontent, filename);
return result;
}

View File

@@ -1,12 +1,3 @@
returning_void :: (insert: int)
return
arena_push :: (size: int): int
result := size + 10
return result
//-----------------------------------------------------------------------------
// Function types
//-----------------------------------------------------------------------------

View File

@@ -1,4 +1,15 @@
arena_push :: (size: int): int
add_10 :: (size: int): int
add_20 :: (new_size: int): int
return 20
result := size + 10
return result
return_constant :: (): int
constant :: 10
return constant
returning_void :: (insert: int)
return

View File

@@ -10,15 +10,18 @@
int main(){
test_os_memory();
thread_ctx_init();
test_gen();
test_unicode();
test_resolve();
test_types();
test_parse_expr();
map_test();
test_array();
test_string_builder();
test_intern_table();
lex_test();
String result = compile_file("lambdas.kl"_s);
printf("%s", result.str);
compile_file("globals.kl"_s);
__debugbreak();
}

View File

@@ -35,6 +35,8 @@ struct Parse_Ctx:Lexer{
Token empty_token;
S64 indent;
String_Builder gen;
Intern_String intern(String string){
return intern_string(&interns, string);
}
@@ -43,6 +45,7 @@ struct Parse_Ctx:Lexer{
perm = perm_allocator;
heap = heap_allocator;
gen = {perm};
resolved = {heap};
syms = {heap};
type_map = {heap};
@@ -203,7 +206,7 @@ function Ast_Typespec *ast_typespec_name(Token *pos, Intern_String name);
// AST Constructors beginning with expressions
//-----------------------------------------------------------------------------
#define AST_NEW(T,ikind,ipos) \
Ast_##T *result = exp_alloc_type(pctx->perm, Ast_##T); \
Ast_##T *result = exp_alloc_type(pctx->perm, Ast_##T, AF_ZeroMemory);\
result->kind = ikind; \
result->pos = ipos; \
result->id = ++pctx->unique_ids

View File

@@ -272,6 +272,7 @@ lex_parse_string(Lex_Stream *s, Token *t, U8 c){
function void
lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
while(lexc(s)){
while(lexc(s) == '\r') lex_advance(s);
Token t = {};
t.str = lexcp(s);
@@ -307,7 +308,7 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
t.kind = TK_Semicolon;
}break;
case ' ' : s->stream.str -= 1;
case '\r': case ' ' : s->stream.str -= 1;
case '\n': {
t.kind = TK_NewLine;
if(lexc(s) == '\r')

View File

@@ -1,4 +1,30 @@
function void
parsing_error(Token *token, const char *str, ...){
Scratch scratch;
STRING_FMT(scratch, str, string);
// @Note(Krzosa): Print nice error message
printf("\nError: %s", string.str);
if(token){
printf(" %s:%d\n", token->file.str, (S32)token->line + 1);
// @Note(Krzosa): Print error line
{
int i = 0;
while(token->line_begin[i]!='\n' && token->line_begin[i]!=0) i++;
printf("%.*s\n", i, token->line_begin);
// @Note(Krzosa): Print error marker
int token_i = token->str - token->line_begin;
for(int i = 0; i < token_i-2; i++) printf(" ");
printf("^^^^^^\n");
}
}
__debugbreak();
}
function Token *
token_get(S64 i = 0){
i += pctx->token_iter;
@@ -47,32 +73,6 @@ token_match_keyword(Intern_String string){
return 0;
}
function void
parsing_error(Token *token, const char *str, ...){
Scratch scratch;
STRING_FMT(scratch, str, string);
// @Note(Krzosa): Print nice error message
printf("\nError: %s", string.str);
if(token){
printf(" %s:%d\n", token->file.str, (S32)token->line + 1);
// @Note(Krzosa): Print error line
{
int i = 0;
while(token->line_begin[i]!='\n' && token->line_begin[i]!=0) i++;
printf("%.*s\n", i, token->line_begin);
// @Note(Krzosa): Print error marker
int token_i = token->str - token->line_begin;
for(int i = 0; i < token_i-2; i++) printf(" ");
printf("^^^^^^\n");
}
}
__debugbreak();
}
function Token *
token_expect(Token_Kind kind){
Token *token = token_get();
@@ -81,14 +81,17 @@ token_expect(Token_Kind kind){
return 0;
}
// @note: right now we check if on downscope there is an end of file
// not sure if this is the right approach codewise but the fact
// that end of file is treated as end of scope feels intuitive
function Token *
token_is_scope(Token_Kind scope){
assert(scope == OPEN_SCOPE || scope == CLOSE_SCOPE || scope == SAME_SCOPE);
Token *token = token_get();
if(token->kind == TK_NewLine){
if(token->kind == TK_NewLine || token->kind == TK_End){
if (scope == OPEN_SCOPE && token->indent > pctx->indent) return token;
else if(scope == CLOSE_SCOPE && token->indent < pctx->indent) return token;
else if(scope == SAME_SCOPE && token->indent == pctx->indent) return token;
else if((scope == CLOSE_SCOPE) && ((token->indent < pctx->indent) || (token->kind == TK_End))) return token;
}
return 0;
}
@@ -104,10 +107,10 @@ function Token *
token_expect_scope(Token_Kind scope){
assert(scope == OPEN_SCOPE || scope == CLOSE_SCOPE || scope == SAME_SCOPE);
Token *token = token_get();
if(token->kind == TK_NewLine){
if (scope == OPEN_SCOPE && token->indent > pctx->indent) return token;
else if(scope == CLOSE_SCOPE && token->indent < pctx->indent) return token;
else if(scope == SAME_SCOPE && token->indent == pctx->indent) return token;
if(token->kind == TK_NewLine || token->kind == TK_End){
if (scope == OPEN_SCOPE && token->indent > pctx->indent) return token_next();
else if(scope == SAME_SCOPE && token->indent == pctx->indent) return token_next();
else if((scope == CLOSE_SCOPE) && ((token->indent < pctx->indent) || (token->kind == TK_End))) return token_next();
else parsing_error(token, "Expected a scope of kind [%s]", token_kind_string(scope));
}
parsing_error(token, "Expected Scope[%s] got instead: [%s]", token_kind_string(scope).str, token_kind_string(token->kind).str);
@@ -220,15 +223,11 @@ parse_block(){
return block;
}
function Ast_Lambda *
parse_lambda(Token *token, B32 is_typespec = false){ // @Todo(Krzosa): is_typespec is not used currently
parse_lambda(Token *token, B32 is_typespec = false){
Scratch scratch;
Array<Ast_Lambda_Arg *> params = {scratch};
// @Note(Krzosa): No need to guard against "()"
// We needed to lookahead to verify it's a function
// and this routine only fires when we have situation
// with at least one parameter
if(!token_is(TK_CloseParen)){
for(;;){
Token *name = token_expect(TK_Identifier);
token_expect(TK_Colon);
@@ -240,10 +239,11 @@ parse_lambda(Token *token, B32 is_typespec = false){ // @Todo(Krzosa): is_typesp
break;
}
}
}
token_expect(TK_CloseParen);
Ast_Typespec *ret = parse_optional_type();
Ast_Block *block = parse_block();
Ast_Block *block = is_typespec ? 0 : parse_block();
Ast_Lambda *result = ast_lambda(token, params, ret, block);
return result;
}
@@ -271,7 +271,7 @@ null_denotation(Token *token){
}break;
case TK_OpenBrace: return parse_expr_compound();
case TK_OpenParen: {
if(token_match(TK_CloseParen)) return ast_expr_lambda_empty(token);
if (token_is(TK_CloseParen)) return parse_lambda(token);
else if(token_is(TK_Identifier) && token_is(TK_Colon, 1)) return parse_lambda(token);
else{
Ast_Expr *result = parse_expr();
@@ -342,32 +342,6 @@ parse_expr(S64 rbp){
return left;
}
#define TEST_PARSER() \
Scratch scratch(thread_ctx.scratch); \
Parse_Ctx ctx = {}; \
ctx.init(scratch, scratch); \
pctx = &ctx
function void
test_parse_expr(){
TEST_PARSER();
struct Test{String str;S64 val;};
// Array<Test> exprs = {scratch};
//exprs.add({"thing[1][2][3]"_s, 0});
// exprs.add({"4++++--"_s, 5});
// exprs.add({"(4+5)*2"_s, (4+5)*2});
// exprs.add({"4+5*2"_s, 4+5*2});
// exprs.add({"4*5+5"_s, 4*5+5});
// exprs.add({"4+5+5+3"_s, 4+5+5+3});
// For(exprs){
// lex_restream(&ctx, it->str, "test_expr"_s);
// Ast_Expr *result = parse_expr();
// S64 val = expr_eval(result);
// assert(val == it->val);
// }
}
//-----------------------------------------------------------------------------
// Parsing declarations
//-----------------------------------------------------------------------------

View File

@@ -70,7 +70,7 @@ scope_pop(S64 local_sym_count){
function Sym *
sym_new(Sym_Kind kind, Intern_String name, Ast_Resolved_Type *type, Ast *ast){
Sym *result = exp_alloc_type(pctx->perm, Sym);
Sym *result = exp_alloc_type(pctx->perm, Sym, AF_ZeroMemory);
result->name = name;
result->kind = kind;
result->type = type;
@@ -189,22 +189,12 @@ resolve_type_pair(Token *pos, Ast_Resolved_Type *a, Ast_Resolved_Type *b){
return result;
}
function void
eval_var(Ast_Decl *node){
Ast_Resolved_Type *type = eval_typespec(node->var.typespec);
Operand expr = node->var.expr ? eval_expr(node->var.expr, type) : Operand{};
Ast_Resolved_Type *resolved_type = resolve_type_pair(node->pos, type, expr.type);
Sym *sym = sym_new(SYM_Var, node->name, resolved_type, node);
sym_insert(sym);
}
function void eval_decl(Ast *ast);
function void
eval_stmt(Ast *ast, Ast_Resolved_Type *ret){
// @todo: need to check if all paths return a value
switch(ast->kind){
Ast_Begin(AST_RETURN, Ast_Return){
Ast_Begin(AST_RETURN, Ast_Return){ // @todo: need to check if all paths return a value
Operand op = {};
if(node->expr) op = eval_expr(node->expr);
if(!op.type && ret != type_void) parsing_error(node->pos, "Function expects a void return value but the returned value is [x]");
@@ -214,7 +204,12 @@ eval_stmt(Ast *ast, Ast_Resolved_Type *ret){
}
Ast_Begin(AST_VAR, Ast_Decl){
eval_var(node);
eval_decl(node);
Ast_End();
}
Ast_Begin(AST_CONST, Ast_Decl){
eval_decl(node);
Ast_End();
}
@@ -435,7 +430,12 @@ eval_decl(Ast *ast){
}
Ast_Begin(AST_VAR, Ast_Decl){
eval_var(node);
Ast_Resolved_Type *type = eval_typespec(node->var.typespec);
Operand expr = node->var.expr ? eval_expr(node->var.expr, type) : Operand{};
Ast_Resolved_Type *resolved_type = resolve_type_pair(node->pos, type, expr.type);
Sym *sym = sym_new(SYM_Var, node->name, resolved_type, node);
sym_insert(sym);
Ast_End();
}
@@ -458,14 +458,3 @@ eval_decl(Ast *ast){
}
}
function void
test_resolve(){
TEST_PARSER();
String filename = "globals.kl"_s;
String file_content = os_read_file(scratch, filename);
lex_restream(&ctx, file_content, filename);
Ast_Package *result = parse_file();
sym_insert_builtins();
eval_decl(result);
}

View File

@@ -66,7 +66,7 @@ global Ast_Resolved_Type *type_null = &type__null;
function Ast_Resolved_Type *
type_new(Allocator *allocator, Ast_Resolved_Type_Kind kind, SizeU size, SizeU align){
Ast_Resolved_Type *result = exp_alloc_type(allocator, Ast_Resolved_Type);
Ast_Resolved_Type *result = exp_alloc_type(allocator, Ast_Resolved_Type, AF_ZeroMemory);
result->kind = kind;
result->size = size;
result->align = align;