1001 lines
34 KiB
C++
1001 lines
34 KiB
C++
CORE_Static Ast_Decl *parse_decl(B32 is_global);
|
|
|
|
// Records a diagnostic message of the given kind (error/warning/trace) on the
// parse context's message queue and bumps the matching occurrence counter.
// `pos1`/`pos2` are optional source tokens the message refers to; `line`/`file`
// optionally tag the compiler-source call site (used by log_trace).
static Core_Message *core_add_message(Core_Message_Kind kind, String string, Token *pos1, Token *pos2 = 0, int line = -1, const char *file = 0) {
    switch (kind) {
        case CORE_ERROR: pctx->errors_occured += 1; break;
        case CORE_WARNING: pctx->warnings_occured += 1; break;
        default: break;
    }

    Core_Message *msg = allocate_struct(pctx->perm, Core_Message);
    msg->kind = kind;
    msg->string = string;
    msg->tokens[0] = pos1;
    msg->tokens[1] = pos2;
    msg->trace_line = line;
    msg->trace_file = (char *)file;
    SLL_QUEUE_ADD(pctx->first_message, pctx->last_message, msg);
    return msg;
}
|
|
|
|
// Convenience wrapper that captures the caller's __LINE__/__FILE__ for trace output.
#define log_trace(...) core_log_trace(__LINE__, __FILE__, ##__VA_ARGS__)

// Formats `str` with printf-style varargs (via the STRING_FMT macro, which
// produces a String named `string`) and records it as a CORE_TRACE message
// tagged with the given compiler-source line/file.
static void core_log_trace(int line, const char *file, const char *str, ...) {
    STRING_FMT(pctx->perm, str, string);
    core_add_message(CORE_TRACE, string, 0, 0, line, file);
}
|
|
|
|
// ANSI terminal escape sequences used to highlight tokens in diagnostics.
#define PRINTF_GREEN "\033[32m"
#define PRINTF_RED "\033[31m"
#define PRINTF_RESET "\033[0m"
|
|
|
|
// Renders a Core_Message into a human readable string:
//   <severity prefix> [lexer error payloads] <message>
//   <full source line per attached token, token optionally colored red>
//   <file>:<line> per attached token
// Uses pctx->helper_builder as the accumulation buffer.
// NOTE(review): the builder is not explicitly reset here — presumably
// string_flatten consumes/resets it; confirm against String_Builder.
String core_stringify_message(Core_Ctx *pctx, Allocator *allocator, Core_Message *msg, int color_codes_enabled = false) {
    String_Builder &b = pctx->helper_builder;

    if (msg->kind == CORE_ERROR) b.addf("Error! ");
    else if (msg->kind == CORE_WARNING) b.addf("Warning! ");
    else if (msg->kind == CORE_TRACE) b.addf("Trace: ");
    else invalid_codepath;

    // Prepend any lexer error payload carried by the attached tokens.
    for (int i = 0; i < buff_cap(msg->tokens); i += 1) {
        Token *it = msg->tokens[i];
        if (it) {
            if (it->kind == TK_Error) {
                b.addf("%Q | ", it->error_val);
            }
        }
    }

    b.addf("%Q", msg->string);

    // For each attached token, print the whole source line it sits on,
    // highlighting the token itself when color codes are enabled.
    for (S64 i = 0; i < buff_cap(msg->tokens); i += 1) {
        Token *token = msg->tokens[i];
        if (token) {
            b.addf("\n");
            // Print from line begin to token
            S64 i1 = token->str - token->line_begin;
            b.addf("%.*s", i1, token->line_begin);

            // Print token part
            if (color_codes_enabled) {
                b.addf(PRINTF_RED "%.*s" PRINTF_RESET, (S64)token->len, token->str);
            }
            else {
                b.addf("%.*s", (S64)token->len, token->str);
            }

            // Print to end of line from token
            S64 iend = 0;
            U8 *pointer = token->str + token->len;
            while (pointer[iend] != '\n' && pointer[iend] != 0) iend++;
            b.addf("%.*s", iend, pointer);
        }
    }

    // Trailing file:line location for each attached token (1-based line).
    for (S64 i = 0; i < buff_cap(msg->tokens); i += 1) {
        Token *it = msg->tokens[i];
        if (it) {
            b.addf("\n%s:%d", it->file.str, (S64)it->line + 1);
        }
    }

    String result = string_flatten(allocator, &b);
    return result;
}
|
|
|
|
// Reports a formatted compiler error attached to two source tokens (e.g. a
// conflict between two locations). Optionally prints the rendered message and
// traps into the debugger when debugger_break_on_compiler_error is set.
static void compiler_error(Token *token1, Token *token2, const char *str, ...) {
    STRING_FMT(pctx->perm, str, string);
    Core_Message *msg = core_add_message(CORE_ERROR, string, token1, token2);
    if (pctx->debugger_break_on_compiler_error) {
        // @fix: renamed from `str`, which shadowed the format parameter.
        String text = core_stringify_message(pctx, pctx->perm, msg, pctx->color_codes_enabled);
        printf("%s", text.str); // @! How to get rid of printf ?
        fflush(stdout);
        Breakpoint;
    }
}
|
|
|
|
// Single-token overload of compiler_error: reports a formatted error attached
// to one source token, with optional immediate print + debugger break.
CORE_Static void
compiler_error(Token *token, const char *str, ...) {
    STRING_FMT(pctx->perm, str, string);
    Core_Message *msg = core_add_message(CORE_ERROR, string, token);
    if (pctx->debugger_break_on_compiler_error) {
        // @fix: renamed from `str`, which shadowed the format parameter.
        String text = core_stringify_message(pctx, pctx->perm, msg, pctx->color_codes_enabled);
        printf("%s", text.str); // @! How to get rid of printf ?
        fflush(stdout);
        Breakpoint;
    }
}
|
|
|
|
// Returns the token `i` positions ahead of the stream cursor (default: the
// current token). Out-of-range lookahead yields the sentinel null token
// instead of reading past the end of the token array.
CORE_Static Token *
token_get(S64 i = 0) {
    S64 index = pctx->token_iter + i;
    if (index < pctx->tokens.len) return &pctx->tokens[index];
    return &pctx->null_token;
}
|
|
|
|
// Returns the current token when it is a scope token (per lex_is_scope),
// otherwise 0. Does not consume.
CORE_Static Token *
token_is_scope() {
    Token *current = token_get();
    return lex_is_scope(current) ? current : 0;
}
|
|
|
|
// Consumes and returns the current token, advancing the stream cursor.
// Scope tokens also update the tracked indentation level as they go by.
CORE_Static Token *
token_next() {
    Token *consumed = token_get();
    if (lex_is_scope(consumed)) pctx->indent = consumed->indent;
    pctx->token_iter += 1;
    return consumed;
}
|
|
|
|
// Peeks `lookahead` tokens ahead; returns the token when its kind matches,
// otherwise 0. Never consumes.
CORE_Static Token *
token_is(Token_Kind kind, S64 lookahead = 0) {
    Token *t = token_get(lookahead);
    return (t->kind == kind) ? t : 0;
}
|
|
|
|
// Peeks `lookahead` tokens ahead; returns the token when it is the given
// keyword (compared by interned string pointer), otherwise 0. Never consumes.
CORE_Static Token *
token_is_keyword(Intern_String keyword, S64 lookahead = 0) {
    Token *t = token_get(lookahead);
    if (t->kind != TK_Keyword) return 0;
    if (t->intern_val.str != keyword.str) return 0;
    return t;
}
|
|
|
|
// Consumes and returns the current token when it is the pound directive
// `#<string>`; otherwise returns 0 without consuming.
CORE_Static Token *
token_match_pound(Intern_String string) {
    Token *t = token_get();
    if (t->kind == TK_Pound && t->intern_val == string) return token_next();
    return 0;
}
|
|
|
|
// Consumes and returns the current token when its kind matches;
// otherwise returns 0 without consuming.
CORE_Static Token *
token_match(Token_Kind kind) {
    if (token_is(kind)) return token_next();
    return 0;
}
|
|
|
|
// Two-token overload: when the current and next token kinds are `a` then `b`,
// consumes both and returns the first; otherwise returns 0 without consuming.
CORE_Static Token *
token_match(Token_Kind a, Token_Kind b) {
    Token *first = token_get();
    Token *second = token_get(1);
    if (first->kind != a || second->kind != b) return 0;
    token_next();
    token_next();
    return first;
}
|
|
|
|
// Consumes and returns the current token when it is the given keyword
// (compared by interned string pointer); otherwise returns 0 without consuming.
CORE_Static Token *
token_match_keyword(Intern_String string) {
    Token *t = token_get();
    if (t->kind != TK_Keyword) return 0;
    if (t->intern_val.str != string.str) return 0;
    return token_next();
}
|
|
|
|
// Like token_match, but a kind mismatch raises a compiler error and returns 0.
// On success the token is consumed and returned.
CORE_Static Token *
token_expect(Token_Kind kind) {
    Token *t = token_get();
    if (t->kind != kind) {
        compiler_error(t, "Expected token of kind: [%s], got instead token of kind: [%s]", name(kind), name(t->kind));
        return 0;
    }
    return token_next();
}
|
|
|
|
// Wraps an already-parsed expression into an init/assignment statement when an
// assignment token follows:
//   ident := value  -> new variable declaration (also usable as an expression)
//   expr  op= value -> binary assignment expression
// Returns `expr` unchanged when no assignment token follows.
CORE_Static Ast_Expr *
parse_init_stmt(Ast_Expr *expr) {
    Token *token = token_get();
    if (token->kind == TK_ColonAssign && expr->kind != AST_IDENT)
        compiler_error(expr->pos, "Binding with [:=] to something that is not an identifier");

    else if (token_is_assign(token)) {
        token_next();
        Ast_Expr *value = parse_expr();
        Ast_Expr *result = 0;
        if (token->kind == TK_ColonAssign) {
            // `name := value` introduces a variable with an inferred type.
            Ast_Atom *name = (Ast_Atom *)expr;
            result = (Ast_Expr *)ast_var(token, 0, name->intern_val, value);
            set_flag(result->flags, AST_EXPR);
        }
        else {
            result = ast_expr_binary((Ast_Atom *)expr, value, token);
        }
        set_flag(result->flags, AST_STMT);
        return result;
    }

    return expr;
}
|
|
|
|
// Parses a comma separated argument list up to `close_kind`, used both for
// lambda calls `f(...)` (close_kind == TK_CloseParen) and compound literals
// `{...}` (close_kind == TK_CloseBrace). Each argument is either a plain
// expression, or a named/indexed specifier:
//   name = expr        (CALL_NAME)
//   index-expr = expr  (CALL_INDEX, compounds only)
// `left` is the callee expression (0 for compounds without a base).
CORE_Static Ast_Call *
parse_expr_call(Ast_Expr *left, Token_Kind close_kind) {
    Scratch_Scope scratch(pctx->scratch);
    Token *pos = token_get();
    Array<Ast_Call_Item *> exprs = {scratch.arena};

    while (!token_is(close_kind)) {
        Ast_Call_Item *item_comp = ast_new(Ast_Call_Item, AST_CALL_ITEM, token_get(), AST_EXPR);
        item_comp->item = parse_expr();
        if (token_match(TK_Assign)) {
            if (!is_flag_set(item_comp->item->flags, AST_ATOM)) {
                compiler_error(item_comp->pos, "Invalid value specifier, it's required to be a simple identifier/index");
            }

            // An identifier before `=` names the argument; anything else atomic
            // is treated as an index specifier.
            if (item_comp->item->kind != AST_IDENT) {
                item_comp->index = item_comp->item;
                set_flag(item_comp->call_flags, CALL_INDEX);
            }
            else {
                item_comp->name = (Ast_Atom *)item_comp->item;
                set_flag(item_comp->call_flags, CALL_NAME);
            }

            item_comp->item = parse_expr();
        }

        // @fix: previously compared close_kind against TK_OpenParen, which is
        // never a close kind, so this check was dead. Lambda calls close with
        // TK_CloseParen (see the TK_OpenParen postfix case in parse_expr).
        if (close_kind == TK_CloseParen && is_flag_set(item_comp->call_flags, CALL_INDEX))
            compiler_error(item_comp->pos, "Lambda calls can't have indexed arguments");

        exprs.add(item_comp);
        if (!token_match(TK_Comma)) {
            break;
        }
    }
    token_expect(close_kind);

    Ast_Call *result = ast_call(pos, left, exprs);
    return result;
}
|
|
|
|
// A leading ':' introduces an optional type expression; returns 0 when absent.
CORE_Static Ast_Expr *
parse_optional_type() {
    if (token_match(TK_Colon)) return parse_expr();
    return 0;
}
|
|
|
|
// Parses an indented statement scope. Handled statements: return/break/pass,
// switch (with #fallthrough cases and default), assert(...) and the
// compile-time #assert(...), for (init, cond, iter), if/elif/else chains,
// multi-variable unpacking `a, b := expr`, and finally plain declarations or
// expressions. When `scope_defined_outside` is passed (e.g. the for-loop
// header scope), statements are appended to it and it is NOT finalized here.
CORE_Static Ast_Scope *
parse_stmt_scope(Ast_Scope *scope_defined_outside = 0) {
    Ast_Scope *scope = scope_defined_outside;

    if (token_expect(OPEN_SCOPE)) { // @todo: Fix error message here, it doesn't show proper token context
        Token *token_block = token_get();

        Scratch_Scope scratch(pctx->scratch);
        if (!scope_defined_outside) scope = begin_stmt_scope(scratch.arena, token_block);
        do {
            Token *token = token_get();

            // return [expr {, expr}] — expressions optional when a scope token follows.
            if (token_match_keyword(pctx->keyword_return)) {
                Array<Ast_Expr *> expr = {scratch.arena};
                if (!token_is_scope()) {
                    do {
                        Ast_Expr *subexpr = parse_expr();
                        expr.add(subexpr);
                    } while (token_match(TK_Comma));
                }
                scope->stmts.add(ast_return(token, expr));
            }

            else if (token_match_keyword(pctx->keyword_break)) {
                scope->stmts.add(ast_break(token));
            }

            else if (token_match_keyword(pctx->keyword_pass)) {
                scope->stmts.add(ast_pass(token));
            }

            // switch value { [#fallthrough] label {, label} scope ... default scope }
            else if (token_match_keyword(pctx->keyword_switch)) {
                Ast_Switch *result = ast_new(Ast_Switch, AST_SWITCH, token, AST_STMT);
                result->value = parse_expr();
                result->cases = {scratch.arena};

                token_expect(OPEN_SCOPE);
                do {
                    if (token_match_keyword(pctx->keyword_default)) {
                        result->default_scope = parse_stmt_scope();
                        continue;
                    }

                    Ast_Switch_Case *switch_case = ast_new(Ast_Switch_Case, AST_SWITCH_CASE, token_get(), AST_STMT);
                    if (token_match_pound(pctx->intern("fallthrough"_s)))
                        switch_case->fallthrough = true;

                    // One case can carry multiple comma separated labels.
                    switch_case->labels = {scratch.arena};
                    do {
                        switch_case->labels.add(parse_expr());
                    } while (token_match(TK_Comma));
                    switch_case->labels = switch_case->labels.tight_copy(pctx->perm);

                    switch_case->scope = parse_stmt_scope();
                    result->cases.add(switch_case);
                } while (token_match(SAME_SCOPE));
                token_expect(CLOSE_SCOPE);
                result->cases = result->cases.tight_copy(pctx->perm);

                scope->stmts.add(result);
            }

            // assert(expr [, "message"]) — runtime assertion.
            else if (token_match_keyword(pctx->keyword_assert)) {
                token_expect(TK_OpenParen);
                Ast_Expr *expr = parse_expr();
                Intern_String message = {};
                if (token_match(TK_Comma)) {
                    Token *t = token_expect(TK_StringLit);
                    message = t->intern_val;
                }
                token_expect(TK_CloseParen);
                scope->stmts.add(ast_runtime_assert(token, expr, message));
            }

            // #assert(expr [, "message"]) — compile-time assertion.
            else if (token_match_pound(pctx->keyword_assert)) {
                token_expect(TK_OpenParen);
                Ast_Expr *expr = parse_expr();
                Intern_String message = {};
                if (token_match(TK_Comma)) {
                    Token *t = token_expect(TK_StringLit);
                    message = t->intern_val;
                }
                token_expect(TK_CloseParen);
                scope->stmts.add(ast_constant_assert(token, expr, message));
            }

            // for [init][, cond][, iter] scope — each header part optional.
            else if (token_match_keyword(pctx->keyword_for)) {
                Ast_Scope *for_scope = begin_stmt_scope(scratch.arena, token_get());
                Ast_Expr *init = 0;
                Ast_Expr *cond = 0;
                Ast_Expr *iter = 0;

                if (!token_is(OPEN_SCOPE)) {
                    if (!token_is(TK_Comma)) {
                        Ast_Expr *expr_first = parse_expr();
                        init = parse_init_stmt(expr_first);
                    }

                    if (token_match(TK_Comma)) {
                        if (!token_is(TK_Comma)) cond = parse_expr();
                        if (token_match(TK_Comma)) {
                            iter = parse_expr();
                            iter = parse_init_stmt(iter);
                        }
                    }
                }

                // The loop body shares the scope that holds the header's init
                // declaration, so the body is parsed into for_scope directly.
                parse_stmt_scope(for_scope);
                finalize_stmt_scope(for_scope);
                scope->stmts.add(ast_for(token, init, cond, iter, for_scope));
            }

            // if [init ,] cond scope { elif cond scope } [else scope]
            else if (token_match_keyword(pctx->keyword_if)) {
                Array<Ast_If_Node *> if_nodes = {scratch.arena};
                Ast_Expr *expr = parse_expr();
                Ast_Expr *init_val = parse_init_stmt(expr);
                // If parse_init_stmt produced a statement, the first expression
                // was an init; the condition (if any) follows after a comma.
                if (init_val != expr) {
                    if (token_match(TK_Comma)) expr = parse_expr();
                    else expr = 0;
                }
                if (init_val == expr) init_val = 0;

                Ast_Scope *if_block = parse_stmt_scope();
                Ast_If_Node *if_node = ast_if_node(token, init_val, expr, if_block);
                if_nodes.add(if_node);

                // Collect elif/else continuations on the same indentation level.
                while (token_is(SAME_SCOPE) && (token_is_keyword(pctx->keyword_elif, 1) || (token_is_keyword(pctx->keyword_else, 1)))) {
                    token_next();
                    token = token_get();
                    if (token_match_keyword(pctx->keyword_elif)) {
                        assert(token->intern_val == pctx->keyword_elif);
                        Ast_Expr *expr = parse_expr();
                        Ast_Scope *else_if_block = parse_stmt_scope();
                        Ast_If_Node *if_node = ast_if_node(token, 0, expr, else_if_block);
                        if_nodes.add(if_node);
                    }
                    else {
                        token_match_keyword(pctx->keyword_else);
                        assert(token->intern_val == pctx->keyword_else);
                        Ast_Scope *else_block = parse_stmt_scope();
                        Ast_If_Node *if_node = ast_if_node(token, 0, 0, else_block);
                        if_nodes.add(if_node);
                        break; // else terminates the chain
                    }
                }
                Ast_If *result_if = ast_if(token, if_nodes);
                scope->stmts.add(result_if);
            }
            // a, b, ... := expr — multi-variable unpacking.
            else if (token_is(TK_Identifier) && token_is(TK_Comma, 1)) {
                Array<Ast_Decl *> decls = {scratch.arena};
                do {
                    // NOTE(review): token_match may return 0 for a non-identifier
                    // after a comma; presumably ast_var tolerates/reports this — confirm.
                    Token *name = token_match(TK_Identifier);
                    Ast_Decl *decl = ast_var(name, 0, name->intern_val, 0);
                    decls.add(decl);
                } while (token_match(TK_Comma));

                token_expect(TK_ColonAssign);
                Ast_Expr *expr = parse_expr();
                Ast_Var_Unpack *vars = ast_var_unpack(token, decls, expr);
                scope->stmts.add(vars);
            }
            // Fallback: a declaration, or a plain expression/init statement.
            else {
                Ast *result = parse_decl(false);
                if (result && result->kind != AST_VAR && result->kind != AST_CONST) {
                    compiler_error(token, "Invalid statement construct");
                }
                if (!result) {
                    result = parse_expr();
                    result = parse_init_stmt((Ast_Expr *)result);
                }

                if (result) {
                    result->flags = set_flag(result->flags, AST_STMT);
                    scope->stmts.add(result);
                }
                else {
                    compiler_error(token, "Unexpected token [%s] while parsing statement", name(token->kind));
                }
            }
        } while (token_match(SAME_SCOPE));
        token_expect(CLOSE_SCOPE);

        if (!scope_defined_outside) finalize_stmt_scope(scope);
    }
    return scope;
}
|
|
|
|
// Parses a lambda starting right after its opening '(' (consumed by the caller):
//   (name: type [= default] {, ...}) [: ret {, ret}] [scope]
// Without an explicit return type the lambda implicitly returns `void`.
// A missing body (no scope token) yields a scope-less lambda (prototype/type).
CORE_Static Ast_Lambda *
parse_lambda(Token *token) {
    Scratch_Scope scratch(pctx->scratch);

    Array<Ast_Decl *> params = {scratch.arena};
    if (!token_is(TK_CloseParen)) {
        for (;;) {
            Token *name = token_get();
            if (token_match(TK_Identifier)) {
                token_expect(TK_Colon);
                Ast_Decl *param = ast_new(Ast_Decl, AST_VAR, name, AST_DECL);
                param->name = name->intern_val;

                param->typespec = parse_expr();
                // Optional default value.
                if (token_match(TK_Assign))
                    param->expr = parse_expr();

                params.add(param);
            }
            // NOTE(review): the message mentions [..] but no TK_ThreeDots case
            // is handled here — presumably handled elsewhere or TODO; confirm.
            else compiler_error(name, "Expected [Identifier] or [..] when parsing lambda arguments");

            if (!token_match(TK_Comma))
                break;
        }
    }
    token_expect(TK_CloseParen);

    // Return types: comma separated list after ':', else implicit void.
    Array<Ast_Expr *> ret = {scratch.arena};
    if (token_match(TK_Colon)) {
        do {
            Ast_Expr *typespec = parse_expr();
            ret.add(typespec);
        } while (token_match(TK_Comma));
    }
    else ret.add(ast_ident(token, pctx->intern_void));

    Ast_Scope *scope = token_is(OPEN_SCOPE) ? parse_stmt_scope() : 0;
    Ast_Lambda *result = ast_lambda(token, params, ret, scope);
    return result;
}
|
|
|
|
//-----------------------------------------------------------------------------
|
|
// Pratt expression parser
|
|
// Based on this really good article: https://matklad.github.io/2020/04/13/simple-but-powerful-pratt-parsing.html
|
|
//-----------------------------------------------------------------------------
|
|
// Pratt-parser binding powers: how tightly an operator binds to its left and
// right operands. Negative values mark "not an operator in this position".
struct Binding_Power {
    S64 left;
    S64 right;
};
// Operator position: prefix (unary), infix (binary), postfix (++/--/call/index).
enum Binding { Binding_Prefix,
               Binding_Infix,
               Binding_Postfix };
|
|
|
|
// Returns the left/right binding power for `kind` used as a prefix, infix or
// postfix operator. Higher powers bind tighter. A left power of -1 means the
// token is not an operator in that position; -2 marks the unused side of a
// prefix (left) or postfix (right) operator.
CORE_Static Binding_Power
binding_power(Binding binding, Token_Kind kind) {
    if (binding == Binding_Prefix) goto Prefix;
    if (binding == Binding_Infix) goto Infix;
    if (binding == Binding_Postfix) goto Postfix;
    invalid_codepath;

Prefix:
    switch (kind) {
        case TK_OpenBracket:
            return {-2, 22};
        case TK_Increment:
        case TK_Decrement:
        case TK_Pointer:
        case TK_Dereference:
        case TK_Keyword:
        case TK_OpenParen:
        case TK_OpenBrace:
        case TK_Sub:
        case TK_Add:
        case TK_Neg:
        case TK_Not:
            return {-2, 20};
        default: return {-1, -1};
    }
Infix:
    switch (kind) {
        case TK_Or:
            return {9, 10};
        case TK_And:
            return {11, 12};
        case TK_Equals:
        case TK_NotEquals:
        case TK_GreaterThen:
        case TK_GreaterThenOrEqual:
        case TK_LesserThen:
        case TK_LesserThenOrEqual:
            return {13, 14};
        case TK_Sub:
        case TK_Add:
        case TK_BitOr:
        case TK_BitXor:
            return {15, 16};
        case TK_RightShift:
        case TK_LeftShift:
        case TK_BitAnd:
        case TK_Mul:
        case TK_Div:
        case TK_Mod:
            return {17, 18};
        case TK_Dot:
            return {31, 30}; // right-to-left (left > right)
        case TK_Arrow:
            return {20, 19}; // right-to-left (left > right)
        // @fix: was `return {};` ({0, 0}) while the other two positions use
        // {-1, -1} for "not an operator". Behavior is identical (min_bp is
        // never negative at the infix comparison) but now consistent.
        default: return {-1, -1};
    }
Postfix:
    switch (kind) {
        case TK_Increment:
        case TK_Decrement:
        case TK_OpenBracket:
        case TK_OpenParen:
        case TK_OpenBrace:
            return {21, -2};
        default: return {-1, -1};
    }
}
|
|
|
|
// Pratt expression parser core. Parses one prefix/atom expression, then loops
// consuming postfix operators (index, call, compound, ++/--) and infix
// operators whose left binding power exceeds `min_bp`. Returns 0 on an
// unexpected leading token (after reporting the error).
// NOTE(review): called elsewhere as parse_expr() with no arguments — a default
// for min_bp is presumably declared on a prototype outside this chunk; confirm.
CORE_Static Ast_Expr *
parse_expr(S64 min_bp) {
    Ast_Expr *left = 0;
    Token *token = token_next();
    Binding_Power prefix_bp = binding_power(Binding_Prefix, token->kind);

    // @note: parse prefix expression
    switch (token->kind) {
        case TK_StringLit: left = ast_str(token, token->intern_val); break;
        case TK_Identifier: left = ast_ident(token, token->intern_val); break;
        case TK_Integer: left = ast_int(token, token->int_val); break;
        case TK_UnicodeLit: left = ast_int(token, token->unicode); break;
        case TK_Float: left = ast_float(token, token->f64_val); break;
        case TK_Pointer: left = ast_expr_unary(token, TK_Pointer, parse_expr(prefix_bp.right)); break;
        case TK_Dereference: left = ast_expr_unary(token, TK_Dereference, parse_expr(prefix_bp.right)); break;
        case TK_Sub: left = ast_expr_unary(token, TK_Sub, parse_expr(prefix_bp.right)); break;
        case TK_Add: left = ast_expr_unary(token, TK_Add, parse_expr(prefix_bp.right)); break;
        case TK_Not: left = ast_expr_unary(token, TK_Not, parse_expr(prefix_bp.right)); break;
        case TK_Neg: left = ast_expr_unary(token, TK_Neg, parse_expr(prefix_bp.right)); break;
        case TK_Increment: left = ast_expr_unary(token, TK_Increment, parse_expr(prefix_bp.right)); break;
        case TK_Decrement: left = ast_expr_unary(token, TK_Decrement, parse_expr(prefix_bp.right)); break;

        // Array type prefix: `[expr] base` (empty brackets for slices/inferred).
        case TK_OpenBracket: {
            Ast_Expr *expr = 0;
            if (!token_is(TK_CloseBracket))
                expr = parse_expr(0);

            Ast_Array *result = ast_array(token, expr);
            token_expect(TK_CloseBracket);
            result->base = parse_expr(prefix_bp.right);
            left = result;
        } break;

        // Base-less compound literal `{...}`.
        case TK_OpenBrace: {
            left = parse_expr_call(0, TK_CloseBrace);
            left->kind = AST_COMPOUND;
        } break;

        case TK_Keyword: {
            if (token->intern_val == pctx->keyword_true)
                left = ast_bool(token, 1);
            else if (token->intern_val == pctx->keyword_false)
                left = ast_bool(token, 0);
            else compiler_error(token, "Unexpected keyword: [%s]", token->intern_val.str);
        } break;

        // '(' opens either a lambda (empty parens, `ident :`, or `...`) or a
        // plain parenthesized expression.
        case TK_OpenParen: {
            if (token_is(TK_CloseParen) || (token_is(TK_Identifier) && token_is(TK_Colon, 1)) || token_is(TK_ThreeDots))
                left = parse_lambda(token);
            else {
                left = parse_expr(0);
                token_expect(TK_CloseParen);
            }
        } break;
        default: compiler_error(token, "Unexpected token of kind: [%s] in expression", name(token->kind)); return 0;
    }

    for (;;) {
        token = token_get();

        // lets say [+] is left:1, right:2 and we parse 2+3+4
        // We pass min_bp of 2 to the next recursion
        // in recursion we check if left(1) > min_bp(2)
        // it's not so we don't recurse - we break
        // We do standard do the for loop instead

        Binding_Power postfix_bp = binding_power(Binding_Postfix, token->kind);
        Binding_Power infix_bp = binding_power(Binding_Infix, token->kind);

        // @note: parse postfix expression
        if (postfix_bp.left > min_bp) {
            token_next();
            switch (token->kind) {
                case TK_OpenBracket: {
                    Ast_Expr *index = parse_expr(0);
                    token_expect(TK_CloseBracket);
                    left = ast_expr_index(token, left, index);
                } break;
                case TK_OpenBrace: {
                    left = parse_expr_call(left, TK_CloseBrace);
                    left->kind = AST_COMPOUND;
                } break;
                case TK_OpenParen: {
                    left = parse_expr_call(left, TK_CloseParen);
                } break;
                default: {
                    assert(token->kind == TK_Increment || token->kind == TK_Decrement);
                    // Rewrites the token kind in place to distinguish post-inc/dec
                    // from the prefix forms later on.
                    if (token->kind == TK_Increment) token->kind = TK_PostIncrement;
                    else if (token->kind == TK_Decrement) token->kind = TK_PostDecrement;
                    left = ast_expr_unary(token, token->kind, left);
                }
            }
        }

        // @note: parse infix expression
        else if (infix_bp.left > min_bp) {
            token = token_next();
            Ast_Expr *right = parse_expr(infix_bp.right);
            left = ast_expr_binary(left, right, token);
        }

        else break;
    }
    return left;
}
|
|
|
|
// '=' introduces an optional initializer expression; returns 0 when absent.
CORE_Static Ast_Expr *
parse_assign_expr() {
    if (token_match(TK_Assign)) return parse_expr();
    return 0;
}
|
|
|
|
// Parses a struct/union body: an indented scope of `name : type` members.
// `pos` is the declaration's name token; `kind` is AST_STRUCT or AST_UNION.
CORE_Static Ast_Decl *
parse_struct(Token *pos, Ast_Kind kind) {
    Scratch_Scope scratch(pctx->scratch);

    token_match(OPEN_SCOPE);
    Ast_Scope *scope = begin_decl_scope(scratch.arena, token_get());
    do {
        Token *token = token_expect(TK_Identifier);
        token_expect(TK_Colon);

        Ast_Expr *typespec = parse_expr();
        Ast_Decl *decl = ast_var(token, typespec, token->intern_val, 0);
        decl->flags = set_flag(decl->flags, AST_AGGREGATE_CHILD);

        add(pctx->perm, &scope->decls, decl);

    } while (token_match(SAME_SCOPE));
    token_expect(CLOSE_SCOPE);

    finalize_decl_scope(scope);
    Ast_Decl *result = ast_struct(pos, scope, kind);
    return result;
}
|
|
|
|
// Parses an enum body: optional underlying type (`: type`), optional #flag
// attribute, then an indented scope of `name [:: value]` members.
// NOTE(review): finalize_decl_scope runs before token_expect(CLOSE_SCOPE) here,
// while parse_struct does it the other way around — presumably harmless; confirm.
CORE_Static Ast_Decl *
parse_enum(Token *pos) {
    Scratch_Scope scratch(pctx->scratch);
    Ast_Expr *typespec = parse_optional_type();
    Token *flag = token_match_pound(pctx->intern_flag);

    token_match(OPEN_SCOPE);
    Ast_Scope *scope = begin_decl_scope(scratch.arena, token_get());
    do {
        Token *name = token_expect(TK_Identifier);
        Ast_Expr *value = 0;
        // Explicit member value: `name :: expr`.
        if (token_match(TK_DoubleColon)) value = parse_expr();
        Ast_Decl *member = ast_const(name, name->intern_val, value);
        member->flags = set_flag(member->flags, AST_AGGREGATE_CHILD);
        add(pctx->perm, &scope->decls, member);
    } while (token_match(SAME_SCOPE));
    finalize_decl_scope(scope);
    token_expect(CLOSE_SCOPE);

    Ast_Decl *result = ast_enum(pos, typespec, scope);
    if (flag) set_flag(result->flags, AST_FLAG);
    return result;
}
|
|
|
|
// Adds `to_add` to scope->implicit_imports unless it is already present
// (linear scan de-duplication).
CORE_Static void
add_implicit_import(Ast_Scope *scope, Ast_Scope *to_add) {
    B32 found = false;
    For(scope->implicit_imports) {
        if (it == to_add) {
            found = true;
            break;
        }
    }
    if (!found) {
        add(pctx->perm, &scope->implicit_imports, to_add);
    }
}
|
|
|
|
// Readability constant for the `global_implicit_load` argument of register_ast_file/parse_load.
enum { GLOBAL_IMPLICIT_LOAD = 1 };
|
|
|
|
// Returns the Ast_File registered for `absolute_file_path` within `module`,
// creating and registering it on first sight. Loading the same file from a
// different module is an error: a file belongs to exactly one module. With
// `global_implicit_load`, the file is also added to the module's implicit imports.
CORE_Static Ast_File *
register_ast_file(Token *pos, String absolute_file_path, Ast_Module *module, B32 global_implicit_load) {
    Ast_File *file = 0;

    For(pctx->files) {
        if (string_compare(it->absolute_file_path, absolute_file_path)) {
            if (module == it->module) {
                log_trace("%Q :: Returning registered file: %Q\n", module->absolute_file_path, absolute_file_path);
                file = it;
                break;
            }

            // @fix: report the module that already owns the file (it->module),
            // not the module currently trying to load it — "that module" in the
            // message refers to the owner the user should import.
            compiler_error(it->pos, pos, "This file is already loaded by module: %Q, try importing that module to get access to it", it->module->absolute_file_path);
        }
    }

    if (!file) {
        log_trace("%Q :: Registering file: %Q\n", module->absolute_file_path, absolute_file_path);
        AST_NEW(File, FILE, 0, 0); // declares `result`
        file = result;
        file->absolute_file_path = absolute_file_path;
        file->absolute_base_folder = string_copy(pctx->perm, string_chop_last_slash(file->absolute_file_path));
        file->module = module;
        file->parent_scope = module;
        file->file = file; // @warning: self referential!
        file->pos = pos;
        file->debug_name = string_skip_to_last_slash(absolute_file_path);
        add(pctx->perm, &file->module->all_loaded_files, file);
        file->scope_id = pctx->scope_ids++;
        add(pctx->perm, &pctx->files, file);
    }

    if (global_implicit_load) {
        add_implicit_import(module, file);
    }

    return file;
}
|
|
|
|
// Expands the $OS / $os placeholders in a filename token to the host OS name
// (upper/lower case respectively) and interns the resulting string.
CORE_Static Intern_String
preprocess_filename(Token *token_filename) {
    Scratch_Scope _scope(pctx->scratch);
    String filename = token_filename->intern_val.s;
    Array<String_Replace> replace = {pctx->scratch};
    replace.add({"$OS"_s, OS_NAME});
    replace.add({"$os"_s, OS_NAME_LOWER});
    String result0 = string_replace(pctx->scratch, pctx->scratch, filename, replace);
    Intern_String result = pctx->intern(result0);
    return result;
}
|
|
|
|
// Handles `#load "file"`: expands placeholders in the filename, resolves it
// relative to the current file's folder and registers it in the same module.
CORE_Static Ast_File *
parse_load(B32 global_implicit_load) {
    Token *file = token_expect(TK_StringLit);
    Intern_String filename = preprocess_filename(file);
    String absolute_path = string_fmt(pctx->perm, "%Q/%Q", pctx->currently_parsed_file->absolute_base_folder, filename);
    Ast_File *result = register_ast_file(file, absolute_path, pctx->currently_parsed_file->module, global_implicit_load);
    return result;
}
|
|
|
|
CORE_Static Ast_Module *add_module(Token *pos, Intern_String filename, B32 command_line_module = false, bool string_only_module = false);
|
|
// Handles `#import "module"`: expands placeholders and registers the module.
// With `global_implicit_import`, the module also becomes an implicit import of
// the current file's module (names visible without a namespace prefix).
CORE_Static Ast_Module *
parse_import(B32 global_implicit_import) {
    Token *file = token_expect(TK_StringLit);
    Intern_String filename = preprocess_filename(file);
    Ast_Module *result = add_module(file, filename);
    if (global_implicit_import) {
        add_implicit_import(pctx->currently_parsed_file->module, result);
    }
    return result;
}
|
|
|
|
/*
This uses token peeking only because I didn't want to duplicate the
statement-parsing code; sharing it keeps the code nicer.
Statements can use the named syntax `i := ...`.
*/
|
|
// Parses one declaration. Supported forms:
//   name :: struct|union|enum|#import|const-expr   (constants, types, lambdas)
//   "op" :: lambda                                 (operator overload)
//   name :  type [= expr] [#foreign]               (typed variable)
//   name := expr                                   (inferred variable)
// Returns 0 when no declaration form matched (statement parsing falls back to
// plain expressions in that case). At global scope, a non-match on a non-end
// token is reported as an error.
CORE_Static Ast_Decl *
parse_decl(B32 is_global) {
    Ast_Decl *result = 0;
    if (is_global) {
        token_match(SAME_SCOPE);
        if (pctx->indent != 0) {
            compiler_error(token_get(), "Top level declarations shouldn't be indented");
        }
    }

    Ast_Flag flags = 0;
    Token *tname = token_get();
    if (token_match(TK_Identifier, TK_DoubleColon)) {

        // Optional attribute directives right after `::`.
        if (token_match_pound(pctx->intern_foreign)) {
            set_flag(flags, AST_FOREIGN);
        }
        else if (token_match_pound(pctx->intern_strict)) {
            set_flag(flags, AST_STRICT);
        }

        if (token_match_keyword(pctx->keyword_struct)) {
            result = parse_struct(tname, AST_STRUCT);
        }

        else if (token_match_keyword(pctx->keyword_union)) {
            result = parse_struct(tname, AST_UNION);
        }

        else if (token_match_keyword(pctx->keyword_enum)) {
            result = parse_enum(tname);
        }

        // `name :: #import "module"` binds the module under a namespace name.
        else if (token_match_pound(pctx->intern("import"_s))) {
            Ast_Module *module = parse_import(false);
            result = ast_namespace(tname, module, tname->intern_val);
        }

        else {
            Ast_Expr *expr = parse_expr();
            result = ast_const(tname, tname->intern_val, expr);

            // @cleanup: consider simplifying lambdas, removing AST_LAMBDA_EXPR and
            // implementing actual parse_lambda or something. Probably needs less
            // ambigious syntax.
            if (expr->kind == AST_LAMBDA_EXPR) {
                auto a = (Ast_Lambda *)expr;
                // A lambda with a body (or a #foreign prototype) becomes a
                // proper function declaration rather than a constant value.
                if (a->scope || is_flag_set(flags, AST_FOREIGN)) {
                    result->kind = AST_LAMBDA;
                    if (is_flag_set(flags, AST_FOREIGN))
                        set_flag(expr->flags, flags);
                }
            }
        }
    }
    // Operator overload: a string literal names the operator being overloaded.
    else if (token_match(TK_StringLit, TK_DoubleColon)) {

        // @cleanup: consider simplifying lambdas, removing AST_LAMBDA_EXPR and
        // implementing actual parse_lambda or something. Probably needs less
        // ambigious syntax.
        Ast_Lambda *expr = (Ast_Lambda *)parse_expr();
        if (expr->kind != AST_LAMBDA_EXPR) {
            compiler_error(tname, "Operator overload is required to be a lambda function");
        }
        if (!expr->scope) {
            compiler_error(tname, "Operator overload doesn't have body");
        }
        Ast_Operator_Info *op_info = get_operator_info(tname->intern_val);
        if (!op_info) {
            compiler_error(tname, "This operator cannot be overloaded");
        }

        // Arity must match what the operator supports (unary vs binary).
        if (expr->args.len == 1) {
            if (!op_info->valid_unary_expr) {
                compiler_error(tname, "This operator cannot have a unary expression");
            }
        }
        else if (expr->args.len == 2) {
            if (!op_info->valid_binary_expr) {
                compiler_error(tname, "This operator cannot have a binary expression");
            }
        }
        else {
            compiler_error(tname, "Invalid argument count for operator overload, unhandled operator");
        }

        result = ast_const(tname, tname->intern_val, expr);
        result->overload_op_info = op_info;
        result->kind = AST_LAMBDA;
        result->flags = set_flag(result->flags, AST_OPERATOR_OVERLOAD);
    }
    // name : type [= expr] [#foreign]
    else if (token_match(TK_Identifier, TK_Colon)) {
        Ast_Expr *typespec = parse_expr();
        Ast_Expr *expr = parse_assign_expr();
        if (token_match_pound(pctx->intern_foreign))
            set_flag(flags, AST_FOREIGN);

        result = ast_var(tname, typespec, tname->intern_val, expr);
    }

    // name := expr (type inferred from the initializer)
    else if (token_match(TK_Identifier, TK_ColonAssign)) {
        Ast_Expr *expr = parse_expr();
        result = ast_var(tname, 0, tname->intern_val, expr);
    }

    else if (is_global && tname->kind != TK_End) {
        compiler_error(tname, "Unexpected token: [%s] when parsing a declaration", name(tname->kind));
    }

    if (result) {
        set_flag(result->flags, flags);
        result->name = tname->intern_val;
    }

    return result;
}
|
|
|
|
// Lexes and parses an entire file into top-level declarations, handling the
// per-line directives #load, #import and #link first. File contents are read
// from disk only when not already set on the Ast_File. Updates the context's
// currently-parsed file/scope for the duration of the parse.
CORE_Static void
parse_file(Ast_File *file) {
    assert(file);
    Scratch_Scope scratch(pctx->scratch);

    if (!file->filecontent.len) {
        file->filecontent = os_read_file(pctx->perm, file->absolute_file_path);
        if (file->filecontent.len == 0) {
            compiler_error(file->pos, "Failed to open file \"%Q\"", file->absolute_file_path);
        }
    }

    pctx->currently_parsed_file = file;
    pctx->currently_parsed_scope = file;
    lex_restream(pctx, file->filecontent, file->absolute_file_path);
    while (token_expect(SAME_SCOPE)) {
        if (token_match_pound(pctx->intern_load)) {
            parse_load(true);
            continue;
        }
        else if (token_match_pound(pctx->intern_import)) {
            parse_import(true);
            continue;
        }
        else if (token_match_pound(pctx->intern_link)) {
            // @fix: renamed from `file`, which shadowed the Ast_File parameter.
            Token *link_token = token_expect(TK_StringLit);
            add(pctx->perm, &pctx->files_to_link, link_token);
            continue;
        }

        // Record the first non-directive token as the file's position anchor.
        if (!file->pos) {
            file->pos = token_get();
        }

        Ast_Decl *decl = parse_decl(true);
        if (!decl) break;

        set_flag(decl->flags, AST_GLOBAL);
        // Aggregates immediately get an incomplete type so mutually recursive
        // struct references can be resolved later.
        if (decl->kind == AST_STRUCT || decl->kind == AST_UNION) {
            decl->type = pctx->type_type;
            decl->type_val = type_incomplete(decl);
            decl->state = DECL_RESOLVED;
        }

        insert_into_scope(file, decl);
    }
    pctx->currently_parsed_scope = 0;
    pctx->currently_parsed_file = 0;
}
|