Generating valid C code, more work on null values

This commit is contained in:
Krzosa Karol
2022-05-24 20:33:11 +02:00
parent 236ff0cd64
commit 5176b40204
17 changed files with 698 additions and 1258 deletions

84
lex.c
View File

@@ -27,7 +27,7 @@ function void
init_default_keywords(Intern_Table *t){
keyword_if = intern_string(t, lit("if"));
first_keyword = keyword_if.s.str;
keyword_cast = intern_string(t, lit("cast"));
keyword_for = intern_string(t, lit("for"));
keyword_else = intern_string(t, lit("else"));
@@ -44,16 +44,16 @@ init_default_keywords(Intern_Table *t){
keyword_case = intern_string(t, lit("case"));
keyword_struct = intern_string(t, lit("struct"));
keyword_enum = intern_string(t, lit("enum"));
keyword_union = intern_string(t, lit("union"));
last_keyword = keyword_union.s.str;
intern_char = intern_string(t, lit("char"));
intern_void = intern_string(t, lit("void"));
intern_int = intern_string(t, lit("int"));
}
function B32
function B32
lex_is_keyword(Intern_String str){
B32 result = str.s.str >= first_keyword && str.s.str <= last_keyword;
return result;
@@ -61,7 +61,7 @@ lex_is_keyword(Intern_String str){
typedef enum Token_Kind{
TK_End,
TK_Mul,
TK_Div,
TK_Mod,
@@ -69,12 +69,12 @@ typedef enum Token_Kind{
TK_RightShift,
TK_FirstMul = TK_Mul,
TK_LastMul = TK_RightShift,
TK_Add,
TK_Sub,
TK_FirstAdd = TK_Add,
TK_LastAdd = TK_Sub,
TK_Equals,
TK_LesserThenOrEqual,
TK_GreaterThenOrEqual,
@@ -83,15 +83,15 @@ typedef enum Token_Kind{
TK_NotEquals,
TK_FirstCompare = TK_Equals,
TK_LastCompare = TK_NotEquals,
TK_BitAnd,
TK_BitOr,
TK_BitXor,
TK_Pointer,
TK_And,
TK_Or,
TK_FirstLogical = TK_BitAnd,
TK_LastLogical = TK_Or,
TK_Neg,
TK_Not,
TK_OpenParen,
@@ -106,9 +106,9 @@ typedef enum Token_Kind{
TK_ThreeDots,
TK_Semicolon,
TK_Dot,
TK_Colon,
TK_Assign,
TK_ColonAssign,
TK_DivAssign,
@@ -123,14 +123,14 @@ typedef enum Token_Kind{
TK_RightShiftAssign,
TK_FirstAssign = TK_Assign,
TK_LastAssign = TK_RightShiftAssign,
TK_DoubleColon,
TK_At,
TK_Decrement,
TK_Increment,
TK_PostDecrement,
TK_PostIncrement,
TK_Arrow,
TK_ExprSizeof,
TK_DocComment,
@@ -150,14 +150,14 @@ typedef struct Token{
String string;
struct{U8 *str; S64 len;};
};
union {
U64 int_val;
F64 float_val;
String error_val;
Intern_String intern_val;
};
String file;
S32 line;
U8 *line_begin;
@@ -167,7 +167,7 @@ typedef struct Token{
typedef struct Lex_Stream{
String stream;
S64 iter;
U8 *line_begin;
String file;
S32 line;
@@ -207,7 +207,7 @@ lex_is_numeric(U8 c){
return result;
}
function B32
function B32
lex_is_alphanumeric(U8 c){
B32 result = lex_is_numeric(c) || lex_is_alphabetic(c);
return result;
@@ -226,7 +226,7 @@ token_error(Token *t, String error_val){
}
function void
lex_parse_u64(Token *t){
lex_parse_u64(Token *t){
U64 result = 0;
U64 m = 1;
for(S64 i = t->len - 1; i >= 0; --i){
@@ -257,7 +257,7 @@ lex_advance(Lex_Stream *s){
}
}
function void
function void
lex_parse_string(Lex_Stream *s, Token *t, U8 c){
for(;;){
if(lexc(s) == '\\') lex_advance(s);
@@ -301,14 +301,14 @@ lex__stream(Token_Array *array, Lex_Stream *s){
while(lexc(s)){
while(lex_is_whitespace(lexc(s)))
lex_advance(s);
Token t = {0};
t.str = lexcp(s);
t.file = s->file;
t.line = s->line;
t.line_begin = s->line_begin;
lex_advance(s);
switch(*t.str){
case 0: break;
case '@': t.kind = TK_At; break;
@@ -324,7 +324,7 @@ lex__stream(Token_Array *array, Lex_Stream *s){
case ';': t.kind = TK_Semicolon; break;
case '#': t.kind = TK_Pound; break;
CASE2('!', TK_Not, TK_NotEquals);
CASE2('^', TK_BitXor, TK_XorAssign);
CASE2('^', TK_Pointer, TK_XorAssign);
CASE2('=', TK_Assign, TK_Equals);
CASE2('*', TK_Mul, TK_MulAssign);
CASE2('%', TK_Mod, TK_ModAssign);
@@ -342,8 +342,8 @@ lex__stream(Token_Array *array, Lex_Stream *s){
t.kind = TK_Dot;
}
} break;
case '<': {
if (lexc(s) == '<') {
lex_advance(s);
@@ -363,7 +363,7 @@ lex__stream(Token_Array *array, Lex_Stream *s){
t.kind = TK_LesserThen;
}
} break;
case '>': {
if (lexc(s) == '>') {
lex_advance(s);
@@ -383,7 +383,7 @@ lex__stream(Token_Array *array, Lex_Stream *s){
t.kind = TK_GreaterThen;
}
} break;
case ':': {
if (lexc(s) == ':') {
lex_advance(s);
@@ -397,7 +397,7 @@ lex__stream(Token_Array *array, Lex_Stream *s){
t.kind = TK_Colon;
}
} break;
case '-':{
if (lexc(s) == '=') {
lex_advance(s);
@@ -415,11 +415,11 @@ lex__stream(Token_Array *array, Lex_Stream *s){
t.kind = TK_Sub;
}
} break;
case '\'':{not_implemented;} break;
case '"': {
t.kind = TK_StringLit;
t.kind = TK_StringLit;
lex_parse_string(s,&t,'"');
if(t.kind != TK_Error){
t.str += 1;
@@ -427,7 +427,7 @@ lex__stream(Token_Array *array, Lex_Stream *s){
}
t.intern_val = intern_string(&array->interns, t.string);
} break;
case '/': {
if(lexc(s) == '='){
t.kind = TK_DivAssign;
@@ -464,7 +464,7 @@ lex__stream(Token_Array *array, Lex_Stream *s){
t.kind = TK_Div;
}
} break;
case '0':case '1':case '2':case '3':case '4':
case '5':case '6':case '7':case '8':case '9':{
t.kind = TK_Integer;
@@ -473,7 +473,7 @@ lex__stream(Token_Array *array, Lex_Stream *s){
lex_set_len(s, &t);
lex_parse_u64(&t);
} break;
case 'A':case 'a':case 'M':case 'm':case 'B':
case 'b':case 'N':case 'n':case 'C':case 'c':case 'O':
case 'o':case 'D':case 'd':case 'P':case 'p':case 'E':
@@ -492,15 +492,15 @@ lex__stream(Token_Array *array, Lex_Stream *s){
t.kind = TK_Keyword;
}
} break;
default: {
token_error(&t, lit("Unknown token"));
}
}
if(t.len==0)
lex_set_len(s,&t);
token_array_push(array, &t);
}
}
@@ -538,13 +538,13 @@ lex_test(){
"\"Test_Meme\"+=-===42524 4294967295 18446744073709551615"
"for if while switch :=");
Token_Array array = lex_stream(scratch, test, lit("Test1"));
Token_Kind kind[] = {
TK_Error,TK_OpenBrace,TK_CloseBrace,TK_CloseParen,TK_OpenParen,
TK_At,TK_Question,TK_BitAnd,TK_Add,TK_Sub,TK_Semicolon,
TK_ThreeDots, TK_Dot, TK_Arrow, TK_Comma, TK_DoubleColon, TK_Colon,
TK_StringLit, TK_Identifier, TK_StringLit, TK_AddAssign, TK_SubAssign,
TK_Equals, TK_Integer, TK_Integer, TK_Integer, TK_Keyword, TK_Keyword,
TK_Equals, TK_Integer, TK_Integer, TK_Integer, TK_Keyword, TK_Keyword,
TK_Keyword, TK_Keyword, TK_ColonAssign, TK_End
};
String strs[] = {
@@ -558,7 +558,7 @@ lex_test(){
U64 vals[] = {
42524, 4294967295, 18446744073709551615llu
};
int i = 0;
int ui = 0;
for(Token *t = token_array_iter_begin(&array); t->kind != TK_End; t = token_array_iter_next(&array)){
@@ -569,7 +569,7 @@ lex_test(){
}
}
arena_end_scratch();
}
//-----------------------------------------------------------------------------
@@ -584,7 +584,7 @@ global const char *token_kind_string[] = {
[TK_Mod] = "%",
[TK_BitAnd] = "&",
[TK_BitOr] = "|",
[TK_BitXor] = "^",
[TK_Pointer] = "^",
[TK_Neg] = "~",
[TK_Not] = "!",
[TK_OpenParen] = "(",