Generating valid C code; more work on null values

This commit is contained in:
Krzosa Karol
2022-05-24 20:33:11 +02:00
parent 236ff0cd64
commit 5176b40204
17 changed files with 698 additions and 1258 deletions

View File

@@ -1,7 +1,7 @@
enum Token_Kind{
TK_End,
TK_Mul,
TK_Div,
TK_Mod,
@@ -9,12 +9,12 @@ enum Token_Kind{
TK_RightShift,
TK_FirstMul = TK_Mul,
TK_LastMul = TK_RightShift,
TK_Add,
TK_Sub,
TK_FirstAdd = TK_Add,
TK_LastAdd = TK_Sub,
TK_Equals,
TK_LesserThenOrEqual,
TK_GreaterThenOrEqual,
@@ -23,15 +23,14 @@ enum Token_Kind{
TK_NotEquals,
TK_FirstCompare = TK_Equals,
TK_LastCompare = TK_NotEquals,
TK_BitAnd,
TK_BitOr,
TK_BitXor,
TK_And,
TK_Or,
TK_FirstLogical = TK_BitAnd,
TK_LastLogical = TK_Or,
TK_Neg,
TK_Not,
TK_OpenParen,
@@ -46,10 +45,10 @@ enum Token_Kind{
TK_ThreeDots,
TK_Semicolon,
TK_Dot,
TK_NewLine,
TK_Colon,
TK_Assign,
TK_DivAssign,
TK_MulAssign,
@@ -63,14 +62,14 @@ enum Token_Kind{
TK_RightShiftAssign,
TK_FirstAssign = TK_Assign,
TK_LastAssign = TK_RightShiftAssign,
TK_DoubleColon,
TK_At,
TK_Decrement,
TK_Increment,
TK_PostDecrement,
TK_PostIncrement,
TK_Arrow,
TK_ExprSizeof,
TK_DocComment,
@@ -82,8 +81,8 @@ enum Token_Kind{
TK_Float,
TK_Integer,
TK_Keyword,
TK_Pointer = TK_BitXor,
TK_Pointer,
TK_Dereference,
};
@@ -93,7 +92,7 @@ struct Token{
String string;
struct{U8 *str; S64 len;};
};
union {
U64 int_val;
F64 float_val;
@@ -101,7 +100,7 @@ struct Token{
Intern_String intern_val;
S64 indent;
};
String file;
S32 line;
U8 *line_begin;
@@ -110,7 +109,7 @@ struct Token{
struct Lex_Stream{
String stream;
S64 iter;
U8 *line_begin;
String file;
S32 line;
@@ -157,7 +156,7 @@ lex_is_numeric(U8 c){
return result;
}
function B32
function B32
lex_is_alphanumeric(U8 c){
B32 result = lex_is_numeric(c) || lex_is_alphabetic(c);
return result;
@@ -172,9 +171,9 @@ lex_set_len(Lex_Stream *s, Token *token){
function void
lex_set_keywords(Lexer *lexer, Array<String> keywords){
Intern_String keyword = {};
Iter(keywords){
For(keywords){
keyword = intern_string(&lexer->interns, *it);
if(it == keywords.begin())
if(it == keywords.begin())
lexer->interns.first_keyword = keyword.str;
}
lexer->interns.last_keyword = keyword.str;
@@ -193,7 +192,7 @@ token_error(Token *t, String error_val){
}
function void
lex_parse_u64(Token *t){
lex_parse_u64(Token *t){
U64 result = 0;
U64 m = 1;
for(S64 i = t->len - 1; i >= 0; --i){
@@ -224,7 +223,7 @@ lex_advance(Lex_Stream *s){
}
}
function void
function void
lex_parse_string(Lex_Stream *s, Token *t, U8 c){
for(;;){
if(lexc(s) == '\\') lex_advance(s);
@@ -241,39 +240,39 @@ lex_parse_string(Lex_Stream *s, Token *t, U8 c){
}
}
#define CASE2(op, OpName, Assign) \
case op: \
if (lexc(s) == '=') { \
lex_advance(s); \
t.kind = Assign; \
} else { \
t.kind = OpName; \
} \
break
#define CASE3(op, OpName, Assign, Incr) \
case op: \
if (lexc(s) == '=') { \
lex_advance(s); \
t.kind = Assign; \
} else if (lexc(s) == op) { \
lex_advance(s); \
t.kind = Incr; \
} else { \
t.kind = OpName; \
} \
break
#define CASE2(op, OpName, Assign) \
case op: \
if (lexc(s) == '=') { \
lex_advance(s); \
t.kind = Assign; \
} else { \
t.kind = OpName; \
} \
break
#define CASE3(op, OpName, Assign, Incr) \
case op: \
if (lexc(s) == '=') { \
lex_advance(s); \
t.kind = Assign; \
} else if (lexc(s) == op) { \
lex_advance(s); \
t.kind = Incr; \
} else { \
t.kind = OpName; \
} \
break
function void
lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
while(lexc(s)){
Token t = {};
t.str = lexcp(s);
t.file = s->file;
t.line = s->line;
t.line_begin = s->line_begin;
lex_advance(s);
switch(*t.str){
case 0 : break;
case '@': t.kind = TK_At; break;
@@ -287,8 +286,8 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
case '~': t.kind = TK_Neg; break;
case '?': t.kind = TK_Question; break;
case '#': t.kind = TK_Pound; break;
case '^': t.kind = TK_Pointer; break;
CASE2('!', TK_Not, TK_NotEquals);
CASE2('^', TK_BitXor, TK_XorAssign);
CASE2('=', TK_Assign, TK_Equals);
CASE2('*', TK_Mul, TK_MulAssign);
CASE2('%', TK_Mod, TK_ModAssign);
@@ -298,15 +297,15 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
#undef CASE2
#undef CASE3
case ';': {
t.kind = TK_Semicolon;
t.kind = TK_Semicolon;
}break;
case ' ' : s->stream.str -= 1;
case '\n': {
t.kind = TK_NewLine;
if(lexc(s) == '\r')
if(lexc(s) == '\r')
lex_advance(s);
for(;;){
if(lexc(s) == ' ') {
t.indent++;
@@ -316,7 +315,7 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
else break;
lex_advance(s);
}
}break;
case '.': {
if(lexc(s) == '.' && lexci(s,1) == '.') {
@@ -327,8 +326,8 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
t.kind = TK_Dot;
}
} break;
case '<': {
if (lexc(s) == '<') {
lex_advance(s);
@@ -348,7 +347,7 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
t.kind = TK_LesserThen;
}
} break;
case '>': {
if (lexc(s) == '>') {
lex_advance(s);
@@ -368,7 +367,7 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
t.kind = TK_GreaterThen;
}
} break;
case ':': {
if (lexc(s) == ':') {
lex_advance(s);
@@ -378,7 +377,7 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
t.kind = TK_Colon;
}
} break;
case '-':{
if (lexc(s) == '=') {
lex_advance(s);
@@ -396,11 +395,11 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
t.kind = TK_Sub;
}
} break;
case '\'':{not_implemented;} break;
case '"': {
t.kind = TK_StringLit;
t.kind = TK_StringLit;
lex_parse_string(s,&t,'"');
if(t.kind != TK_Error){
t.str += 1;
@@ -408,7 +407,7 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
}
t.intern_val = intern_string(table, t.string);
} break;
case '/': {
if(lexc(s) == '='){
t.kind = TK_DivAssign;
@@ -445,7 +444,7 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
t.kind = TK_Div;
}
} break;
case '0':case '1':case '2':case '3':case '4':
case '5':case '6':case '7':case '8':case '9':{
t.kind = TK_Integer;
@@ -454,7 +453,7 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
lex_set_len(s, &t);
lex_parse_u64(&t);
} break;
case 'A':case 'a':case 'M':case 'm':case 'B':
case 'b':case 'N':case 'n':case 'C':case 'c':case 'O':
case 'o':case 'D':case 'd':case 'P':case 'p':case 'E':
@@ -473,23 +472,23 @@ lex__stream(Intern_Table *table, Array<Token> *array, Lex_Stream *s){
t.kind = TK_Keyword;
}
} break;
default: {
token_error(&t, "Unknown token"_s);
}
}
if(t.len==0)
lex_set_len(s,&t);
B32 skip = t.kind == TK_NewLine && s->inside_brace_paren > 0;
if(!skip){
array->add(t);
}
while(lex_is_whitespace(lexc(s)))
lex_advance(s);
if(s->iter >= s->stream.len) // End of stream
break;
}
@@ -510,7 +509,12 @@ lex_make(Allocator *token_string_arena, Allocator *map_allocator){
function void
lex_restream(Lexer *lexer, String istream, String file){
lexer->stream = {istream, 0, istream.str, file, 0};
lexer->stream = {};
lexer->stream.stream = istream;
lexer->stream.line_begin = istream.str;
lexer->stream.file = file;
lexer->tokens.clear();
lexer->token_iter = 0;
lex__stream(&lexer->interns, &lexer->tokens, &lexer->stream);
@@ -529,26 +533,26 @@ lex_test(){
String test = "Keyword //R\n 18446744073709551616{})(@?&+-;....->,:::/**/\"Thing\" Thingy"
"\"Test_Meme\"+=-===42524 4294967295 18446744073709551615"
"for if while switch :="_s;
Array<String> keywords = {scratch};
keywords.add("Keyword"_s);
keywords.add("for"_s);
keywords.add("if"_s);
keywords.add("while"_s);
keywords.add("switch"_s);
Lexer lexer = lex_make(scratch, scratch);
lex_set_keywords(&lexer, keywords);
lex_restream(&lexer, test, "Test1"_s);
Array<Token> arr = lexer.tokens;
Token_Kind kind[] = {
TK_Keyword, TK_NewLine, TK_Error,TK_OpenBrace,TK_CloseBrace,TK_CloseParen,TK_OpenParen,
TK_At,TK_Question,TK_BitAnd,TK_Add,TK_Sub,TK_Semicolon,
TK_ThreeDots, TK_Dot, TK_Arrow, TK_Comma, TK_DoubleColon, TK_Colon,
TK_StringLit, TK_Identifier, TK_StringLit, TK_AddAssign, TK_SubAssign,
TK_Equals, TK_Integer, TK_Integer, TK_Integer,
TK_Keyword, TK_Keyword, TK_Keyword, TK_Keyword,
TK_Equals, TK_Integer, TK_Integer, TK_Integer,
TK_Keyword, TK_Keyword, TK_Keyword, TK_Keyword,
TK_Colon, TK_Assign, TK_End
};
String strs[] = {
@@ -562,7 +566,7 @@ lex_test(){
U64 vals[] = {
42524, 4294967295, 18446744073709551615llu
};
int ui = 0;
For(arr){
assert(it->kind == kind[i]);
@@ -576,7 +580,7 @@ lex_test(){
//-----------------------------------------------------------------------------
// Token metadata
//-----------------------------------------------------------------------------
function String
function String
token_kind_string(Token_Kind kind){
switch(kind){
case TK_End: return "End of stream"_s;
@@ -587,7 +591,7 @@ token_kind_string(Token_Kind kind){
case TK_Mod: return "%"_s;
case TK_BitAnd: return "&"_s;
case TK_BitOr: return "|"_s;
case TK_BitXor: return "^"_s;
case TK_Pointer: return "^"_s;
case TK_Neg: return "~"_s;
case TK_Not: return "!"_s;
case TK_OpenParen: return "("_s;