// Bytecode interpreter (stack-based VM) — enum of opcodes, interpreter context, dispatch loop.
//
|
|
// Instructions
|
|
//
|
|
enum{
|
|
INS_END,
|
|
INS_POP,
|
|
|
|
//
|
|
// Generated using code_generating_script.py
|
|
//
|
|
INS_PUSH_S64,
|
|
INS_ADD_S64,
|
|
INS_SUB_S64,
|
|
INS_DIV_S64,
|
|
INS_MUL_S64,
|
|
INS_MOD_S64,
|
|
INS_SHR_S64,
|
|
INS_SHL_S64,
|
|
INS_BITAND_S64,
|
|
INS_BITOR_S64,
|
|
INS_BITXOR_S64,
|
|
INS_BITNOT_S64,
|
|
INS_EQ_S64,
|
|
INS_NEQ_S64,
|
|
INS_GT_S64,
|
|
INS_LT_S64,
|
|
INS_OR_S64,
|
|
INS_GTE_S64,
|
|
INS_LTE_S64,
|
|
|
|
INS_PUSH_U64,
|
|
INS_ADD_U64,
|
|
INS_SUB_U64,
|
|
INS_DIV_U64,
|
|
INS_MUL_U64,
|
|
INS_MOD_U64,
|
|
INS_SHR_U64,
|
|
INS_SHL_U64,
|
|
INS_BITAND_U64,
|
|
INS_BITOR_U64,
|
|
INS_BITXOR_U64,
|
|
INS_BITNOT_U64,
|
|
INS_EQ_U64,
|
|
INS_NEQ_U64,
|
|
INS_GT_U64,
|
|
INS_LT_U64,
|
|
INS_OR_U64,
|
|
INS_GTE_U64,
|
|
INS_LTE_U64,
|
|
|
|
INS_PUSH_F64,
|
|
INS_ADD_F64,
|
|
INS_SUB_F64,
|
|
INS_DIV_F64,
|
|
INS_MUL_F64,
|
|
INS_EQ_F64,
|
|
INS_NEQ_F64,
|
|
INS_GT_F64,
|
|
INS_LT_F64,
|
|
INS_GTE_F64,
|
|
INS_LTE_F64,
|
|
|
|
|
|
//
|
|
// **End** of generated using code_generating_script.py
|
|
//
|
|
|
|
};
|
|
|
|
//
|
|
// Bytecode interpreter context
|
|
//
|
|
struct Bc{
|
|
U8 *ins_pointer;
|
|
U8 *stack_pointer;
|
|
U8 *stack_bottom;
|
|
Arena instructions;
|
|
Arena stack;
|
|
};
|
|
#define C(type, data) ((type *)data)[0] // Cast value to type and unpack the pointer so you can write to it
|
|
#include "bytecode_interpreter_generated.cpp"
|
|
|
|
function Bc
|
|
create_bytecode_interp(){
|
|
Bc b = {};
|
|
{
|
|
arena_init(&b.instructions, "Bytecode instructions"_s);
|
|
b.instructions.alignment = 1;
|
|
|
|
// Commit
|
|
arena_push_size(&b.instructions, 16);
|
|
arena_clear(&b.instructions);
|
|
b.ins_pointer = b.instructions.memory.data;
|
|
}
|
|
|
|
{
|
|
arena_init(&b.stack, "Bytecode stack"_s);
|
|
b.stack.alignment = 8;
|
|
|
|
// Setup a 4 kilobyte stack
|
|
arena_push_size(&b.stack, kib(4));
|
|
b.stack_pointer = b.stack_bottom = b.stack.memory.data;
|
|
}
|
|
|
|
return b;
|
|
}
|
|
|
|
force_inline void
|
|
emit_pop(Bc *bc){
|
|
U8 *instruction = (U8 *)arena_push_size(&bc->instructions, sizeof(U8));
|
|
*instruction = INS_POP;
|
|
}
|
|
|
|
force_inline void
|
|
emit_end(Bc *bc){
|
|
U8 *instruction = (U8 *)arena_push_size(&bc->instructions, sizeof(U8));
|
|
*instruction = INS_END;
|
|
}
|
|
|
|
#define ins_pop_t(b, T) (*((T *)ins_pop(b)))
|
|
force_inline void *
|
|
ins_pop(Bc *b){
|
|
assert_msg(b->stack_pointer != b->stack_bottom, "Reached bottom of bytecode interpreter stack");
|
|
b->stack_pointer -= sizeof(U64);
|
|
// @warning we don't do anything with type for now
|
|
Ast_Type_Kind *type = (Ast_Type_Kind *)b->stack_pointer;
|
|
unused(type);
|
|
b->stack_pointer -= sizeof(U64);
|
|
return b->stack_pointer;
|
|
}
|
|
|
|
function void
|
|
run_bytecode_interp(Bc *b){
|
|
for(;;){
|
|
U8 instruction = *b->ins_pointer++;
|
|
switch(instruction){
|
|
|
|
case INS_POP:{
|
|
void *value = ins_pop(b);
|
|
S64 *s64 = (S64 *)value;
|
|
F64 *f64 = (F64 *)value;
|
|
U64 *u64 = (U64 *)value;
|
|
unused(s64);
|
|
unused(f64);
|
|
unused(u64);
|
|
} break;
|
|
|
|
case INS_END:{
|
|
goto interp_loop_breakout;
|
|
} break;
|
|
|
|
|
|
//
|
|
// Generated using code_generating_script.py
|
|
//
|
|
|
|
case INS_PUSH_S64:{
|
|
// Fetch value from the instruction.
|
|
// instructions are tightly packed so we
|
|
// move pointer by the type size
|
|
auto value = (S64 *)b->ins_pointer;
|
|
b->ins_pointer += sizeof(S64);
|
|
ins_push_s64(b, *value);
|
|
} break;
|
|
|
|
case INS_ADD_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l + r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_SUB_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l - r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_DIV_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l / r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_MUL_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l * r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_MOD_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l % r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_SHR_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l >> r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_SHL_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l << r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_BITAND_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l & r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_BITOR_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l | r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_BITXOR_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l | r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_BITNOT_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 result = ~l;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_EQ_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l == r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_NEQ_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l != r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_GT_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l > r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_LT_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l < r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_OR_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l || r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_GTE_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l >= r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_LTE_S64:{
|
|
S64 l = ins_pop_t(b, S64);
|
|
S64 r = ins_pop_t(b, S64);
|
|
S64 result = l <= r;
|
|
ins_push_s64(b, result);
|
|
}break;
|
|
|
|
case INS_PUSH_U64:{
|
|
// Fetch value from the instruction.
|
|
// instructions are tightly packed so we
|
|
// move pointer by the type size
|
|
auto value = (U64 *)b->ins_pointer;
|
|
b->ins_pointer += sizeof(U64);
|
|
ins_push_u64(b, *value);
|
|
} break;
|
|
|
|
case INS_ADD_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l + r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_SUB_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l - r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_DIV_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l / r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_MUL_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l * r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_MOD_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l % r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_SHR_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l >> r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_SHL_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l << r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_BITAND_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l & r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_BITOR_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l | r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_BITXOR_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l | r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_BITNOT_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 result = ~l;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_EQ_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l == r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_NEQ_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l != r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_GT_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l > r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_LT_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l < r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_OR_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l || r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_GTE_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l >= r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_LTE_U64:{
|
|
U64 l = ins_pop_t(b, U64);
|
|
U64 r = ins_pop_t(b, U64);
|
|
U64 result = l <= r;
|
|
ins_push_u64(b, result);
|
|
}break;
|
|
|
|
case INS_PUSH_F64:{
|
|
// Fetch value from the instruction.
|
|
// instructions are tightly packed so we
|
|
// move pointer by the type size
|
|
auto value = (F64 *)b->ins_pointer;
|
|
b->ins_pointer += sizeof(F64);
|
|
ins_push_f64(b, *value);
|
|
} break;
|
|
|
|
case INS_ADD_F64:{
|
|
F64 l = ins_pop_t(b, F64);
|
|
F64 r = ins_pop_t(b, F64);
|
|
F64 result = l + r;
|
|
ins_push_f64(b, result);
|
|
}break;
|
|
|
|
case INS_SUB_F64:{
|
|
F64 l = ins_pop_t(b, F64);
|
|
F64 r = ins_pop_t(b, F64);
|
|
F64 result = l - r;
|
|
ins_push_f64(b, result);
|
|
}break;
|
|
|
|
case INS_DIV_F64:{
|
|
F64 l = ins_pop_t(b, F64);
|
|
F64 r = ins_pop_t(b, F64);
|
|
F64 result = l / r;
|
|
ins_push_f64(b, result);
|
|
}break;
|
|
|
|
case INS_MUL_F64:{
|
|
F64 l = ins_pop_t(b, F64);
|
|
F64 r = ins_pop_t(b, F64);
|
|
F64 result = l * r;
|
|
ins_push_f64(b, result);
|
|
}break;
|
|
|
|
case INS_EQ_F64:{
|
|
F64 l = ins_pop_t(b, F64);
|
|
F64 r = ins_pop_t(b, F64);
|
|
F64 result = l == r;
|
|
ins_push_f64(b, result);
|
|
}break;
|
|
|
|
case INS_NEQ_F64:{
|
|
F64 l = ins_pop_t(b, F64);
|
|
F64 r = ins_pop_t(b, F64);
|
|
F64 result = l != r;
|
|
ins_push_f64(b, result);
|
|
}break;
|
|
|
|
case INS_GT_F64:{
|
|
F64 l = ins_pop_t(b, F64);
|
|
F64 r = ins_pop_t(b, F64);
|
|
F64 result = l > r;
|
|
ins_push_f64(b, result);
|
|
}break;
|
|
|
|
case INS_LT_F64:{
|
|
F64 l = ins_pop_t(b, F64);
|
|
F64 r = ins_pop_t(b, F64);
|
|
F64 result = l < r;
|
|
ins_push_f64(b, result);
|
|
}break;
|
|
|
|
case INS_GTE_F64:{
|
|
F64 l = ins_pop_t(b, F64);
|
|
F64 r = ins_pop_t(b, F64);
|
|
F64 result = l >= r;
|
|
ins_push_f64(b, result);
|
|
}break;
|
|
|
|
case INS_LTE_F64:{
|
|
F64 l = ins_pop_t(b, F64);
|
|
F64 r = ins_pop_t(b, F64);
|
|
F64 result = l <= r;
|
|
ins_push_f64(b, result);
|
|
}break;
|
|
|
|
//
|
|
// **End** of generated using code_generating_script.py
|
|
//
|
|
|
|
|
|
default: invalid_codepath;
|
|
}
|
|
}
|
|
interp_loop_breakout:;
|
|
}
|
|
|
|
function void
|
|
test_interpreter(){
|
|
Bc b = create_bytecode_interp();
|
|
|
|
emit_push_f64(&b, 64);
|
|
emit_push_f64(&b, 32);
|
|
emit_neq_f64(&b);
|
|
emit_pop(&b);
|
|
|
|
|
|
emit_push_f64(&b, 64);
|
|
emit_push_f64(&b, 32);
|
|
emit_add_f64(&b);
|
|
emit_pop(&b);
|
|
|
|
emit_end(&b);
|
|
run_bytecode_interp(&b);
|
|
} |