From 51daf5556c49225ae75b99fd4a8c4530e503f368 Mon Sep 17 00:00:00 2001
From: Dmitry Stogov
Date: Tue, 19 Apr 2022 23:42:05 +0300
Subject: [PATCH] Initial support for ALLOCA, LOAD and STORE (incomplete)

---
 TODO        |   2 +-
 ir.c        |   2 +-
 ir.h        |   2 +-
 ir_emit_c.c |   4 +-
 ir_x86.dasc | 153 ++++++++++++++++++++++++++++++++++++++++++++++++----
 5 files changed, 150 insertions(+), 13 deletions(-)

diff --git a/TODO b/TODO
index 467dfda..9375d52 100644
--- a/TODO
+++ b/TODO
@@ -60,7 +60,7 @@
 - POW, NEG (fp), ABS, OVERFLOW, MIN, MAX, COND
 + TAILCALL
 + VLOAD, VSTORE
-- ALLOCA, LOAD, STORE
+? ALLOCA, LOAD, STORE
 + SWITCH
 - ir_last_use
 - binop_int $imm, mem
diff --git a/ir.c b/ir.c
index 773f6e6..6f7a881 100644
--- a/ir.c
+++ b/ir.c
@@ -93,7 +93,7 @@ void ir_print_const(ir_ctx *ctx, ir_insn *insn, FILE *f)
 #define ir_op_flag_s2      (ir_op_flag_s | 2 | (2 << IR_OP_FLAG_OPERANS_SHIFT))
 #define ir_op_flag_s3      (ir_op_flag_s | 3 | (3 << IR_OP_FLAG_OPERANS_SHIFT))
 #define ir_op_flag_xN      (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_CALL | 4 | (4 << IR_OP_FLAG_OPERANS_SHIFT))
-#define ir_op_flag_a1X1    (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_ALLOC | 1 | (2 << IR_OP_FLAG_OPERANS_SHIFT))
+#define ir_op_flag_a2      (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_ALLOC | 2 | (2 << IR_OP_FLAG_OPERANS_SHIFT))
 
 #define ir_op_kind____     IR_OPND_UNUSED
 #define ir_op_kind_def     IR_OPND_DATA
diff --git a/ir.h b/ir.h
index 00f0dd0..b3f26bb 100644
--- a/ir.h
+++ b/ir.h
@@ -245,7 +245,7 @@ int ir_mem_flush(void *ptr, size_t size);
 	_(TAILCALL, xN,   src, def, def) /* CALL+RETURN       */ \
 	\
 	/* memory reference and load/store ops */                  \
-	_(ALLOCA,   a1X1, src, num, ___) /* alloca(num)       */ \
+	_(ALLOCA,   a2,   src, def, ___) /* alloca(def)       */ \
 	_(VLOAD,    l2,   src, var, ___) /* load value of local var */ \
 	_(VSTORE,   s3,   src, var, def) /* store value to local var */ \
 	_(LOAD,     l2,   src, ref, ___) /* load from memory  */ \
diff --git a/ir_emit_c.c b/ir_emit_c.c
index 131dc3e..5e63f8a 100644
--- a/ir_emit_c.c
+++ b/ir_emit_c.c
@@ -400,7 +400,9 @@ static void ir_emit_tailcall(ir_ctx *ctx, FILE *f, ir_insn *insn)
 static void ir_emit_alloca(ir_ctx *ctx, FILE *f, ir_ref def, ir_insn *insn)
 {
 	ir_emit_def_ref(ctx, f, def);
-	fprintf(f, "alloca(%d);\n", insn->op2);
+	fprintf(f, "alloca(");
+	ir_emit_ref(ctx, f, insn->op2);
+	fprintf(f, ");\n");
 }
 
 static void ir_emit_vstore(ir_ctx *ctx, FILE *f, ir_insn *insn)
diff --git a/ir_x86.dasc b/ir_x86.dasc
index 32417b6..01c2ed7 100644
--- a/ir_x86.dasc
+++ b/ir_x86.dasc
@@ -745,6 +745,10 @@ typedef enum _ir_rule {
 	IR_VLOAD_FP,
 	IR_VSTORE_INT,
 	IR_VSTORE_FP,
+	IR_LOAD_INT,
+	IR_LOAD_FP,
+	IR_STORE_INT,
+	IR_STORE_FP,
 	IR_IF_INT,          // op1=reg(GP)|mem
 	IR_RETURN_VOID,     //
 	IR_RETURN_INT,      // op1=reg(GP, hint=%rax)|mem|imm
@@ -1154,8 +1158,16 @@ static uint32_t ir_match_insn(ir_ctx *ctx, ir_ref ref, ir_block *bb)
 			ctx->flags |= IR_HAS_CALLS;
 			return IR_CALL;
 
-//		case IR_TAILCALL:
-//		case IR_ALLOCA:
+		case IR_VAR:
+			if (ctx->use_lists[ref].count > 0) {
+				return IR_VAR;
+			} else {
+				return IR_SKIP;
+			}
+			break;
+		case IR_ALLOCA:
+			ctx->flags |= IR_USE_FRAME_POINTER;
+			return IR_ALLOCA;
 		case IR_VLOAD:
 			if (IR_IS_TYPE_INT(insn->type)) {
 				return IR_VLOAD_INT;
@@ -1170,14 +1182,18 @@ static uint32_t ir_match_insn(ir_ctx *ctx, ir_ref ref, ir_block *bb)
 				return IR_VSTORE_FP;
 			}
 			break;
-//		case IR_LOAD:
-//		case IR_STORE:
-
-		case IR_VAR:
-			if (ctx->use_lists[ref].count > 0) {
-				return IR_VAR;
+		case IR_LOAD:
+			if (IR_IS_TYPE_INT(ctx->ir_base[insn->op3].type)) {
+				return IR_LOAD_INT;
 			} else {
-				return IR_SKIP;
+				return IR_LOAD_FP;
+			}
+			break;
+		case IR_STORE:
+			if (IR_IS_TYPE_INT(ctx->ir_base[insn->op3].type)) {
+				return IR_STORE_INT;
+			} else {
+				return IR_STORE_FP;
 			}
 			break;
 		case IR_START:
@@ -2695,6 +2711,110 @@ static void ir_emit_vstore_fp(ir_ctx *ctx, ir_insn *insn)
 	| ASM_FP_VREG_REG_MOV type, ctx->vregs[insn->op2], reg
 }
 
+static void ir_emit_load_int(ir_ctx *ctx, ir_ref def, ir_insn *insn)
+{
+	ir_backend_data *data = ctx->data;
+	dasm_State **Dst = &data->dasm_state;
+	ir_ref type = insn->type;
+	ir_reg op2_reg = ir_ref_reg(ctx, insn->op2);
+	ir_reg def_reg = ir_ref_reg(ctx, def);
+	ir_reg reg;
+
+	if (def_reg >= 0) {
+		reg = def_reg;
+	} else {
+		reg = IR_REG_RAX; // TODO: temporary register
+	}
+	if (op2_reg < 0) {
+		op2_reg = IR_REG_RAX; // TODO: temporary register
+		ir_emit_load(ctx, type, insn->op2, op2_reg);
+	}
+	| ASM_REG_MEM_OP mov, type, reg, [Ra(op2_reg)]
+	if (def_reg != reg) {
+		ir_emit_store(ctx, type, reg, def);
+	}
+}
+
+static void ir_emit_load_fp(ir_ctx *ctx, ir_ref def, ir_insn *insn)
+{
+	ir_backend_data *data = ctx->data;
+	dasm_State **Dst = &data->dasm_state;
+	ir_ref type = insn->type;
+	ir_reg op2_reg = ir_ref_reg(ctx, insn->op2);
+	ir_reg def_reg = ir_ref_reg(ctx, def);
+	ir_reg reg;
+
+	if (def_reg >= 0) {
+		reg = def_reg;
+	} else {
+		reg = IR_REG_XMM7; // TODO: temporary register
+	}
+	if (op2_reg < 0) {
+		op2_reg = IR_REG_RAX; // TODO: temporary register
+		ir_emit_load(ctx, type, insn->op2, op2_reg);
+	}
+	| ASM_FP_REG_MEM_OP movss, movsd, vmovss, vmovsd, type, reg, [Ra(op2_reg)]
+	if (def_reg != reg) {
+		ir_emit_fp_store(ctx, type, reg, def);
+	}
+}
+
+static void ir_emit_store_int(ir_ctx *ctx, ir_insn *insn)
+{
+	ir_backend_data *data = ctx->data;
+	dasm_State **Dst = &data->dasm_state;
+	ir_ref type = ctx->ir_base[insn->op3].type;
+	ir_reg op2_reg = ir_ref_reg(ctx, insn->op2);
+	ir_reg op3_reg = ir_ref_reg(ctx, insn->op3);
+	ir_reg reg;
+
+	if (op3_reg >= 0) {
+		reg = op3_reg;
+	} else {
+		reg = IR_REG_RAX; // TODO: temporary register
+		ir_emit_load(ctx, type, insn->op3, reg);
+	}
+	if (op2_reg < 0) {
+		op2_reg = IR_REG_RAX; // TODO: temporary register
+		ir_emit_load(ctx, type, insn->op2, op2_reg);
+	}
+	| ASM_MEM_REG_OP mov, type, [Ra(op2_reg)], reg
+}
+
+static void ir_emit_store_fp(ir_ctx *ctx, ir_insn *insn)
+{
+	ir_backend_data *data = ctx->data;
+	dasm_State **Dst = &data->dasm_state;
+	ir_ref type = ctx->ir_base[insn->op3].type;
+	ir_reg op2_reg = ir_ref_reg(ctx, insn->op2);
+	ir_reg op3_reg = ir_ref_reg(ctx, insn->op3);
+	ir_reg reg;
+
+	if (op3_reg >= 0) {
+		reg = op3_reg;
+	} else {
+		reg = IR_REG_XMM7; // TODO: temporary register
+		ir_emit_fp_load(ctx, type, insn->op3, reg);
+	}
+	if (op2_reg < 0) {
+		op2_reg = IR_REG_RAX; // TODO: temporary register
+		ir_emit_load(ctx, type, insn->op2, op2_reg);
+	}
+	IR_ASSERT(ctx->vregs[insn->op2]);
+	| ASM_FP_MEM_REG_OP movss, movsd, vmovss, vmovsd, type, [Ra(op2_reg)], reg
+}
+
+static void ir_emit_alloca(ir_ctx *ctx, ir_ref def, ir_insn *insn)
+{
+	ir_backend_data *data = ctx->data;
+	dasm_State **Dst = &data->dasm_state;
+
+	// TODO: alignment
+	| ASM_REG_REF_OP sub, IR_ADDR, IR_REG_RSP, insn->op2
+	ir_emit_store(ctx, IR_ADDR, IR_REG_RSP, def);
+	// TODO: stack frame alignment
+}
+
 static void ir_emit_switch(ir_ctx *ctx, ir_ref def, ir_insn *insn)
 {
 	ir_backend_data *data = ctx->data;
@@ -3946,6 +4066,21 @@ void *ir_emit(ir_ctx *ctx, size_t *size)
 			case IR_VSTORE_FP:
 				ir_emit_vstore_fp(ctx, insn);
 				break;
+			case IR_LOAD_INT:
+				ir_emit_load_int(ctx, i, insn);
+				break;
+			case IR_LOAD_FP:
+				ir_emit_load_fp(ctx, i, insn);
+				break;
+			case IR_STORE_INT:
+				ir_emit_store_int(ctx, insn);
+				break;
+			case IR_STORE_FP:
+				ir_emit_store_fp(ctx, insn);
+				break;
+			case IR_ALLOCA:
+				ir_emit_alloca(ctx, i, insn);
+				break;
 			default:
 				IR_ASSERT(0 && "NIY rule/insruction");
 				break;
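
The following sketch is not part of the patch. It is a minimal C model of what the new x86 ir_emit_alloca() lowering does (subtract the size operand from %rsp via DynASM, then store %rsp as the ALLOCA result), added only to make the two alignment TODOs concrete. The names alloca_model and fake_rsp are illustrative and do not exist in the IR sources.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the machine stack pointer adjusted by the emitted code. */
static uintptr_t fake_rsp = 0x7ffff000;

/* Models the two steps emitted by ir_emit_alloca():
 *   | ASM_REG_REF_OP sub, IR_ADDR, IR_REG_RSP, insn->op2   ->  fake_rsp -= size
 *   ir_emit_store(ctx, IR_ADDR, IR_REG_RSP, def)           ->  result = fake_rsp
 * The "TODO: alignment" comments in the patch note that `size` is not rounded
 * up here, so %rsp may lose the 16-byte alignment the x86-64 ABI expects at
 * call sites. */
static uintptr_t alloca_model(uintptr_t size)
{
	fake_rsp -= size;
	return fake_rsp;
}

int main(void)
{
	/* An unaligned request (24 bytes) leaves the modeled stack pointer
	 * 8 bytes off a 16-byte boundary, which is what the TODOs refer to. */
	printf("alloca(24) -> %#lx\n", (unsigned long)alloca_model(24));
	return 0;
}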