Allow reserving stack space for passing arguments

This commit is contained in:
Dmitry Stogov 2023-02-17 15:52:26 +03:00
parent 2bf6334f69
commit c71076d3f0
5 changed files with 58 additions and 18 deletions

1
ir.c
View File

@@ -311,6 +311,7 @@ void ir_init(ir_ctx *ctx, ir_ref consts_limit, ir_ref insns_limit)
ctx->spill_base = -1; ctx->spill_base = -1;
ctx->fixed_stack_red_zone = 0; ctx->fixed_stack_red_zone = 0;
ctx->fixed_stack_frame_size = -1; ctx->fixed_stack_frame_size = -1;
ctx->fixed_call_stack_size = 0;
ctx->fixed_regset = 0; ctx->fixed_regset = 0;
ctx->fixed_save_regset = 0; ctx->fixed_save_regset = 0;
ctx->live_intervals = NULL; ctx->live_intervals = NULL;

9
ir.h
View File

@@ -501,10 +501,11 @@ struct _ir_ctx {
uint32_t *vregs; uint32_t *vregs;
ir_ref vregs_count; ir_ref vregs_count;
int32_t spill_base; /* base register for special spill area (e.g. PHP VM frame pointer) */ int32_t spill_base; /* base register for special spill area (e.g. PHP VM frame pointer) */
int32_t fixed_stack_red_zone; uint64_t fixed_regset; /* fixed registers, excluded for regular register allocation */
int32_t fixed_stack_frame_size; int32_t fixed_stack_red_zone; /* reusable stack allocated by caller (default 0) */
uint64_t fixed_regset; int32_t fixed_stack_frame_size; /* fixed stack allocated by generated code for spills and registers save/restore */
uint64_t fixed_save_regset; int32_t fixed_call_stack_size; /* fixed preallocated stack for parameter passing (default 0) */
uint64_t fixed_save_regset; /* registers that are always saved/restored in prologue/epilogue */
ir_live_interval **live_intervals; ir_live_interval **live_intervals;
ir_regs *regs; ir_regs *regs;
ir_ref *prev_ref; ir_ref *prev_ref;

View File

@@ -1223,8 +1223,12 @@ static void ir_emit_prologue(ir_ctx *ctx)
| sub sp, sp, #(data->call_stack_size) | sub sp, sp, #(data->call_stack_size)
} }
} else if (data->ra_data.stack_frame_size + data->call_stack_size) { } else if (data->ra_data.stack_frame_size + data->call_stack_size) {
if (ctx->fixed_stack_red_zone) {
IR_ASSERT(data->ra_data.stack_frame_size + data->call_stack_size <= ctx->fixed_stack_red_zone);
} else {
| sub sp, sp, #(data->ra_data.stack_frame_size + data->call_stack_size) | sub sp, sp, #(data->ra_data.stack_frame_size + data->call_stack_size)
} }
}
if (data->used_preserved_regs) { if (data->used_preserved_regs) {
int offset; int offset;
uint32_t i; uint32_t i;
@@ -1325,8 +1329,12 @@ static void ir_emit_epilogue(ir_ctx *ctx)
} }
| ldp x29, x30, [sp], # (data->ra_data.stack_frame_size+16) | ldp x29, x30, [sp], # (data->ra_data.stack_frame_size+16)
} else if (data->ra_data.stack_frame_size + data->call_stack_size) { } else if (data->ra_data.stack_frame_size + data->call_stack_size) {
if (ctx->fixed_stack_red_zone) {
IR_ASSERT(data->ra_data.stack_frame_size + data->call_stack_size <= ctx->fixed_stack_red_zone);
} else {
| add sp, sp, #(data->ra_data.stack_frame_size + data->call_stack_size) | add sp, sp, #(data->ra_data.stack_frame_size + data->call_stack_size)
} }
}
} }
static void ir_emit_binop_int(ir_ctx *ctx, ir_ref def, ir_insn *insn) static void ir_emit_binop_int(ir_ctx *ctx, ir_ref def, ir_insn *insn)
@@ -3574,11 +3582,15 @@ static int32_t ir_emit_arguments(ir_ctx *ctx, ir_ref def, ir_insn *insn, ir_reg
used_stack = ir_call_used_stack(ctx, insn); used_stack = ir_call_used_stack(ctx, insn);
/* Stack must be 16 byte aligned */ /* Stack must be 16 byte aligned */
used_stack = IR_ALIGNED_SIZE(used_stack, 16); used_stack = IR_ALIGNED_SIZE(used_stack, 16);
if (ctx->fixed_call_stack_size && used_stack <= ctx->fixed_call_stack_size) {
used_stack = 0;
} else {
data->call_stack_size += used_stack; data->call_stack_size += used_stack;
if (used_stack) { if (used_stack) {
| sub sp, sp, #used_stack | sub sp, sp, #used_stack
} }
} }
}
/* 1. move all register arguments that should be passed through stack /* 1. move all register arguments that should be passed through stack
* and collect arguments that should be passed through registers */ * and collect arguments that should be passed through registers */
@@ -4891,8 +4903,12 @@ void *ir_emit_code(ir_ctx *ctx, size_t *size_ptr)
} }
if (ctx->fixed_stack_frame_size != -1) { if (ctx->fixed_stack_frame_size != -1) {
IR_ASSERT(data.ra_data.stack_frame_size <= ctx->fixed_stack_frame_size + ctx->fixed_stack_red_zone); if (ctx->fixed_stack_red_zone) {
IR_ASSERT(ctx->fixed_stack_red_zone == ctx->fixed_stack_frame_size + ctx->fixed_call_stack_size);
}
IR_ASSERT(data.ra_data.stack_frame_size <= ctx->fixed_stack_frame_size);
data.ra_data.stack_frame_size = ctx->fixed_stack_frame_size; data.ra_data.stack_frame_size = ctx->fixed_stack_frame_size;
data.call_stack_size = ctx->fixed_call_stack_size;
data.stack_frame_alignment = 0; data.stack_frame_alignment = 0;
} }

View File

@@ -651,6 +651,7 @@ restart:
new_ctx.spill_base = ctx->spill_base; new_ctx.spill_base = ctx->spill_base;
new_ctx.fixed_stack_red_zone = ctx->fixed_stack_red_zone; new_ctx.fixed_stack_red_zone = ctx->fixed_stack_red_zone;
new_ctx.fixed_stack_frame_size = ctx->fixed_stack_frame_size; new_ctx.fixed_stack_frame_size = ctx->fixed_stack_frame_size;
new_ctx.fixed_call_stack_size = ctx->fixed_call_stack_size;
new_ctx.fixed_regset = ctx->fixed_regset; new_ctx.fixed_regset = ctx->fixed_regset;
new_ctx.fixed_save_regset = ctx->fixed_save_regset; new_ctx.fixed_save_regset = ctx->fixed_save_regset;

View File

@@ -2045,8 +2045,12 @@ static void ir_emit_prologue(ir_ctx *ctx)
| mov Ra(IR_REG_RBP), Ra(IR_REG_RSP) | mov Ra(IR_REG_RBP), Ra(IR_REG_RSP)
} }
if (data->ra_data.stack_frame_size + data->call_stack_size) { if (data->ra_data.stack_frame_size + data->call_stack_size) {
if (ctx->fixed_stack_red_zone) {
IR_ASSERT(data->ra_data.stack_frame_size + data->call_stack_size <= ctx->fixed_stack_red_zone);
} else {
| sub Ra(IR_REG_RSP), (data->ra_data.stack_frame_size + data->call_stack_size) | sub Ra(IR_REG_RSP), (data->ra_data.stack_frame_size + data->call_stack_size)
} }
}
if (data->used_preserved_regs) { if (data->used_preserved_regs) {
int offset; int offset;
uint32_t i; uint32_t i;
@@ -2103,8 +2107,12 @@ static void ir_emit_epilogue(ir_ctx *ctx)
| mov Ra(IR_REG_RSP), Ra(IR_REG_RBP) | mov Ra(IR_REG_RSP), Ra(IR_REG_RBP)
| pop Ra(IR_REG_RBP) | pop Ra(IR_REG_RBP)
} else if (data->ra_data.stack_frame_size + data->call_stack_size) { } else if (data->ra_data.stack_frame_size + data->call_stack_size) {
if (ctx->fixed_stack_red_zone) {
IR_ASSERT(data->ra_data.stack_frame_size + data->call_stack_size <= ctx->fixed_stack_red_zone);
} else {
| add Ra(IR_REG_RSP), (data->ra_data.stack_frame_size + data->call_stack_size) | add Ra(IR_REG_RSP), (data->ra_data.stack_frame_size + data->call_stack_size)
} }
}
} }
static void ir_emit_binop_int(ir_ctx *ctx, ir_ref def, ir_insn *insn) static void ir_emit_binop_int(ir_ctx *ctx, ir_ref def, ir_insn *insn)
@@ -5880,6 +5888,14 @@ static int32_t ir_emit_arguments(ir_ctx *ctx, ir_ref def, ir_insn *insn, ir_reg
used_stack = 0; used_stack = 0;
} else { } else {
used_stack = ir_call_used_stack(ctx, insn); used_stack = ir_call_used_stack(ctx, insn);
if (ctx->fixed_call_stack_size
&& used_stack <= ctx->fixed_call_stack_size
#ifdef IR_HAVE_FASTCALL
&& !ir_is_fastcall(ctx, insn) /* fast call functions restore stack pointer */
#endif
) {
used_stack = 0;
} else {
/* Stack must be 16 byte aligned */ /* Stack must be 16 byte aligned */
int32_t aligned_stack = IR_ALIGNED_SIZE(used_stack, 16); int32_t aligned_stack = IR_ALIGNED_SIZE(used_stack, 16);
data->call_stack_size += aligned_stack; data->call_stack_size += aligned_stack;
@@ -5887,6 +5903,7 @@ static int32_t ir_emit_arguments(ir_ctx *ctx, ir_ref def, ir_insn *insn, ir_reg
| sub Ra(IR_REG_RSP), aligned_stack | sub Ra(IR_REG_RSP), aligned_stack
} }
} }
}
/* 1. move all register arguments that should be passed through stack /* 1. move all register arguments that should be passed through stack
* and collect arguments that should be passed through registers */ * and collect arguments that should be passed through registers */
@@ -7739,8 +7756,12 @@ void *ir_emit_code(ir_ctx *ctx, size_t *size_ptr)
} }
if (ctx->fixed_stack_frame_size != -1) { if (ctx->fixed_stack_frame_size != -1) {
IR_ASSERT(data.ra_data.stack_frame_size <= ctx->fixed_stack_frame_size + ctx->fixed_stack_red_zone); if (ctx->fixed_stack_red_zone) {
IR_ASSERT(ctx->fixed_stack_red_zone == ctx->fixed_stack_frame_size + ctx->fixed_call_stack_size);
}
IR_ASSERT(data.ra_data.stack_frame_size <= ctx->fixed_stack_frame_size);
data.ra_data.stack_frame_size = ctx->fixed_stack_frame_size; data.ra_data.stack_frame_size = ctx->fixed_stack_frame_size;
data.call_stack_size = ctx->fixed_call_stack_size;
data.stack_frame_alignment = 0; data.stack_frame_alignment = 0;
} }