mirror of https://github.com/danog/ir.git
synced 2024-11-26 20:34:53 +01:00
Allow reserving stack space for passing arguments
This commit is contained in:
parent 2bf6334f69
commit c71076d3f0
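This change adds a fixed_call_stack_size field to ir_ctx: when an embedder pre-reserves stack space for outgoing call arguments as part of a fixed frame, the code generator can pass arguments through that area instead of emitting a sub/add of the stack pointer around every call. A minimal sketch of how the new field might be configured, based on the ir_init()/ir_emit_code() entry points touched below; the concrete sizes are made up for illustration:

    /* Hypothetical embedder setup; 0x40 and 0x30 are illustrative sizes only. */
    ir_ctx ctx;
    size_t size;

    ir_init(&ctx, 256, 1024);
    ctx.fixed_stack_frame_size = 0x40; /* fixed area for spills and register save/restore */
    ctx.fixed_call_stack_size  = 0x30; /* fixed preallocated area for outgoing arguments */
    /* ... build the IR for the function ... */
    void *entry = ir_emit_code(&ctx, &size);

ir_init() defaults the field to 0, which keeps the old per-call sub/add behaviour.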
ir.c (1 change)

@@ -311,6 +311,7 @@ void ir_init(ir_ctx *ctx, ir_ref consts_limit, ir_ref insns_limit)
     ctx->spill_base = -1;
     ctx->fixed_stack_red_zone = 0;
     ctx->fixed_stack_frame_size = -1;
+    ctx->fixed_call_stack_size = 0;
     ctx->fixed_regset = 0;
     ctx->fixed_save_regset = 0;
     ctx->live_intervals = NULL;
ir.h (9 changes)

@@ -501,10 +501,11 @@ struct _ir_ctx {
     uint32_t *vregs;
     ir_ref vregs_count;
     int32_t spill_base; /* base register for special spill area (e.g. PHP VM frame pointer) */
-    int32_t fixed_stack_red_zone;
-    int32_t fixed_stack_frame_size;
-    uint64_t fixed_regset;
-    uint64_t fixed_save_regset;
+    uint64_t fixed_regset; /* fixed registers, excluded for regular register allocation */
+    int32_t fixed_stack_red_zone; /* reusable stack allocated by caller (default 0) */
+    int32_t fixed_stack_frame_size; /* fixed stack allocated by generated code for spills and registers save/restore */
+    int32_t fixed_call_stack_size; /* fixed preallocated stack for parameter passing (default 0) */
+    uint64_t fixed_save_regset; /* registers that are always saved/restored in prologue/epilogue */
     ir_live_interval **live_intervals;
     ir_regs *regs;
     ir_ref *prev_ref;
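The three fixed sizes describe one caller-visible frame layout: when fixed_stack_red_zone is non-zero, the assertions added to ir_emit_code() below require it to cover the fixed frame and the fixed call area exactly. The invariant an embedder has to maintain, as a one-line sketch:

    /* Sketch: required whenever fixed_stack_red_zone is non-zero. */
    ctx.fixed_stack_red_zone = ctx.fixed_stack_frame_size + ctx.fixed_call_stack_size;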
ir_aarch64.dasc
@ -1223,7 +1223,11 @@ static void ir_emit_prologue(ir_ctx *ctx)
|
||||
| sub sp, sp, #(data->call_stack_size)
|
||||
}
|
||||
} else if (data->ra_data.stack_frame_size + data->call_stack_size) {
|
||||
| sub sp, sp, #(data->ra_data.stack_frame_size + data->call_stack_size)
|
||||
if (ctx->fixed_stack_red_zone) {
|
||||
IR_ASSERT(data->ra_data.stack_frame_size + data->call_stack_size <= ctx->fixed_stack_red_zone);
|
||||
} else {
|
||||
| sub sp, sp, #(data->ra_data.stack_frame_size + data->call_stack_size)
|
||||
}
|
||||
}
|
||||
if (data->used_preserved_regs) {
|
||||
int offset;
|
||||
@@ -1325,7 +1329,11 @@ static void ir_emit_epilogue(ir_ctx *ctx)
     }
 |  ldp x29, x30, [sp], # (data->ra_data.stack_frame_size+16)
     } else if (data->ra_data.stack_frame_size + data->call_stack_size) {
-|  add sp, sp, #(data->ra_data.stack_frame_size + data->call_stack_size)
+        if (ctx->fixed_stack_red_zone) {
+            IR_ASSERT(data->ra_data.stack_frame_size + data->call_stack_size <= ctx->fixed_stack_red_zone);
+        } else {
+|  add sp, sp, #(data->ra_data.stack_frame_size + data->call_stack_size)
+        }
     }
 }
 
@@ -3574,9 +3582,13 @@ static int32_t ir_emit_arguments(ir_ctx *ctx, ir_ref def, ir_insn *insn, ir_reg
     used_stack = ir_call_used_stack(ctx, insn);
     /* Stack must be 16 byte aligned */
     used_stack = IR_ALIGNED_SIZE(used_stack, 16);
-    data->call_stack_size += used_stack;
-    if (used_stack) {
-|  sub sp, sp, #used_stack
+    if (ctx->fixed_call_stack_size && used_stack <= ctx->fixed_call_stack_size) {
+        used_stack = 0;
+    } else {
+        data->call_stack_size += used_stack;
+        if (used_stack) {
+|  sub sp, sp, #used_stack
+        }
     }
 }
 
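Both the AArch64 hunk above and the x86 one below keep the outgoing-argument area 16-byte aligned via IR_ALIGNED_SIZE(used_stack, 16), since the target calling conventions require sp alignment at calls. The macro is defined elsewhere in the tree; the following self-contained sketch shows the usual power-of-two round-up idiom it presumably follows:

    #include <assert.h>

    /* Round size up to the next multiple of a power-of-two alignment. */
    #define ALIGNED_SIZE(size, align) (((size) + ((align) - 1)) & ~((align) - 1))

    int main(void)
    {
        assert(ALIGNED_SIZE(0, 16)  == 0);
        assert(ALIGNED_SIZE(8, 16)  == 16);
        assert(ALIGNED_SIZE(16, 16) == 16);
        assert(ALIGNED_SIZE(24, 16) == 32);
        return 0;
    }

Note that the two back ends apply it at slightly different points: AArch64 aligns used_stack up front, while x86 compares the raw size against fixed_call_stack_size and aligns only when it still has to adjust the stack.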
@@ -4891,8 +4903,12 @@ void *ir_emit_code(ir_ctx *ctx, size_t *size_ptr)
     }
 
     if (ctx->fixed_stack_frame_size != -1) {
-        IR_ASSERT(data.ra_data.stack_frame_size <= ctx->fixed_stack_frame_size + ctx->fixed_stack_red_zone);
+        if (ctx->fixed_stack_red_zone) {
+            IR_ASSERT(ctx->fixed_stack_red_zone == ctx->fixed_stack_frame_size + ctx->fixed_call_stack_size);
+        }
+        IR_ASSERT(data.ra_data.stack_frame_size <= ctx->fixed_stack_frame_size);
         data.ra_data.stack_frame_size = ctx->fixed_stack_frame_size;
+        data.call_stack_size = ctx->fixed_call_stack_size;
         data.stack_frame_alignment = 0;
     }
 
ir_gcm.c (1 change)
@@ -651,6 +651,7 @@ restart:
     new_ctx.spill_base = ctx->spill_base;
     new_ctx.fixed_stack_red_zone = ctx->fixed_stack_red_zone;
     new_ctx.fixed_stack_frame_size = ctx->fixed_stack_frame_size;
+    new_ctx.fixed_call_stack_size = ctx->fixed_call_stack_size;
     new_ctx.fixed_regset = ctx->fixed_regset;
     new_ctx.fixed_save_regset = ctx->fixed_save_regset;
 
ir_x86.dasc (37 changes)
@@ -2045,7 +2045,11 @@ static void ir_emit_prologue(ir_ctx *ctx)
 |  mov Ra(IR_REG_RBP), Ra(IR_REG_RSP)
     }
     if (data->ra_data.stack_frame_size + data->call_stack_size) {
-|  sub Ra(IR_REG_RSP), (data->ra_data.stack_frame_size + data->call_stack_size)
+        if (ctx->fixed_stack_red_zone) {
+            IR_ASSERT(data->ra_data.stack_frame_size + data->call_stack_size <= ctx->fixed_stack_red_zone);
+        } else {
+|  sub Ra(IR_REG_RSP), (data->ra_data.stack_frame_size + data->call_stack_size)
+        }
     }
     if (data->used_preserved_regs) {
         int offset;
@@ -2103,7 +2107,11 @@ static void ir_emit_epilogue(ir_ctx *ctx)
 |  mov Ra(IR_REG_RSP), Ra(IR_REG_RBP)
 |  pop Ra(IR_REG_RBP)
     } else if (data->ra_data.stack_frame_size + data->call_stack_size) {
-|  add Ra(IR_REG_RSP), (data->ra_data.stack_frame_size + data->call_stack_size)
+        if (ctx->fixed_stack_red_zone) {
+            IR_ASSERT(data->ra_data.stack_frame_size + data->call_stack_size <= ctx->fixed_stack_red_zone);
+        } else {
+|  add Ra(IR_REG_RSP), (data->ra_data.stack_frame_size + data->call_stack_size)
+        }
     }
 }
 
@@ -5880,11 +5888,20 @@ static int32_t ir_emit_arguments(ir_ctx *ctx, ir_ref def, ir_insn *insn, ir_reg
         used_stack = 0;
     } else {
         used_stack = ir_call_used_stack(ctx, insn);
-        /* Stack must be 16 byte aligned */
-        int32_t aligned_stack = IR_ALIGNED_SIZE(used_stack, 16);
-        data->call_stack_size += aligned_stack;
-        if (aligned_stack) {
-|  sub Ra(IR_REG_RSP), aligned_stack
+        if (ctx->fixed_call_stack_size
+         && used_stack <= ctx->fixed_call_stack_size
+#ifdef IR_HAVE_FASTCALL
+         && !ir_is_fastcall(ctx, insn) /* fast call functions restore stack pointer */
+#endif
+        ) {
+            used_stack = 0;
+        } else {
+            /* Stack must be 16 byte aligned */
+            int32_t aligned_stack = IR_ALIGNED_SIZE(used_stack, 16);
+            data->call_stack_size += aligned_stack;
+            if (aligned_stack) {
+|  sub Ra(IR_REG_RSP), aligned_stack
+            }
         }
     }
 
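The IR_HAVE_FASTCALL carve-out exists because a fastcall callee pops its own stack arguments on return ("fast call functions restore stack pointer" in the hunk above): if those arguments lived in the preallocated fixed area, the callee's return would release stack the caller still owns. A condensed, hypothetical restatement of the decision this hunk implements (call_stack_adjustment is not a real function in the tree):

    #include <stdbool.h>
    #include <stdint.h>

    /* How many bytes the caller must sub/add around the call;
       0 means the arguments fit into the preallocated fixed area. */
    static int32_t call_stack_adjustment(int32_t used_stack,
                                         int32_t fixed_call_stack_size,
                                         bool is_fastcall)
    {
        if (fixed_call_stack_size && used_stack <= fixed_call_stack_size && !is_fastcall) {
            return 0; /* reuse the area reserved in the prologue */
        }
        /* Stack must stay 16 byte aligned. */
        return (used_stack + 15) & ~15;
    }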
@@ -7739,8 +7756,12 @@ void *ir_emit_code(ir_ctx *ctx, size_t *size_ptr)
     }
 
     if (ctx->fixed_stack_frame_size != -1) {
-        IR_ASSERT(data.ra_data.stack_frame_size <= ctx->fixed_stack_frame_size + ctx->fixed_stack_red_zone);
+        if (ctx->fixed_stack_red_zone) {
+            IR_ASSERT(ctx->fixed_stack_red_zone == ctx->fixed_stack_frame_size + ctx->fixed_call_stack_size);
+        }
+        IR_ASSERT(data.ra_data.stack_frame_size <= ctx->fixed_stack_frame_size);
         data.ra_data.stack_frame_size = ctx->fixed_stack_frame_size;
+        data.call_stack_size = ctx->fixed_call_stack_size;
         data.stack_frame_alignment = 0;
     }
 