2022-06-07 13:43:35 +02:00
|
|
|
#ifndef _GNU_SOURCE
|
|
|
|
# define _GNU_SOURCE
|
|
|
|
#endif
|
2022-04-05 23:19:23 +02:00
|
|
|
|
2022-05-27 12:18:04 +02:00
|
|
|
#include <stdlib.h>
|
2022-04-05 23:19:23 +02:00
|
|
|
#include "ir.h"
|
|
|
|
|
2022-05-27 12:18:04 +02:00
|
|
|
#if defined(IR_TARGET_X86) || defined(IR_TARGET_X64)
|
|
|
|
# include "ir_x86.h"
|
2022-05-31 10:22:31 +02:00
|
|
|
#elif defined(IR_TARGET_AARCH64)
|
|
|
|
# include "ir_aarch64.h"
|
2022-05-27 12:18:04 +02:00
|
|
|
#endif
|
2022-04-05 23:19:23 +02:00
|
|
|
|
2022-05-31 10:22:31 +02:00
|
|
|
#include "ir_private.h"
|
|
|
|
|
2022-06-21 10:41:59 +02:00
|
|
|
#if defined(__GNUC__)
|
|
|
|
# pragma GCC diagnostic ignored "-Warray-bounds"
|
|
|
|
#endif
|
|
|
|
|
2022-09-01 18:19:01 +02:00
|
|
|
#define IR_SKIP IR_LAST_OP
|
|
|
|
#define IR_SKIP_REG (IR_LAST_OP+1)
|
|
|
|
#define IR_SKIP_MEM (IR_LAST_OP+2)
|
|
|
|
|
2022-05-27 12:18:04 +02:00
|
|
|
/* Return the number of physical registers available on the target
 * (IR_REG_NUM comes from the target-specific header selected above). */
int ir_regs_number(void)
{
	return IR_REG_NUM;
}
|
|
|
|
|
2022-04-05 23:19:23 +02:00
|
|
|
/* RA - Register Allocation, Liveness, Coalescing and SSA Resolution */
|
|
|
|
|
|
|
|
/* Assign a unique virtual register number to every data-producing node.
 *
 * Walks every reachable basic block in linear order and gives a fresh vreg to
 * each DATA node (and each MEM node with a non-VOID result).  Unused PARAM/VAR
 * nodes get no vreg; a used VAR gets one solely to reserve a spill slot.
 * As a side effect, ctx->prev_insn_len[i] is filled with the length (in
 * instruction slots) of the instruction *preceding* i, so later passes can
 * iterate a block backwards.
 *
 * Returns 1 (always succeeds; allocation failure handling is up to
 * ir_mem_malloc/ir_mem_calloc).
 */
int ir_assign_virtual_registers(ir_ctx *ctx)
{
	uint32_t *vregs;
	uint32_t vregs_count = 0;
	int b, i, n;
	ir_block *bb;
	ir_insn *insn;
	uint32_t flags;

	/* Assign unique virtual register to each data node */
	if (!ctx->prev_insn_len) {
		ctx->prev_insn_len = ir_mem_malloc(ctx->insns_count * sizeof(uint32_t));
	}
	vregs = ir_mem_calloc(ctx->insns_count, sizeof(ir_ref));
	/* n starts at 1: the first instruction of the first block has no
	 * predecessor instruction, so its prev_insn_len slot is a dummy. */
	n = 1;
	for (b = 1, bb = ctx->cfg_blocks + b; b <= ctx->cfg_blocks_count; b++, bb++) {
		if (bb->flags & IR_BB_UNREACHABLE) {
			continue;
		}
		for (i = bb->start, insn = ctx->ir_base + i; i <= bb->end;) {
			/* remember how far back the previous instruction starts */
			ctx->prev_insn_len[i] = n;
			flags = ir_op_flags[insn->op];
			if ((flags & IR_OP_FLAG_DATA) || ((flags & IR_OP_FLAG_MEM) && insn->type != IR_VOID)) {
				if ((insn->op == IR_PARAM || insn->op == IR_VAR) && ctx->use_lists[i].count == 0) {
					/* pass: dead PARAM/VAR needs no register */
				} else if (insn->op == IR_VAR && ctx->use_lists[i].count > 0) {
					vregs[i] = ++vregs_count; /* for spill slot */
				} else if (!ctx->rules || ir_needs_vreg(ctx, i)) {
					/* without target rules every data node gets a vreg;
					 * with rules the backend decides (fused nodes don't) */
					vregs[i] = ++vregs_count;
				}
			}
			n = ir_operands_count(ctx, insn);
			n = 1 + (n >> 2); // support for multi-word instructions like MERGE and PHI
			i += n;
			insn += n;
		}
	}
	ctx->vregs_count = vregs_count;
	ctx->vregs = vregs;

	return 1;
}
|
|
|
|
|
|
|
|
/* Lifetime intervals construction
|
|
|
|
*
|
|
|
|
* See "Linear Scan Register Allocation on SSA Form", Christian Wimmer and
|
|
|
|
* Michael Franz, CGO'10 (2010), Figure 4.
|
|
|
|
*/
|
2022-04-19 21:35:29 +02:00
|
|
|
/* Create the live interval for a local variable (IR_VAR) in vreg v.
 *
 * A VAR lives for the whole function (one range from the first to the last
 * instruction) and is marked IR_LIVE_INTERVAL_VAR so the allocator treats it
 * as a stack slot rather than a register candidate.  The interval slot for v
 * must still be empty (asserted).
 */
static void ir_add_local_var(ir_ctx *ctx, int v, uint8_t type)
{
	ir_live_interval *ival = ctx->live_intervals[v];

	IR_ASSERT(!ival);

	ival = ir_mem_malloc(sizeof(ir_live_interval));
	IR_ASSERT(type != IR_VOID);
	ival->type = type;
	ival->reg = IR_REG_NONE;
	ival->flags = IR_LIVE_INTERVAL_VAR;
	ival->vreg = v;
	ival->stack_spill_pos = -1; // not allocated
	/* whole-function range: [first insn .. last insn] */
	ival->range.start = IR_START_LIVE_POS_FROM_REF(1);
	ival->range.end = ival->end = IR_END_LIVE_POS_FROM_REF(ctx->insns_count - 1);
	ival->range.next = NULL;
	ival->use_pos = NULL;

	ival->top = ival;
	ival->next = NULL;

	ctx->live_intervals[v] = ival;
}
|
|
|
|
|
2022-05-25 09:57:21 +02:00
|
|
|
/* Add (or merge) the live range [start, end) into the interval of vreg v.
 *
 * The interval keeps its ranges as a sorted, non-overlapping singly linked
 * list headed by the embedded ival->range.  Three cases:
 *   1. No interval yet: allocate one holding just [start, end).
 *   2. The new range touches an existing one: widen it in place and fold in
 *      any following ranges it now swallows; swallowed nodes go onto the
 *      caller-owned "unused" free list for later reuse.
 *   3. Otherwise: insert a new node at the sorted position (recycling from
 *      "unused" when possible).  Inserting before the embedded head node is
 *      done by copying the head into the new node and reusing the head.
 * ival->end caches the end of the last range and is kept up to date.
 * type may be IR_VOID to mean "don't change/define the type" (asserted to
 * match when the interval already exists).
 */
static void ir_add_live_range(ir_ctx *ctx, ir_live_range **unused, int v, uint8_t type, ir_live_pos start, ir_live_pos end)
{
	ir_live_interval *ival = ctx->live_intervals[v];
	ir_live_range *p, *q, *next, *prev;

	if (!ival) {
		/* case 1: first range for this vreg */
		ival = ir_mem_malloc(sizeof(ir_live_interval));
		IR_ASSERT(type != IR_VOID);
		ival->type = type;
		ival->reg = IR_REG_NONE;
		ival->flags = 0;
		ival->vreg = v;
		ival->stack_spill_pos = -1; // not allocated
		ival->range.start = start;
		ival->range.end = ival->end = end;
		ival->range.next = NULL;
		ival->use_pos = NULL;

		ival->top = ival;
		ival->next = NULL;

		ctx->live_intervals[v] = ival;
		return;
	}

	IR_ASSERT(type == IR_VOID || type == ival->type);
	p = &ival->range;
	prev = NULL;
	/* scan sorted ranges until the new range can no longer overlap */
	while (p && end >= p->start) {
		if (p->end >= start) {
			/* case 2: overlap/adjacency — extend p in place */
			if (start < p->start) {
				p->start = start;
			}
			if (end > p->end) {
				p->end = end;
				/* merge with next */
				next = p->next;
				while (next && p->end >= next->start) {
					if (next->end > p->end) {
						p->end = next->end;
					}
					p->next = next->next;
					/* list of deleted structures is kept in the "unused" list */
					next->next = *unused;
					*unused = next;
					next = p->next;
				}
				if (!p->next) {
					/* p is now the last range: refresh cached interval end */
					ival->end = p->end;
				}
			}
			return;
		}
		prev = p;
		p = prev->next;
	}
	/* case 3: disjoint range — insert between prev and p */
	if (*unused) {
		/* reuse */
		q = *unused;
		*unused = q->next;
	} else {
		q = ir_mem_malloc(sizeof(ir_live_range));
	}
	if (prev) {
		prev->next = q;
	} else {
		/* inserting before the embedded head: move head contents into the
		 * new node and write the new range into the head itself */
		q->start = ival->range.start;
		q->end = ival->range.end;
		q->next = ival->range.next;
		p = q;
		q = &ival->range;
	}
	q->start = start;
	q->end = end;
	q->next = p;
	if (!p) {
		/* appended at the tail: refresh cached interval end */
		ival->end = end;
	}
}
|
|
|
|
|
2022-05-25 09:57:21 +02:00
|
|
|
/* Add a live range to the *fixed* interval of physical register reg.
 *
 * Fixed intervals occupy the slots past the ordinary vregs
 * (index = vregs_count + 1 + reg) and are flagged IR_LIVE_INTERVAL_FIXED
 * with the register pre-assigned, so the linear-scan allocator treats the
 * register as unavailable over these ranges.  Delegates to ir_add_live_range
 * once the interval exists.
 */
static void ir_add_fixed_live_range(ir_ctx *ctx, ir_live_range **unused, ir_reg reg, ir_live_pos start, ir_live_pos end)
{
	int v = ctx->vregs_count + 1 + reg;
	ir_live_interval *ival = ctx->live_intervals[v];
	if (!ival) {
		ival = ir_mem_malloc(sizeof(ir_live_interval));
		ival->type = IR_VOID;
		ival->reg = reg;
		ival->flags = IR_LIVE_INTERVAL_FIXED;
		ival->vreg = v;
		ival->stack_spill_pos = -1; // not allocated
		ival->range.start = start;
		ival->range.end = ival->end = end;
		ival->range.next = NULL;
		ival->use_pos = NULL;

		ival->top = ival;
		ival->next = NULL;

		ctx->live_intervals[v] = ival;
		return;
	}
	ir_add_live_range(ctx, unused, v, IR_VOID, start, end);
}
|
|
|
|
|
2022-05-12 23:32:37 +02:00
|
|
|
/* Register a short-lived temporary-register interval for instruction ref.
 *
 * Temporaries all live in the single list headed at ctx->live_intervals[0],
 * kept sorted by range start.  Each node's ->top points at the list head so
 * a temporary can find the head in O(1); when a new node becomes the head,
 * every existing node's ->top is re-pointed at it.  tmp_reg.num is encoded
 * into the flags together with IR_LIVE_INTERVAL_TEMP (see ir_has_tmp).
 */
static void ir_add_tmp(ir_ctx *ctx, ir_ref ref, ir_tmp_reg tmp_reg)
{
	ir_live_interval *ival = ir_mem_malloc(sizeof(ir_live_interval));

	ival->type = tmp_reg.type;
	ival->reg = IR_REG_NONE;
	ival->flags = IR_LIVE_INTERVAL_TEMP | tmp_reg.num;
	ival->vreg = 0;
	ival->stack_spill_pos = -1; // not allocated
	/* the temporary lives only inside instruction ref, between the
	 * sub-positions given by tmp_reg.start/end */
	ival->range.start = IR_START_LIVE_POS_FROM_REF(ref) + tmp_reg.start;
	ival->range.end = ival->end = IR_START_LIVE_POS_FROM_REF(ref) + tmp_reg.end;
	ival->range.next = NULL;
	ival->use_pos = NULL;

	if (!ctx->live_intervals[0]) {
		/* first temporary: becomes the list head */
		ival->top = ival;
		ival->next = NULL;
		ctx->live_intervals[0] = ival;
	} else if (ival->range.start >= ctx->live_intervals[0]->range.start) {
		/* sorted insert after the head */
		ir_live_interval *prev = ctx->live_intervals[0];

		while (prev->next && ival->range.start >= prev->next->range.start) {
			prev = prev->next;
		}
		ival->top = prev->top;
		ival->next = prev->next;
		prev->next = ival;
	} else {
		/* new head: re-point every node's ->top at it */
		ir_live_interval *next = ctx->live_intervals[0];

		ival->top = ival;
		ival->next = next;
		ctx->live_intervals[0] = ival;
		while (next) {
			next->top = ival;
			next = next->next;
		}
	}
	return;
}
|
|
|
|
|
2022-05-12 23:32:37 +02:00
|
|
|
/* Check whether a temporary-register interval numbered `num` has already
 * been recorded for instruction `ref`.  Walks the sorted temporary list at
 * ctx->live_intervals[0] and stops as soon as entries start past `ref`. */
static bool ir_has_tmp(ir_ctx *ctx, ir_ref ref, uint8_t num)
{
	ir_live_interval *it;

	for (it = ctx->live_intervals[0];
			it && IR_LIVE_POS_TO_REF(it->range.start) <= ref;
			it = it->next) {
		if (IR_LIVE_POS_TO_REF(it->range.start) == ref
		 && it->flags == (IR_LIVE_INTERVAL_TEMP | num)) {
			return 1;
		}
	}
	return 0;
}
|
|
|
|
|
2022-04-14 21:40:13 +02:00
|
|
|
/* Replace the start position of the range of vreg v that currently begins
 * at old_start with new_start (used to shrink a conservatively created
 * block-wide range down to the actual definition point).  The range must
 * exist (asserted). */
static void ir_fix_live_range(ir_ctx *ctx, int v, ir_live_pos old_start, ir_live_pos new_start)
{
	ir_live_range *r;

	for (r = &ctx->live_intervals[v]->range; r && r->start < old_start; r = r->next) {
		/* skip ranges that begin before the one being adjusted */
	}
	IR_ASSERT(r && r->start == old_start);
	r->start = new_start;
}
|
|
|
|
|
2022-04-07 10:11:57 +02:00
|
|
|
/* Insert use_pos into the use-position list of vreg v, keeping the list
 * sorted by position (and by op_num among uses at the same position).
 * Takes ownership of use_pos.  If the use carries a register hint or a
 * hint reference, the interval is flagged IR_LIVE_INTERVAL_HAS_HINTS so
 * the allocator knows to consult the hints.
 */
static void ir_add_use_pos(ir_ctx *ctx, int v, ir_use_pos *use_pos)
{
	ir_live_interval *ival = ctx->live_intervals[v];
	ir_use_pos *prev = NULL;
	ir_use_pos *p = ival->use_pos;

	if (use_pos->hint != IR_REG_NONE || use_pos->hint_ref != 0) {
		ival->flags |= IR_LIVE_INTERVAL_HAS_HINTS;
	}

	/* find insertion point: a def (op_num == 0) sorts after all uses at
	 * the same position; uses are ordered by operand number */
	while (p && (p->pos < use_pos->pos ||
			(p->pos == use_pos->pos &&
				(use_pos->op_num == 0 || p->op_num < use_pos->op_num)))) {
		prev = p;
		p = p->next;
	}

	if (prev) {
		use_pos->next = prev->next;
		prev->next = use_pos;
	} else {
		use_pos->next = ival->use_pos;
		ival->use_pos = use_pos;
	}
}
|
|
|
|
|
2022-05-26 23:11:31 +02:00
|
|
|
/* Allocate a use-position record for vreg v and link it into the interval's
 * sorted use list via ir_add_use_pos (which takes ownership of it). */
static void ir_add_use(ir_ctx *ctx, int v, int op_num, ir_live_pos pos, ir_reg hint, uint8_t use_flags, ir_ref hint_ref)
{
	ir_use_pos *use_pos = ir_mem_malloc(sizeof(ir_use_pos));

	use_pos->pos = pos;
	use_pos->op_num = op_num;
	use_pos->flags = use_flags;
	use_pos->hint = hint;
	use_pos->hint_ref = hint_ref;

	ir_add_use_pos(ctx, v, use_pos);
}
|
|
|
|
|
2022-05-12 23:32:37 +02:00
|
|
|
/* Record that vreg v is consumed as operand op_num of the PHI node phi_ref.
 * The use carries IR_PHI_USE so SSA-resolution can distinguish it, and the
 * PHI reference is stored in hint_ref for coalescing.  No register hint. */
static void ir_add_phi_use(ir_ctx *ctx, int v, int op_num, ir_live_pos pos, ir_ref phi_ref)
{
	ir_use_pos *use_pos;

	use_pos = ir_mem_malloc(sizeof(ir_use_pos));
	use_pos->op_num = op_num;
	use_pos->hint = IR_REG_NONE;
	use_pos->flags = IR_PHI_USE | IR_USE_SHOULD_BE_IN_REG; // TODO: ???
	use_pos->hint_ref = phi_ref;
	use_pos->pos = pos;

	ir_add_use_pos(ctx, v, use_pos);
}
|
|
|
|
|
2022-04-05 23:19:23 +02:00
|
|
|
/* Build lifetime intervals for every virtual register.
 *
 * Implements the SSA liveness algorithm of Wimmer & Franz, "Linear Scan
 * Register Allocation on SSA Form", CGO'10, Figure 4: blocks are processed
 * in reverse linear order; each block starts from the union of its
 * successors' live-in sets, PHI inputs from this block are added, then
 * instructions are walked backwards, shrinking ranges at definitions and
 * extending them at uses.  Variables live at a loop header are extended
 * over the whole loop body.  Fixed (physical-register) ranges are added for
 * target constraints, call clobbers, and scratch registers.
 *
 * Requires linearized code (IR_LINEAR) and assigned vregs; returns 0 if
 * those preconditions fail, 1 on success.  On success ctx->live_intervals
 * is populated (vregs, fixed regs, and temporaries at slot 0).
 */
int ir_compute_live_ranges(ir_ctx *ctx)
{
	int i, j, k, n;
	int b, succ;
	uint32_t flags, len;
	ir_insn *insn;
	ir_block *bb, *succ_bb;
	ir_bitset visited, live;
	ir_bitset loops = NULL;
	ir_bitqueue queue;
	ir_reg reg;
	/* free-list of recycled ir_live_range nodes, threaded through helpers */
	ir_live_range *unused = NULL;

	if (!(ctx->flags & IR_LINEAR) || !ctx->vregs) {
		return 0;
	}

	/* Compute Live Ranges */
	visited = ir_bitset_malloc(ctx->cfg_blocks_count + 1);
	len = ir_bitset_len(ctx->vregs_count + 1);
	/* one live-in bitset per block, plus the working set at offset 0 */
	live = ir_bitset_malloc((ctx->cfg_blocks_count + 1) * len * 8 * sizeof(*live));
	ctx->live_intervals = ir_mem_calloc(ctx->vregs_count + 1 + IR_REG_NUM + 1, sizeof(ir_live_interval*));
	for (b = ctx->cfg_blocks_count; b > 0; b--) {
		bb = &ctx->cfg_blocks[b];
		if (bb->flags & IR_BB_UNREACHABLE) {
			continue;
		}
		/* for each successor of b */
		ir_bitset_incl(visited, b);
		if (bb->successors_count == 0) {
			/* exit block: nothing is live-out */
			ir_bitset_clear(live, len);
		}
		for (i = 0; i < bb->successors_count; i++) {
			succ = ctx->cfg_edges[bb->successors + i];
			/* blocks must be ordered where all dominators of a block are before this block */
			IR_ASSERT(ir_bitset_in(visited, succ) || bb->loop_header == succ);
			/* live = union of successors.liveIn */
			if (i == 0) {
				ir_bitset_copy(live, live + (len * succ), len);
			} else {
				ir_bitset_union(live, live + (len * succ), len);
			}
			/* for each phi function phi of successor */
			succ_bb = &ctx->cfg_blocks[succ];
			if (succ_bb->predecessors_count > 1) {
				ir_use_list *use_list = &ctx->use_lists[succ_bb->start];

				/* find which PHI operand corresponds to the edge b->succ:
				 * PHI operand k is predecessor (k-2) of the merge */
				k = 0;
				for (j = 0; j < succ_bb->predecessors_count; j++) {
					if (ctx->cfg_edges[succ_bb->predecessors + j] == b) {
						k = j + 2;
						break;
					}
				}
				IR_ASSERT(k != 0);
				for (j = 0; j < use_list->count; j++) {
					ir_ref use = ctx->use_edges[use_list->refs + j];
					insn = &ctx->ir_base[use];
					if (insn->op == IR_PHI) {
						if (insn->ops[k] > 0) {
							/* live.add(phi.inputOf(b)) */
							IR_ASSERT(ctx->vregs[insn->ops[k]]);
							ir_bitset_incl(live, ctx->vregs[insn->ops[k]]);
							// TODO: ir_add_live_range() is used just to set ival->type
							/* intervals[phi.inputOf(b)].addRange(b.from, b.to) */
							ir_add_live_range(ctx, &unused, ctx->vregs[insn->ops[k]], insn->type,
								IR_START_LIVE_POS_FROM_REF(bb->start),
								IR_END_LIVE_POS_FROM_REF(bb->end));
							ir_add_phi_use(ctx, ctx->vregs[insn->ops[k]], k, IR_DEF_LIVE_POS_FROM_REF(bb->end), use);
						}
					}
				}
			}
		}

		/* for each opd in live */
		IR_BITSET_FOREACH(live, len, i) {
			/* intervals[opd].addRange(b.from, b.to) */
			ir_add_live_range(ctx, &unused, i, IR_VOID,
				IR_START_LIVE_POS_FROM_REF(bb->start),
				IR_END_LIVE_POS_FROM_REF(bb->end));
		} IR_BITSET_FOREACH_END();

		/* for each operation op of b in reverse order */
		for (i = bb->end; i > bb->start; i -= ctx->prev_insn_len[i]) {
			uint8_t def_flags = 0;

			insn = &ctx->ir_base[i];
			flags = ir_op_flags[insn->op];
			if (ctx->rules) {
				/* backend may require scratch temporaries for this insn */
				ir_tmp_reg tmp_regs[4];
				int n = ir_get_temporary_regs(ctx, i, tmp_regs);

				while (n > 0) {
					n--;
					ir_add_tmp(ctx, i, tmp_regs[n]);
				}
			}
			if ((flags & IR_OP_FLAG_DATA) || ((flags & IR_OP_FLAG_MEM) && insn->type != IR_VOID)) {
				if (ctx->vregs[i]) {
					if (ir_bitset_in(live, ctx->vregs[i])) {
						if (insn->op == IR_RLOAD) {
							/* RLOAD reads a fixed CPU register (op2):
							 * the interval is pinned to that register */
							ir_fix_live_range(ctx, ctx->vregs[i],
								IR_START_LIVE_POS_FROM_REF(bb->start), IR_DEF_LIVE_POS_FROM_REF(i));
							ctx->live_intervals[ctx->vregs[i]]->flags = IR_LIVE_INTERVAL_REG_LOAD;
							ctx->live_intervals[ctx->vregs[i]]->reg = insn->op2;
						} else if (insn->op != IR_PHI) {
							ir_live_pos def_pos;
							ir_ref hint_ref = 0;

							if (ctx->rules) {
								def_flags = ir_get_def_flags(ctx, i, &reg);
							} else {
								reg = IR_REG_NONE;
							}
							if (reg != IR_REG_NONE) {
								/* result is produced in a fixed register;
								 * block that register around the def */
								def_pos = IR_SAVE_LIVE_POS_FROM_REF(i);
								if (insn->op == IR_PARAM) {
									/* parameter register must be kept before it's copied */
									ir_add_fixed_live_range(ctx, &unused, reg,
										IR_START_LIVE_POS_FROM_REF(bb->start), def_pos);
								} else {
									ir_add_fixed_live_range(ctx, &unused, reg,
										IR_DEF_LIVE_POS_FROM_REF(i), def_pos);
								}
							} else if (def_flags & IR_DEF_REUSES_OP1_REG) {
								/* We add two uses to emulate move from op1 to res */
								ir_add_use(ctx, ctx->vregs[i], 0, IR_DEF_LIVE_POS_FROM_REF(i), reg, def_flags, hint_ref);
								def_pos = IR_LOAD_LIVE_POS_FROM_REF(i);
								hint_ref = IR_IS_CONST_REF(insn->op1) ? 0 : insn->op1;
							} else if (def_flags & IR_DEF_CONFLICTS_WITH_INPUT_REGS) {
								def_pos = IR_LOAD_LIVE_POS_FROM_REF(i);
							} else {
								if (insn->op == IR_PARAM) {
									/* We may reuse parameter stack slot for spilling */
									ctx->live_intervals[ctx->vregs[i]]->flags |= IR_LIVE_INTERVAL_MEM_PARAM;
								} else if (insn->op == IR_VLOAD) {
									/* Load may be fused into the usage instruction */
									ctx->live_intervals[ctx->vregs[i]]->flags |= IR_LIVE_INTERVAL_MEM_LOAD;
								}
								def_pos = IR_DEF_LIVE_POS_FROM_REF(i);
							}
							/* intervals[opd].setFrom(op.id) */
							ir_fix_live_range(ctx, ctx->vregs[i],
								IR_START_LIVE_POS_FROM_REF(bb->start), def_pos);
							ir_add_use(ctx, ctx->vregs[i], 0, def_pos, reg, def_flags, hint_ref);
						} else {
							/* PHI definition */
							ir_add_use(ctx, ctx->vregs[i], 0, IR_DEF_LIVE_POS_FROM_REF(i), IR_REG_NONE, IR_USE_SHOULD_BE_IN_REG, 0);
						}
						/* live.remove(opd) */
						ir_bitset_excl(live, ctx->vregs[i]);
					} else if (insn->op == IR_VAR) {
						if (ctx->use_lists[i].count > 0) {
							ir_add_local_var(ctx, ctx->vregs[i], insn->type);
						}
					}
				}
			}
			if (insn->op != IR_PHI && (!ctx->rules || ctx->rules[i] != IR_SKIP_MEM)) {
				n = ir_input_edges_count(ctx, insn);
				for (j = 1; j <= n; j++) {
					if (IR_OPND_KIND(flags, j) == IR_OPND_DATA) {
						ir_ref input = insn->ops[j];
						uint8_t use_flags;

						if (ctx->rules) {
							use_flags = ir_get_use_flags(ctx, i, j, &reg);
						} else {
							use_flags = 0;
							reg = IR_REG_NONE;
						}
						if (input > 0 && ctx->rules && ctx->rules[input] == IR_SKIP_MEM) {
							/* the input was fused as a memory operand:
							 * chase through it to the real register input
							 * (base of a LOAD address, op1 of ADD, ...) */
							do {
								if (ctx->ir_base[input].op == IR_LOAD) {
									input = ctx->ir_base[input].op2;
									if (input < 0 || ctx->rules[input] != IR_SKIP_MEM) {
										break;
									}
								}
								if (ctx->ir_base[input].op == IR_RLOAD) {
									/* pass */
								} else if (ctx->ir_base[input].op == IR_ADD) {
									IR_ASSERT(!IR_IS_CONST_REF(ctx->ir_base[input].op1));
									IR_ASSERT(IR_IS_CONST_REF(ctx->ir_base[input].op2));
									input = ctx->ir_base[input].op1;
								} else {
									input = 0;
								}
							} while (0);
						}
						if (input > 0 && ctx->vregs[input]) {
							ir_live_pos use_pos;

							if ((def_flags & IR_DEF_REUSES_OP1_REG) && j == 1) {
								/* op1 must survive only until the implicit
								 * move into the result register */
								use_pos = IR_LOAD_LIVE_POS_FROM_REF(i);
								if (reg != IR_REG_NONE) {
									ir_add_fixed_live_range(ctx, &unused, reg,
										use_pos, IR_USE_LIVE_POS_FROM_REF(i));
								}
							} else {
								if (reg != IR_REG_NONE) {
									use_pos = IR_LOAD_LIVE_POS_FROM_REF(i);
									ir_add_fixed_live_range(ctx, &unused, reg,
										use_pos, IR_USE_LIVE_POS_FROM_REF(i));
								} else if ((def_flags & IR_DEF_REUSES_OP1_REG) && input == insn->op1) {
									/* Input is the same as "op1" */
									use_pos = IR_LOAD_LIVE_POS_FROM_REF(i);
								} else {
									use_pos = IR_USE_LIVE_POS_FROM_REF(i);
								}
							}
							/* intervals[opd].addRange(b.from, op.id) */
							ir_add_live_range(ctx, &unused, ctx->vregs[input], ctx->ir_base[input].type,
								IR_START_LIVE_POS_FROM_REF(bb->start), use_pos);
							ir_add_use(ctx, ctx->vregs[input], j, use_pos, reg, use_flags, 0);
							/* live.add(opd) */
							ir_bitset_incl(live, ctx->vregs[input]);
						} else {
							/* constant/fused input that still needs a
							 * fixed register for materialization */
							if (reg != IR_REG_NONE) {
								ir_add_fixed_live_range(ctx, &unused, reg,
									IR_LOAD_LIVE_POS_FROM_REF(i), IR_USE_LIVE_POS_FROM_REF(i));
							}
						}
					}
				}
			}
			/* CPU specific constraints */
			if (insn->op == IR_CALL) {
				/* IR_REG_NUM is used as the pseudo-register standing for
				 * "all scratch registers clobbered by a call" */
				ir_add_fixed_live_range(ctx, &unused, IR_REG_NUM,
					IR_START_LIVE_POS_FROM_REF(i) + IR_USE_SUB_REF,
					IR_START_LIVE_POS_FROM_REF(i) + IR_DEF_SUB_REF);
			} else if (ctx->rules) {
				ir_live_pos start, end;
				ir_regset regset = ir_get_scratch_regset(ctx, i, &start, &end);

				if (regset != IR_REGSET_EMPTY) {
					IR_REGSET_FOREACH(regset, reg) {
						ir_add_fixed_live_range(ctx, &unused, reg,
							IR_START_LIVE_POS_FROM_REF(i) + start,
							IR_START_LIVE_POS_FROM_REF(i) + end);
					} IR_REGSET_FOREACH_END();
				}
			}
		}

		/* if b is loop header */
		if ((bb->flags & IR_BB_LOOP_HEADER)
		 && !ir_bitset_empty(live, len)) {
			/* variables live at loop header are alive at the whole loop body */
			uint32_t bb_set_len = ir_bitset_len(ctx->cfg_blocks_count + 1);
			int child;
			ir_block *child_bb;

			/* lazily allocate the loop-membership set and worklist queue
			 * on first use; reuse (clear) them afterwards */
			if (!loops) {
				loops = ir_bitset_malloc(ctx->cfg_blocks_count + 1);
				ir_bitqueue_init(&queue, ctx->cfg_blocks_count + 1);
			} else {
				ir_bitset_clear(loops, bb_set_len);
				ir_bitqueue_clear(&queue);
			}
			ir_bitset_incl(loops, b);
			child = b;
			do {
				child_bb = &ctx->cfg_blocks[child];

				/* extend every live vreg over this loop-body block */
				IR_BITSET_FOREACH(live, len, i) {
					ir_add_live_range(ctx, &unused, i, IR_VOID,
						IR_START_LIVE_POS_FROM_REF(child_bb->start),
						IR_END_LIVE_POS_FROM_REF(child_bb->end));
				} IR_BITSET_FOREACH_END();

				/* enqueue dominated blocks that belong to loops headed
				 * within the current loop nest */
				child = child_bb->dom_child;
				while (child) {
					child_bb = &ctx->cfg_blocks[child];
					if (child_bb->loop_header && ir_bitset_in(loops, child_bb->loop_header)) {
						ir_bitqueue_add(&queue, child);
						if (child_bb->flags & IR_BB_LOOP_HEADER) {
							ir_bitset_incl(loops, child);
						}
					}
					child = child_bb->dom_next_child;
				}
			} while ((child = ir_bitqueue_pop(&queue)) >= 0);
		}

		/* b.liveIn = live */
		ir_bitset_copy(live + (len * b), live, len);
	}

	if (unused) {
		ir_free_live_ranges(unused);
	}

	if (loops) {
		ir_mem_free(loops);
		ir_bitqueue_free(&queue);
	}

	ir_mem_free(live);
	ir_mem_free(visited);

	return 1;
}
|
|
|
|
|
|
|
|
/* Release every node of a live-range list. */
void ir_free_live_ranges(ir_live_range *live_range)
{
	while (live_range) {
		ir_live_range *next = live_range->next;

		ir_mem_free(live_range);
		live_range = next;
	}
}
|
|
|
|
|
|
|
|
/* Free the whole live-interval table: for each slot (vregs, the fixed
 * physical-register slots, and the temporaries chained at slot 0) release
 * the extra range nodes, the use-position list, and each chained interval,
 * then the table itself.  `count` is the number of vregs; the fixed slots
 * are accounted for by the += below.
 */
void ir_free_live_intervals(ir_live_interval **live_intervals, int count)
{
	uint32_t i;	/* NOTE(review): unsigned loop var compared with signed
			 * `count` below — fine for the non-negative counts used
			 * here, but worth confirming against callers */
	ir_live_interval *ival, *next;
	ir_use_pos *use_pos;

	count += IR_REG_NUM + 1;
	for (i = 0; i <= count; i++) {
		ival = live_intervals[i];
		/* split intervals are chained via ->next; free the whole chain */
		while (ival) {
			if (ival->range.next) {
				ir_free_live_ranges(ival->range.next);
			}
			use_pos = ival->use_pos;
			while (use_pos) {
				ir_use_pos *p = use_pos;
				use_pos = p->next;
				ir_mem_free(p);
			}
			next = ival->next;
			ir_mem_free(ival);
			ival = next;
		}
	}
	ir_mem_free(live_intervals);
}
|
|
|
|
|
|
|
|
|
|
|
|
/* Live Ranges coalescing */
|
|
|
|
|
2022-08-10 12:59:34 +02:00
|
|
|
/* Return the first position where two (sorted, non-overlapping-per-list)
 * range lists intersect, or 0 if they never do.  Works like a merge walk:
 * advance whichever list is entirely behind the other's current range.
 */
static ir_live_pos ir_ivals_overlap(ir_live_range *lrg1, ir_live_range *lrg2)
{
	while (1) {
		if (lrg2->start < lrg1->end) {
			if (lrg1->start < lrg2->end) {
				/* the two current ranges intersect */
				return IR_MAX(lrg1->start, lrg2->start);
			} else {
				/* lrg2 ends before lrg1 begins: advance list 2 */
				lrg2 = lrg2->next;
				if (!lrg2) {
					return 0;
				}
			}
		} else {
			/* lrg1 ends before lrg2 begins: advance list 1 */
			lrg1 = lrg1->next;
			if (!lrg1) {
				return 0;
			}
		}
	}
}
|
|
|
|
|
2022-05-12 16:43:08 +02:00
|
|
|
/* Overlap test for two virtual registers: compare their interval range
 * lists; returns the first overlapping position or 0 if disjoint. */
static ir_live_pos ir_vregs_overlap(ir_ctx *ctx, uint32_t r1, uint32_t r2)
{
	ir_live_range *ranges1 = &ctx->live_intervals[r1]->range;
	ir_live_range *ranges2 = &ctx->live_intervals[r2]->range;

	return ir_ivals_overlap(ranges1, ranges2);
}
|
|
|
|
|
2022-05-25 09:57:21 +02:00
|
|
|
/* Merge the live interval of vreg r2 into that of r1 (coalescing).
 *
 * All of r2's ranges are re-added to r1 (range nodes beyond the embedded
 * head are recycled onto the "unused" free list), r2's use positions are
 * re-linked into r1's sorted use list (dropping hint_refs that now point
 * back at r1 itself), r2's interval is freed and its slot cleared, and r1
 * is flagged IR_LIVE_INTERVAL_COALESCED.
 *
 * Fix: removed a stray empty-body `while (live_range);` that followed the
 * transfer loop — `live_range` is always NULL at that point, so the
 * statement was dead code.
 */
static void ir_vregs_join(ir_ctx *ctx, ir_live_range **unused, uint32_t r1, uint32_t r2)
{
	ir_live_interval *ival = ctx->live_intervals[r2];
	ir_live_range *live_range = &ival->range;
	ir_live_range *next;
	ir_use_pos *use_pos;

#if 0
	fprintf(stderr, "COALESCE %d -> %d\n", r2, r1);
#endif

	/* the embedded head range stays inside ival (freed below); the rest
	 * of the nodes are recycled after their contents are transferred */
	ir_add_live_range(ctx, unused, r1, ival->type, live_range->start, live_range->end);
	live_range = live_range->next;
	while (live_range) {
		next = live_range->next;
		live_range->next = *unused;
		*unused = live_range;
		ir_add_live_range(ctx, unused, r1, ival->type, live_range->start, live_range->end);
		live_range = next;
	}

	use_pos = ival->use_pos;
	while (use_pos) {
		ir_use_pos *next_use_pos = use_pos->next;
		/* a hint pointing at the vreg we merge into is now meaningless */
		if (!(use_pos->flags & IR_PHI_USE) && ctx->vregs[use_pos->hint_ref] == r1) {
			use_pos->hint_ref = 0;
		}
		ir_add_use_pos(ctx, r1, use_pos);
		use_pos = next_use_pos;
	}

	ir_mem_free(ival);
	ctx->live_intervals[r2] = NULL;
	ctx->live_intervals[r1]->flags |= IR_LIVE_INTERVAL_COALESCED;
}
|
|
|
|
|
2022-05-25 09:57:21 +02:00
|
|
|
/* Try to coalesce the vregs of `from` and `to` (e.g. a PHI and its input).
 *
 * Succeeds (returns 1) when the two intervals do not overlap.  The merge
 * direction prefers keeping an already-coalesced representative, otherwise
 * the lower-numbered vreg; whichever vreg disappears has its users' vregs
 * redirected — a full ctx->vregs scan is only needed when the losing vreg
 * was itself already coalesced and may therefore be referenced by several
 * instructions.  Returns 0 when the intervals overlap (no change made).
 */
static bool ir_try_coalesce(ir_ctx *ctx, ir_live_range **unused, ir_ref from, ir_ref to)
{
	ir_ref i;
	int v1 = ctx->vregs[from];
	int v2 = ctx->vregs[to];

	if (v1 != v2 && !ir_vregs_overlap(ctx, v1, v2)) {
		uint8_t f1 = ctx->live_intervals[v1]->flags;
		uint8_t f2 = ctx->live_intervals[v2]->flags;

		if ((f1 & IR_LIVE_INTERVAL_COALESCED) && !(f2 & IR_LIVE_INTERVAL_COALESCED)) {
			/* keep v1: it already represents a coalesced group */
			ir_vregs_join(ctx, unused, v1, v2);
			ctx->vregs[to] = v1;
		} else if ((f2 & IR_LIVE_INTERVAL_COALESCED) && !(f1 & IR_LIVE_INTERVAL_COALESCED)) {
			/* keep v2 for the symmetric reason */
			ir_vregs_join(ctx, unused, v2, v1);
			ctx->vregs[from] = v2;
		} else if (v1 < v2) {
			ir_vregs_join(ctx, unused, v1, v2);
			if (f2 & IR_LIVE_INTERVAL_COALESCED) {
				/* v2 may be shared by several nodes: rewrite them all */
				for (i = 0; i < ctx->insns_count; i++) {
					if (ctx->vregs[i] == v2) {
						ctx->vregs[i] = v1;
					}
				}
			} else {
				ctx->vregs[to] = v1;
			}
		} else {
			ir_vregs_join(ctx,unused, v2, v1);
			if (f1 & IR_LIVE_INTERVAL_COALESCED) {
				/* v1 may be shared by several nodes: rewrite them all */
				for (i = 0; i < ctx->insns_count; i++) {
					if (ctx->vregs[i] == v1) {
						ctx->vregs[i] = v2;
					}
				}
			} else {
				ctx->vregs[from] = v2;
			}
		}
		return 1;
	}
	return 0;
}
|
|
|
|
|
|
|
|
/* Record that block "b" needs SSA-deconstruction moves for the phi input
 * "from" flowing into result "to". No flag is set when both already share
 * the same virtual register (the move would be redundant). */
static void ir_add_phi_move(ir_ctx *ctx, int b, ir_ref from, ir_ref to)
{
	bool same_vreg = !IR_IS_CONST_REF(from) && ctx->vregs[from] == ctx->vregs[to];

	if (same_vreg) {
		return;
	}
	ctx->cfg_blocks[b].flags |= IR_BB_DESSA_MOVES;
#if 0
	fprintf(stderr, "BB%d: MOV %d -> %d\n", b, from, to);
#endif
}
|
|
|
|
|
|
|
|
static int ir_block_cmp(const void *b1, const void *b2, void *data)
|
|
|
|
{
|
|
|
|
ir_ctx *ctx = data;
|
|
|
|
int d1 = ctx->cfg_blocks[*(ir_ref*)b1].loop_depth;
|
|
|
|
int d2 = ctx->cfg_blocks[*(ir_ref*)b2].loop_depth;
|
|
|
|
|
|
|
|
if (d1 > d2) {
|
|
|
|
return -1;
|
|
|
|
} else if (d1 == d2) {
|
|
|
|
return 0;
|
|
|
|
} else {
|
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-21 15:38:18 +02:00
|
|
|
/* Swap op1/op2 of a commutative instruction "i" and patch the liveness data
 * to match: the (old) op2 use moves from the USE position to the LOAD
 * position (becoming operand 1, the register-reuse operand), the old op2's
 * range is shortened accordingly, and the def gets a hint on the new op1.
 * Must be kept in sync with ir_try_swap_operands(), which validates the
 * swap before calling this. */
static void ir_swap_operands(ir_ctx *ctx, ir_ref i, ir_insn *insn)
{
	ir_live_pos pos = IR_USE_LIVE_POS_FROM_REF(i);
	ir_live_pos load_pos = IR_LOAD_LIVE_POS_FROM_REF(i);
	ir_live_interval *ival;
	ir_live_range *r;
	ir_use_pos *p, *p1 = NULL, *p2 = NULL;
	ir_ref tmp;

	/* swap the operands themselves */
	tmp = insn->op1;
	insn->op1 = insn->op2;
	insn->op2 = tmp;

	/* new op1 (was op2): its use now happens at the LOAD position as op_num 1 */
	ival = ctx->live_intervals[ctx->vregs[insn->op1]];
	p = ival->use_pos;
	while (p) {
		if (p->pos == pos) {
			p->pos = load_pos;
			p->op_num = 1;
			p1 = p;
			break;
		}
		p = p->next;
	}

	/* the definition of "i" should now prefer the register of the new op1 */
	ival = ctx->live_intervals[ctx->vregs[i]];
	p = ival->use_pos;
	while (p) {
		if (p->pos == load_pos) {
			p->hint_ref = insn->op1;
			break;
		}
		p = p->next;
	}

	/* new op2 (was op1): extend/move its use from LOAD to USE position */
	if (insn->op2 > 0 && ctx->vregs[insn->op2]) {
		ival = ctx->live_intervals[ctx->vregs[insn->op2]];
		r = &ival->range;
		while (r) {
			if (r->end == load_pos) {
				r->end = pos;
				if (!r->next) {
					/* keep the cached interval end in sync with the last range */
					ival->end = pos;
				}
				break;
			}
			r = r->next;
		}
		p = ival->use_pos;
		while (p) {
			if (p->pos == load_pos) {
				p->pos = pos;
				p->op_num = 2;
				p2 = p;
				break;
			}
			p = p->next;
		}
	}
	/* the per-operand flags travel with the operand slot, not the value */
	if (p1 && p2) {
		uint8_t tmp = p1->flags;
		p1->flags = p2->flags;
		p2->flags = tmp;
	}
}
|
|
|
|
|
|
|
|
/* Check whether coalescing vregs "use" and "def" around instruction "ref"
 * would pin the merged interval to two different physical registers:
 * the last register hint of "use" before ref vs. the first hint of "def"
 * after ref. Returns non-zero on a conflict. */
static int ir_hint_conflict(ir_ctx *ctx, ir_ref ref, int use, int def)
{
	ir_use_pos *p;
	ir_reg use_hint = IR_REG_NONE;
	ir_reg def_hint = IR_REG_NONE;

	/* remember the latest hint of "use" strictly before ref */
	for (p = ctx->live_intervals[use]->use_pos; p; p = p->next) {
		if (IR_LIVE_POS_TO_REF(p->pos) == ref) {
			break;
		}
		if (p->hint != IR_REG_NONE) {
			use_hint = p->hint;
		}
	}

	/* find the earliest hint of "def" after ref */
	for (p = ctx->live_intervals[def]->use_pos; p; p = p->next) {
		if (IR_LIVE_POS_TO_REF(p->pos) > ref && p->hint != IR_REG_NONE) {
			def_hint = p->hint;
			break;
		}
	}

	return use_hint != def_hint && use_hint != IR_REG_NONE && def_hint != IR_REG_NONE;
}
|
|
|
|
|
|
|
|
/* Decide whether swapping op1/op2 of a commutative instruction "i" helps
 * register allocation (op1 is the register-reuse operand on 2-operand ISAs).
 * If op1 can already share a register with the result, nothing is done.
 * Otherwise the swap is tried tentatively: op2's range is shortened as if
 * swapped, the overlap/hint checks are re-run, and the change is either
 * committed via ir_swap_operands() or rolled back.
 * Returns 1 if the operands were swapped, 0 otherwise. */
static int ir_try_swap_operands(ir_ctx *ctx, ir_ref i, ir_insn *insn)
{
	if (insn->op1 > 0
	 && ctx->vregs[insn->op1] != ctx->vregs[i]
	 && !ir_vregs_overlap(ctx, ctx->vregs[insn->op1], ctx->vregs[i])
	 && !ir_hint_conflict(ctx, i, ctx->vregs[insn->op1], ctx->vregs[i])) {
		/* pass: op1 already coalescable with the result — no swap needed */
	} else if (insn->op2 > 0 && insn->op1 != insn->op2
	 && (ir_op_flags[insn->op] & IR_OP_FLAG_COMMUTATIVE)) {
		if (ctx->vregs[insn->op2] != ctx->vregs[i]) {
			ir_live_pos pos = IR_USE_LIVE_POS_FROM_REF(i);
			ir_live_pos load_pos = IR_LOAD_LIVE_POS_FROM_REF(i);
			ir_live_interval *ival = ctx->live_intervals[ctx->vregs[insn->op2]];
			ir_live_range *r = &ival->range;

			while (r) {
				if (r->end == pos) {
					/* tentatively shorten op2's range to the LOAD position */
					r->end = load_pos;
					if (!r->next) {
						ival->end = load_pos;
					}
					if (!ir_vregs_overlap(ctx, ctx->vregs[insn->op2], ctx->vregs[i])
					 && !ir_hint_conflict(ctx, i, ctx->vregs[insn->op2], ctx->vregs[i])) {
						/* swap is profitable and safe — commit */
						ir_swap_operands(ctx, i, insn);
						return 1;
					} else {
						/* roll back the tentative shortening */
						r->end = pos;
						if (!r->next) {
							ival->end = pos;
						}
					}
					break;
				}
				r = r->next;
			}
		}
	}
	return 0;
}
|
|
|
|
|
2022-04-05 23:19:23 +02:00
|
|
|
/* Phi coalescing pass: try to give every phi and its inputs a common virtual
 * register; where that is impossible, mark the predecessor block as needing
 * SSA-deconstruction moves. Afterwards, optionally swap operands of
 * commutative instructions to favor 2-operand ISAs, and compact the vreg
 * numbering if any vregs were merged away. Always returns 1. */
int ir_coalesce(ir_ctx *ctx)
{
	int b, i, n, succ;
	ir_ref *p, use, input, k, j;
	ir_block *bb, *succ_bb;
	ir_use_list *use_list;
	ir_insn *insn;
	uint32_t *offsets;
	ir_worklist blocks;
	bool compact = 0;
	ir_live_range *unused = NULL;  /* free-list of recycled range nodes */
	ir_reg reg;

	/* Collect a list of blocks which are predecessors of blocks with phi functions */
	ir_worklist_init(&blocks, ctx->cfg_blocks_count + 1);
	for (b = 1, bb = &ctx->cfg_blocks[1]; b <= ctx->cfg_blocks_count; b++, bb++) {
		if (bb->flags & IR_BB_UNREACHABLE) {
			continue;
		}
		if (bb->predecessors_count > 1) {
			use_list = &ctx->use_lists[bb->start];
			n = use_list->count;
			for (i = 0, p = &ctx->use_edges[use_list->refs]; i < n; i++, p++) {
				use = *p;
				insn = &ctx->ir_base[use];
				if (insn->op == IR_PHI) {
					k = ir_input_edges_count(ctx, insn);
					/* phi input j corresponds to predecessor j-2 */
					for (j = 2; j <= k; j++) {
						ir_worklist_push(&blocks, ctx->cfg_edges[bb->predecessors + (j-2)]);
					}
				}
			}
		}
	}

	/* process deeper loops first (see ir_block_cmp) */
	qsort_r(blocks.l.a.refs, ir_worklist_len(&blocks), sizeof(ir_ref), ir_block_cmp, ctx);

	while (ir_worklist_len(&blocks)) {
		b = ir_worklist_pop(&blocks);
		bb = &ctx->cfg_blocks[b];
		IR_ASSERT(bb->successors_count == 1);
		succ = ctx->cfg_edges[bb->successors];
		succ_bb = &ctx->cfg_blocks[succ];
		IR_ASSERT(succ_bb->predecessors_count > 1);
		/* find which phi operand index corresponds to this predecessor */
		k = 0;
		for (j = 0; j < succ_bb->predecessors_count; j++) {
			if (ctx->cfg_edges[succ_bb->predecessors + j] == b) {
				k = j + 2;
				break;
			}
		}
		IR_ASSERT(k != 0);
		use_list = &ctx->use_lists[succ_bb->start];
		n = use_list->count;
		for (i = 0, p = &ctx->use_edges[use_list->refs]; i < n; i++, p++) {
			use = *p;
			insn = &ctx->ir_base[use];
			if (insn->op == IR_PHI) {
				input = insn->ops[k];
				if (input > 0) {
					if (!ir_try_coalesce(ctx, &unused, input, use)) {
						ir_add_phi_move(ctx, b, input, use);
					} else {
						/* a vreg disappeared — renumber at the end */
						compact = 1;
					}
				} else {
					/* Move for constant input */
					ir_add_phi_move(ctx, b, input, use);
				}
			}
		}
	}
	if (unused) {
		ir_free_live_ranges(unused);
	}
	ir_worklist_free(&blocks);

#if 1
	if (ctx->rules) {
		/* try to swap operands of commutative instructions for better register allocation */
		for (b = 1, bb = &ctx->cfg_blocks[1]; b <= ctx->cfg_blocks_count; b++, bb++) {
			for (i = bb->start, insn = ctx->ir_base + i; i <= bb->end;) {
				if (ir_get_def_flags(ctx, i, &reg) & IR_DEF_REUSES_OP1_REG) {
					if (insn->op2 > 0 && insn->op1 != insn->op2
					 && (ir_op_flags[insn->op] & IR_OP_FLAG_COMMUTATIVE)) {
						ir_try_swap_operands(ctx, i, insn);
					}
//					if (insn->op1 > 0) {
//						ir_try_coalesce(ctx, insn->op1, i);
//					}
//				} else if (insn->op == IR_COPY) {
//					if (insn->op1 > 0) {
//						ir_try_coalesce(ctx, insn->op1, i);
//					}
				}
				n = ir_operands_count(ctx, insn);
				n = 1 + (n >> 2); // support for multi-word instructions like MERGE and PHI
				i += n;
				insn += n;
			}
		}
	}
#endif

	if (compact) {
#if 1
		/* Compact the vreg numbering: shift surviving intervals down and
		 * record per-vreg offsets so instruction->vreg mappings can be fixed. */
		offsets = ir_mem_calloc(ctx->vregs_count + 1, sizeof(uint32_t));
		for (i = 1, n = 1; i <= ctx->vregs_count; i++) {
			if (ctx->live_intervals[i]) {
				if (i != n) {
					ctx->live_intervals[n] = ctx->live_intervals[i];
					ctx->live_intervals[n]->vreg = n;
					offsets[i] = i - n;
				}
				n++;
			}
		}
		n--;
		if (n != ctx->vregs_count) {
			j = ctx->vregs_count - n;
			/* also shift the fixed (per-physical-register) intervals that
			 * live right after the vreg intervals in the same array */
			for (i = n + 1; i <= n + IR_REG_NUM + 1; i++) {
				ctx->live_intervals[i] = ctx->live_intervals[i + j];
				if (ctx->live_intervals[i]) {
					ctx->live_intervals[i]->vreg = i;
				}
			}
			for (j = 1; j < ctx->insns_count; j++) {
				if (ctx->vregs[j]) {
					ctx->vregs[j] -= offsets[ctx->vregs[j]];
				}
			}
			ctx->vregs_count = n;
		}
		ir_mem_free(offsets);
#endif
	}

	return 1;
}
|
|
|
|
|
|
|
|
/* SSA Deconstruction */
|
|
|
|
|
|
|
|
/* Scan all phi functions and flag every predecessor block that will need
 * SSA-deconstruction moves (constant phi input, or input in a different
 * virtual register than the phi result). Always returns 1. */
int ir_compute_dessa_moves(ir_ctx *ctx)
{
	int b, i, n;
	ir_ref j, k, *p, use;
	ir_block *bb;
	ir_use_list *use_list;
	ir_insn *insn;

	for (b = 1, bb = &ctx->cfg_blocks[1]; b <= ctx->cfg_blocks_count; b++, bb++) {
		if ((bb->flags & IR_BB_UNREACHABLE) || bb->predecessors_count <= 1) {
			continue;
		}
		use_list = &ctx->use_lists[bb->start];
		n = use_list->count;
		for (i = 0, p = &ctx->use_edges[use_list->refs]; i < n; i++, p++) {
			use = *p;
			insn = &ctx->ir_base[use];
			if (insn->op != IR_PHI) {
				continue;
			}
			k = ir_input_edges_count(ctx, insn);
			/* phi operand j corresponds to predecessor j-2 */
			for (j = 2; j <= k; j++) {
				if (IR_IS_CONST_REF(insn->ops[j]) || ctx->vregs[insn->ops[j]] != ctx->vregs[use]) {
					int pred = ctx->cfg_edges[bb->predecessors + (j-2)];
					ctx->cfg_blocks[pred].flags |= IR_BB_DESSA_MOVES;
				}
			}
		}
	}
	return 1;
}
|
|
|
|
|
|
|
|
/* Emit the SSA-deconstruction moves for edge "b" -> its single successor,
 * sequentializing the parallel copies required by the successor's phis.
 * Uses the loc/pred worklist algorithm (emit all moves whose destination is
 * not itself a pending source; break cycles with a temporary, signalled by
 * emit_copy(..., b, 0)). Returns 0 if the block needs no moves, else 1. */
int ir_gen_dessa_moves(ir_ctx *ctx, int b, emit_copy_t emit_copy)
{
	int succ, j, k = 0, n = 0;
	ir_block *bb, *succ_bb;
	ir_use_list *use_list;
	ir_ref *loc, *pred;
	uint32_t len;
	ir_bitset todo, ready;

	bb = &ctx->cfg_blocks[b];
	if (!(bb->flags & IR_BB_DESSA_MOVES)) {
		return 0;
	}
	IR_ASSERT(bb->successors_count == 1);
	succ = ctx->cfg_edges[bb->successors];
	succ_bb = &ctx->cfg_blocks[succ];
	IR_ASSERT(succ_bb->predecessors_count > 1);
	use_list = &ctx->use_lists[succ_bb->start];

	/* find which phi operand index corresponds to this predecessor */
	for (j = 0; j < succ_bb->predecessors_count; j++) {
		if (ctx->cfg_edges[succ_bb->predecessors + j] == b) {
			k = j + 2;
			break;
		}
	}
	IR_ASSERT(k != 0);

	/* loc[a] = current location of value a; pred[d] = source feeding dest d */
	loc = ir_mem_calloc(ctx->insns_count * 2, sizeof(ir_ref));
	pred = loc + ctx->insns_count;
	len = ir_bitset_len(ctx->insns_count);
	todo = ir_bitset_malloc(ctx->insns_count);
	ready = ir_bitset_malloc(ctx->insns_count);

	for (j = 0; j < use_list->count; j++) {
		ir_ref ref = ctx->use_edges[use_list->refs + j];
		ir_insn *insn = &ctx->ir_base[ref];
		if (insn->op == IR_PHI) {
			ir_ref input = insn->ops[k];
			if (IR_IS_CONST_REF(input)) {
				/* constants cannot participate in cycles — emit immediately */
				emit_copy(ctx, insn->type, input, ref);
			} else if (ctx->vregs[input] != ctx->vregs[ref]) {
				loc[input] = input;
				pred[ref] = input;
				ir_bitset_incl(todo, ref);
				n++;
			}
		}
	}

	/* a destination is "ready" if it is not also a source of a pending move */
	IR_BITSET_FOREACH(todo, len, j) {
		if (!loc[j]) {
			ir_bitset_incl(ready, j);
		}
	} IR_BITSET_FOREACH_END();

	while ((j = ir_bitset_pop_first(todo, len)) >= 0) {
		uint32_t a, b, c;

		/* drain all moves that can be emitted without clobbering a source */
		while ((b = ir_bitset_pop_first(ready, len)) != (uint32_t)-1) {
			a = pred[b];
			c = loc[a];
			emit_copy(ctx, ctx->ir_base[b].type, c, b);
			loc[a] = b;
			/* copying freed a's original slot — its own move may be ready now */
			if (a == c && pred[a]) {
				ir_bitset_incl(ready, a);
			}
		}
		/* "todo" is non-empty only because of a cycle: break it by moving
		 * the pending destination to a temporary (dst 0 = temp location) */
		b = j;
		if (b != loc[pred[b]]) {
			emit_copy(ctx, ctx->ir_base[b].type, b, 0);
			loc[b] = 0;
			ir_bitset_incl(ready, b);
		}
	}

	ir_mem_free(ready);
	ir_mem_free(todo);
	ir_mem_free(loc);

	return 1;
}
|
|
|
|
|
|
|
|
/* Linear Scan Register Allocation
|
|
|
|
*
|
|
|
|
* See "Optimized Interval Splitting in a Linear Scan Register Allocator",
|
|
|
|
* Christian Wimmer VEE'10 (2005), Figure 2.
|
|
|
|
*/
|
2022-04-29 11:14:26 +02:00
|
|
|
#ifdef IR_DEBUG
/* Tracing macros for the linear-scan allocator; active only when the
 * context has IR_DEBUG_RA set. They print an interval as
 * "R<vreg> [start...end)" plus action-specific details. */
# define IR_LOG_LSRA(action, ival, comment) do { \
		if (ctx->flags & IR_DEBUG_RA) { \
			ir_live_interval *_ival = (ival); \
			ir_live_pos _start = _ival->range.start; \
			ir_live_pos _end = ir_ival_end(_ival); \
			fprintf(stderr, action " R%d [%d.%d...%d.%d)" comment "\n", \
				_ival->vreg, \
				IR_LIVE_POS_TO_REF(_start), IR_LIVE_POS_TO_SUB_REF(_start), \
				IR_LIVE_POS_TO_REF(_end), IR_LIVE_POS_TO_SUB_REF(_end)); \
		} \
	} while (0)
# define IR_LOG_LSRA_ASSIGN(action, ival, comment) do { \
		if (ctx->flags & IR_DEBUG_RA) { \
			ir_live_interval *_ival = (ival); \
			ir_live_pos _start = _ival->range.start; \
			ir_live_pos _end = ir_ival_end(_ival); \
			fprintf(stderr, action " R%d [%d.%d...%d.%d) to %s" comment "\n", \
				_ival->vreg, \
				IR_LIVE_POS_TO_REF(_start), IR_LIVE_POS_TO_SUB_REF(_start), \
				IR_LIVE_POS_TO_REF(_end), IR_LIVE_POS_TO_SUB_REF(_end), \
				ir_reg_name(_ival->reg, _ival->type)); \
		} \
	} while (0)
# define IR_LOG_LSRA_SPLIT(ival, pos) do { \
		if (ctx->flags & IR_DEBUG_RA) { \
			ir_live_interval *_ival = (ival); \
			ir_live_pos _start = _ival->range.start; \
			ir_live_pos _end = ir_ival_end(_ival); \
			ir_live_pos _pos = (pos); \
			fprintf(stderr, "	---- Split R%d [%d.%d...%d.%d) at %d.%d\n", \
				_ival->vreg, \
				IR_LIVE_POS_TO_REF(_start), IR_LIVE_POS_TO_SUB_REF(_start), \
				IR_LIVE_POS_TO_REF(_end), IR_LIVE_POS_TO_SUB_REF(_end), \
				IR_LIVE_POS_TO_REF(_pos), IR_LIVE_POS_TO_SUB_REF(_pos)); \
		} \
	} while (0)
# define IR_LOG_LSRA_CONFLICT(action, ival, pos) do { \
		if (ctx->flags & IR_DEBUG_RA) { \
			ir_live_interval *_ival = (ival); \
			ir_live_pos _start = _ival->range.start; \
			ir_live_pos _end = ir_ival_end(_ival); \
			ir_live_pos _pos = (pos); \
			fprintf(stderr, action " R%d [%d.%d...%d.%d) assigned to %s at %d.%d\n", \
				_ival->vreg, \
				IR_LIVE_POS_TO_REF(_start), IR_LIVE_POS_TO_SUB_REF(_start), \
				IR_LIVE_POS_TO_REF(_end), IR_LIVE_POS_TO_SUB_REF(_end), \
				ir_reg_name(_ival->reg, _ival->type), \
				IR_LIVE_POS_TO_REF(_pos), IR_LIVE_POS_TO_SUB_REF(_pos)); \
		} \
	} while (0)
#else
/* No-op stubs for release builds.
 * Fix: removed the stray trailing ";" from IR_LOG_LSRA_CONFLICT — it made
 * the empty macro expand to ";", yielding ";;" at call sites and breaking
 * any use inside an unbraced if/else. */
# define IR_LOG_LSRA(action, ival, comment)
# define IR_LOG_LSRA_ASSIGN(action, ival, comment)
# define IR_LOG_LSRA_SPLIT(ival, pos)
# define IR_LOG_LSRA_CONFLICT(action, ival, pos)
#endif
|
|
|
|
|
2022-08-10 08:47:06 +02:00
|
|
|
/* Return the (exclusive) end position of live interval "ival".
 * The cached "end" field is kept in sync with the last range by all code
 * that mutates ranges; the disabled branch is the original implementation
 * that walked the range list, kept for reference. */
IR_ALWAYS_INLINE ir_live_pos ir_ival_end(ir_live_interval *ival)
{
#if 1
	return ival->end;
#else
	ir_live_range *live_range = &ival->range;

	while (live_range->next) {
		live_range = live_range->next;
	}
	return live_range->end;
#endif
}
|
|
|
|
|
2022-09-05 21:43:27 +02:00
|
|
|
#ifdef IR_DEBUG
|
2022-04-29 18:24:15 +02:00
|
|
|
static bool ir_ival_covers(ir_live_interval *ival, ir_live_pos position)
|
2022-04-05 23:19:23 +02:00
|
|
|
{
|
|
|
|
ir_live_range *live_range = &ival->range;
|
|
|
|
|
|
|
|
do {
|
2022-08-10 09:38:30 +02:00
|
|
|
if (position < live_range->end) {
|
|
|
|
return position >= live_range->start;
|
2022-04-05 23:19:23 +02:00
|
|
|
}
|
|
|
|
live_range = live_range->next;
|
|
|
|
} while (live_range);
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|
2022-09-05 21:43:27 +02:00
|
|
|
#endif
|
2022-04-05 23:19:23 +02:00
|
|
|
|
2022-04-29 18:24:15 +02:00
|
|
|
/* Check whether the interval has a lifetime hole anywhere in [from, to]:
 * i.e. the span is not fully contained in a single live range. */
static bool ir_ival_has_hole_between(ir_live_interval *ival, ir_live_pos from, ir_live_pos to)
{
	ir_live_range *r;

	for (r = &ival->range; r; r = r->next) {
		if (from < r->start) {
			/* span begins before this range starts — inside a hole */
			return 1;
		}
		if (to <= r->end) {
			/* whole span fits inside this range */
			return 0;
		}
		if (from >= r->end) {
			return 1;
		}
	}
	return 0;
}
|
|
|
|
|
|
|
|
|
2022-04-29 14:24:41 +02:00
|
|
|
/* Return the last use position at or before "pos" whose flags intersect
 * "flags", or 0 when there is none. Use positions are sorted ascending. */
static ir_live_pos ir_last_use_pos_before(ir_live_interval *ival, ir_live_pos pos, uint8_t flags)
{
	ir_live_pos found = 0;
	ir_use_pos *p;

	for (p = ival->use_pos; p && p->pos <= pos; p = p->next) {
		if (p->flags & flags) {
			found = p->pos;
		}
	}
	return found;
}
|
|
|
|
|
|
|
|
/* Return the first use position strictly after "pos" whose flags intersect
 * "flags", or 0x7fffffff (effectively +infinity) when there is none. */
static ir_live_pos ir_first_use_pos_after(ir_live_interval *ival, ir_live_pos pos, uint8_t flags)
{
	ir_use_pos *p = ival->use_pos;

	/* skip everything at or before pos */
	for (; p && p->pos <= pos; p = p->next) {
	}
	/* then skip uses that do not carry any of the requested flags */
	for (; p && !(p->flags & flags); p = p->next) {
	}
	return p ? p->pos : 0x7fffffff;
}
|
|
|
|
|
2022-04-29 18:24:15 +02:00
|
|
|
/* Map a live position back to the basic block containing its instruction.
 * Linear scan over all blocks; asserts if the position maps to no block. */
static ir_block *ir_block_from_live_pos(ir_ctx *ctx, ir_live_pos pos)
{
	ir_ref ref = IR_LIVE_POS_TO_REF(pos);
	ir_block *bb = ctx->cfg_blocks + 1;
	int b;

	// TODO: use binary search or map
	for (b = 1; b <= ctx->cfg_blocks_count; b++, bb++) {
		if (ref >= bb->start && ref <= bb->end) {
			return bb;
		}
	}
	IR_ASSERT(0);
	return NULL;
}
|
|
|
|
|
2022-05-16 10:53:10 +02:00
|
|
|
/* Pick a good position in [min_pos, max_pos] to split interval "ival".
 * Heuristics: if both bounds are in the same block (or the interval has a
 * lifetime hole between them) take min or max as requested; if max_pos is
 * inside a deeper loop than min_pos, hoist the split out of the loop to the
 * end of the loop-entry block; otherwise split at the start of max's block. */
static ir_live_pos ir_find_optimal_split_position(ir_ctx *ctx, ir_live_interval *ival, ir_live_pos min_pos, ir_live_pos max_pos, bool prefer_max)
{
	ir_block *min_bb, *max_bb;

	if (min_pos == max_pos) {
		return max_pos;
	}

	IR_ASSERT(min_pos < max_pos);
	IR_ASSERT(min_pos >= ival->range.start);
	IR_ASSERT(max_pos < ir_ival_end(ival));

	min_bb = ir_block_from_live_pos(ctx, min_pos);
	max_bb = ir_block_from_live_pos(ctx, max_pos);

	if (min_bb == max_bb
	 || ir_ival_has_hole_between(ival, min_pos, max_pos)) { // TODO: ???
		return (prefer_max) ? max_pos : min_pos;
	}

	if (min_bb->loop_depth < max_bb->loop_depth) {
		/* Split at the end of the loop entry */
		do {
			/* step to the loop header (if any), then to its single
			 * pre-loop predecessor — moving outward one loop level */
			if (max_bb->loop_header) {
				max_bb = &ctx->cfg_blocks[max_bb->loop_header];
			}
			max_bb = &ctx->cfg_blocks[ctx->cfg_edges[max_bb->predecessors]];
			IR_ASSERT(ir_ival_covers(ival, IR_DEF_LIVE_POS_FROM_REF(max_bb->end)));
		} while (min_bb->loop_depth < max_bb->loop_depth);

		return IR_DEF_LIVE_POS_FROM_REF(max_bb->end);
	}

	IR_ASSERT(min_bb->loop_depth == max_bb->loop_depth); // TODO: Can "min_bb" be in a deeper loop than "max_bb" ???

	return IR_LOAD_LIVE_POS_FROM_REF(max_bb->start);
}
|
|
|
|
|
2022-05-12 16:43:08 +02:00
|
|
|
/* Split interval "ival" at "pos" into ival (the part before pos) and a new
 * child interval (the part from pos on). Ranges and use positions are
 * partitioned between the two; the HAS_HINTS flags of both halves are
 * recomputed from the use positions they keep. The child is linked into the
 * interval's "next" chain and returned. If pos falls into a lifetime hole,
 * the split is moved forward to the start of the next range. */
static ir_live_interval *ir_split_interval_at(ir_ctx *ctx, ir_live_interval *ival, ir_live_pos pos)
{
	ir_live_interval *child;
	ir_live_range *p, *prev;
	ir_use_pos *use_pos, *prev_use_pos;

	IR_LOG_LSRA_SPLIT(ival, pos);
	IR_ASSERT(pos > ival->range.start);

	/* find the range that contains (or follows) pos */
	p = &ival->range;
	prev = NULL;
	while (p && pos >= p->end) {
		prev = p;
		p = prev->next;
	}
	IR_ASSERT(p);

	if (pos < p->start) {
		/* split between ranges */
		pos = p->start;
	}

	/* Partition the use positions: everything before pos stays in ival.
	 * When the split lands exactly on a range start, a use at pos goes to
	 * the child (">" vs ">="). Recompute ival's HAS_HINTS from what stays. */
	use_pos = ival->use_pos;
	prev_use_pos = NULL;

	ival->flags &= ~IR_LIVE_INTERVAL_HAS_HINTS;
	if (p->start == pos) {
		while (use_pos && pos > use_pos->pos) {
			if (use_pos->hint != IR_REG_NONE || use_pos->hint_ref != 0) {
				ival->flags |= IR_LIVE_INTERVAL_HAS_HINTS;
			}
			prev_use_pos = use_pos;
			use_pos = use_pos->next;
		}
	} else {
		while (use_pos && pos >= use_pos->pos) {
			if (use_pos->hint != IR_REG_NONE || use_pos->hint_ref != 0) {
				ival->flags |= IR_LIVE_INTERVAL_HAS_HINTS;
			}
			prev_use_pos = use_pos;
			use_pos = use_pos->next;
		}
	}

	child = ir_mem_malloc(sizeof(ir_live_interval));
	child->type = ival->type;
	child->reg = IR_REG_NONE;
	child->flags = 0;
	child->vreg = ival->vreg;
	child->stack_spill_pos = -1; // not allocated
	child->range.start = pos;
	child->range.end = p->end;
	child->range.next = p->next;
	child->end = ival->end;
	child->use_pos = prev_use_pos ? prev_use_pos->next : use_pos;

	child->top = ival->top;
	child->next = ival->next;
	ival->next = child;

	if (pos == p->start) {
		/* whole range p moved to the child; prev is non-NULL here because
		 * pos > ival->range.start (asserted above) */
		prev->next = NULL;
		ival->end = prev->end;
	} else {
		/* range p is cut in two; keep cached end in sync */
		p->end = ival->end = pos;
		p->next = NULL;
	}
	if (prev_use_pos) {
		prev_use_pos->next = NULL;
	} else {
		ival->use_pos = NULL;
	}

	/* recompute the child's HAS_HINTS flag from the uses it received */
	use_pos = child->use_pos;
	while (use_pos) {
		if (use_pos->hint != IR_REG_NONE || use_pos->hint_ref != 0) {
			child->flags |= IR_LIVE_INTERVAL_HAS_HINTS;
		}
		use_pos = use_pos->next;
	}

	return child;
}
|
|
|
|
|
2022-05-27 12:18:04 +02:00
|
|
|
/* Allocate a stack spill slot for a value of "type" and return its offset
 * within the frame (-1 on unsupported size). Slots are carved out of
 * pointer-sized chunks; when a sub-chunk is split, the leftover halves are
 * remembered in unused_slot_{4,2,1} so later small allocations can reuse
 * them instead of growing the frame. */
int32_t ir_allocate_spill_slot(ir_ctx *ctx, ir_type type, ir_reg_alloc_data *data)
{
	int32_t ret;
	uint8_t size = ir_type_size[type];

	if (size == 8) {
		ret = data->stack_frame_size;
		data->stack_frame_size += 8;
	} else if (size == 4) {
		if (data->unused_slot_4) {
			/* reuse a previously split 4-byte hole */
			ret = data->unused_slot_4;
			data->unused_slot_4 = 0;
		} else {
			ret = data->stack_frame_size;
			if (sizeof(void*) == 8) {
				/* on 64-bit frames grow by 8; remember the upper half */
				data->unused_slot_4 = data->stack_frame_size + 4;
				data->stack_frame_size += 8;
			} else {
				data->stack_frame_size += 4;
			}
		}
	} else if (size == 2) {
		if (data->unused_slot_2) {
			ret = data->unused_slot_2;
			data->unused_slot_2 = 0;
		} else if (data->unused_slot_4) {
			/* split a 4-byte hole: take the low half, keep the high half */
			ret = data->unused_slot_4;
			data->unused_slot_2 = data->unused_slot_4 + 2;
			data->unused_slot_4 = 0;
		} else {
			ret = data->stack_frame_size;
			data->unused_slot_2 = data->stack_frame_size + 2;
			if (sizeof(void*) == 8) {
				data->unused_slot_4 = data->stack_frame_size + 4;
				data->stack_frame_size += 8;
			} else {
				data->stack_frame_size += 4;
			}
		}
	} else if (size == 1) {
		if (data->unused_slot_1) {
			ret = data->unused_slot_1;
			data->unused_slot_1 = 0;
		} else if (data->unused_slot_2) {
			/* split a 2-byte hole */
			ret = data->unused_slot_2;
			data->unused_slot_1 = data->unused_slot_2 + 1;
			data->unused_slot_2 = 0;
		} else if (data->unused_slot_4) {
			/* split a 4-byte hole into 1 + 1 + 2 */
			ret = data->unused_slot_4;
			data->unused_slot_1 = data->unused_slot_4 + 1;
			data->unused_slot_2 = data->unused_slot_4 + 2;
			data->unused_slot_4 = 0;
		} else {
			ret = data->stack_frame_size;
			data->unused_slot_1 = data->stack_frame_size + 1;
			data->unused_slot_2 = data->stack_frame_size + 2;
			if (sizeof(void*) == 8) {
				data->unused_slot_4 = data->stack_frame_size + 4;
				data->stack_frame_size += 8;
			} else {
				data->stack_frame_size += 4;
			}
		}
	} else {
		IR_ASSERT(0);
		ret = -1;
	}
	return ret;
}
|
|
|
|
|
2022-05-04 08:08:23 +02:00
|
|
|
/* Try to satisfy a register hint of interval "ival": return a hinted
 * register that is in "available" and stays free for the whole interval
 * (per freeUntilPos). Explicit hints take priority over hints inherited
 * through hint_ref. Returns IR_REG_NONE when no hint can be honored. */
static ir_reg ir_try_allocate_preferred_reg(ir_ctx *ctx, ir_live_interval *ival, ir_regset available, ir_live_pos *freeUntilPos)
{
	ir_use_pos *p;
	ir_reg reg;

	/* first pass: explicit register hints on use positions */
	for (p = ival->use_pos; p; p = p->next) {
		reg = p->hint;
		if (reg >= 0 && IR_REGSET_IN(available, reg)
		 && ir_ival_end(ival) <= freeUntilPos[reg]) {
			/* register available for the whole interval */
			return reg;
		}
	}

	/* second pass: hints inherited from a related instruction's interval */
	for (p = ival->use_pos; p; p = p->next) {
		if (p->hint_ref) {
			reg = ctx->live_intervals[ctx->vregs[p->hint_ref]]->reg;
			if (reg >= 0 && IR_REGSET_IN(available, reg)
			 && ir_ival_end(ival) <= freeUntilPos[reg]) {
				/* register available for the whole interval */
				return reg;
			}
		}
	}

	return IR_REG_NONE;
}
|
|
|
|
|
2022-05-11 20:10:35 +02:00
|
|
|
/* Return the first hinted register of "ival" that is in "available"
 * (checking each use position's own hint, then its hint_ref interval),
 * or IR_REG_NONE when no usable hint exists. Unlike
 * ir_try_allocate_preferred_reg() this does not require the register to
 * stay free for the whole interval. */
static ir_reg ir_get_preferred_reg(ir_ctx *ctx, ir_live_interval *ival, ir_regset available)
{
	ir_use_pos *p;
	ir_reg reg;

	for (p = ival->use_pos; p; p = p->next) {
		reg = p->hint;
		if (reg >= 0 && IR_REGSET_IN(available, reg)) {
			return reg;
		}
		if (p->hint_ref) {
			reg = ctx->live_intervals[ctx->vregs[p->hint_ref]]->reg;
			if (reg >= 0 && IR_REGSET_IN(available, reg)) {
				return reg;
			}
		}
	}

	return IR_REG_NONE;
}
|
|
|
|
|
2022-05-12 16:43:08 +02:00
|
|
|
/* Insert 'ival' into the sorted "unhandled" work list.
 *
 * Ordering: ascending range start; among intervals starting at the same
 * position, ones with register hints come first, then higher vreg
 * numbers.  Implemented as a single pointer-to-pointer walk so the
 * head-insert and interior-insert cases share one code path.
 */
static void ir_add_to_unhandled(ir_live_interval **unhandled, ir_live_interval *ival)
{
	ir_live_pos pos = ival->range.start;
	ir_live_interval **slot = unhandled;

	/* advance until we reach the node that 'ival' must precede */
	while (*slot) {
		ir_live_interval *cur = *slot;

		if (pos < cur->range.start) {
			break;
		}
		if (pos == cur->range.start) {
			/* hinted intervals are processed before unhinted ones */
			if ((ival->flags & IR_LIVE_INTERVAL_HAS_HINTS)
			 && !(cur->flags & IR_LIVE_INTERVAL_HAS_HINTS)) {
				break;
			}
			/* tie-break on virtual register number */
			if (ival->vreg > cur->vreg) {
				break;
			}
		}
		slot = &cur->list_next;
	}

	ival->list_next = *slot;
	*slot = ival;
}
|
|
|
|
|
2022-05-12 16:43:08 +02:00
|
|
|
/*
 * LSRA "try allocate free register" step (Wimmer-style linear scan).
 *
 * Computes freeUntilPos[r] for every candidate physical register r:
 * the position up to which r stays free with respect to the active and
 * inactive intervals.  Then it tries, in order:
 *   1. a hinted register that is free for the whole interval;
 *   2. the register with the highest freeUntilPos, taken outright if it
 *      covers the whole interval;
 *   3. if a register is free only for a prefix, split the interval and
 *      assign the register to the first part, queueing the rest.
 * Returns the assigned register, or IR_REG_NONE when allocation needs
 * spilling (handled by ir_allocate_blocked_reg in the caller).
 */
static ir_reg ir_try_allocate_free_reg(ir_ctx *ctx, ir_live_interval *ival, ir_live_interval **active, ir_live_interval *inactive, ir_live_interval **unhandled)
{
	ir_live_pos freeUntilPos[IR_REG_NUM];
	int i, reg;
	ir_live_pos pos, next;
	ir_live_interval *other;
	ir_regset available;

	if (IR_IS_TYPE_FP(ival->type)) {
		available = IR_REGSET_FP;
		/* set freeUntilPos of all physical registers to maxInt */
		for (i = IR_REG_FP_FIRST; i <= IR_REG_FP_LAST; i++) {
			freeUntilPos[i] = 0x7fffffff;
		}
	} else {
		available = IR_REGSET_GP;
		if (ctx->flags & IR_USE_FRAME_POINTER) {
			/* FP-relative addressing reserves the frame pointer */
			IR_REGSET_EXCL(available, IR_REG_FRAME_POINTER);
		}
		/* set freeUntilPos of all physical registers to maxInt */
		for (i = IR_REG_GP_FIRST; i <= IR_REG_GP_LAST; i++) {
			freeUntilPos[i] = 0x7fffffff;
		}
	}

	/* registers reserved by the embedder are never allocatable */
	available = IR_REGSET_DIFFERENCE(available, (ir_regset)ctx->fixed_regset);

	/* for each interval it in active */
	other = *active;
	while (other) {
		/* freeUntilPos[it.reg] = 0 */
		reg = other->reg;
		IR_ASSERT(reg >= 0);
		if (reg == IR_REG_NUM) {
			/* NOTE(review): reg == IR_REG_NUM appears to be a sentinel meaning
			 * "blocks all scratch registers" (same treatment in
			 * ir_allocate_blocked_reg) — confirm against interval construction */
			ir_regset regset = IR_REGSET_INTERSECTION(available, IR_REGSET_SCRATCH);

			IR_REGSET_FOREACH(regset, reg) {
				freeUntilPos[reg] = 0;
			} IR_REGSET_FOREACH_END();
		} else if (IR_REGSET_IN(available, reg)) {
			freeUntilPos[reg] = 0;
		}
		other = other->list_next;
	}

	/* for each interval it in inactive intersecting with current
	 *
	 * This loop is not necessary for program in SSA form (see LSRA on SSA fig. 6),
	 * but it is still necessary after coalescing and splitting
	 */
	other = inactive;
	while (other) {
		/* freeUntilPos[it.reg] = next intersection of it with current */
		reg = other->reg;
		IR_ASSERT(reg >= 0);
		if (reg == IR_REG_NUM) {
			next = ir_ivals_overlap(&ival->range, other->current_range);
			if (next) {
				ir_regset regset = IR_REGSET_INTERSECTION(available, IR_REGSET_SCRATCH);

				IR_REGSET_FOREACH(regset, reg) {
					if (next < freeUntilPos[reg]) {
						freeUntilPos[reg] = next;
					}
				} IR_REGSET_FOREACH_END();
			}
		} else if (IR_REGSET_IN(available, reg)) {
			next = ir_ivals_overlap(&ival->range, other->current_range);
			if (next && next < freeUntilPos[reg]) {
				freeUntilPos[reg] = next;
			}
		}
		other = other->list_next;
	}

	/* Try to use hint */
	reg = ir_try_allocate_preferred_reg(ctx, ival, available, freeUntilPos);
	if (reg != IR_REG_NONE) {
		ival->reg = reg;
		IR_LOG_LSRA_ASSIGN(" ---- Assign", ival, " (hint available without spilling)");
		ival->list_next = *active;
		*active = ival;
		return reg;
	}

	/* reg = register with highest freeUntilPos */
	reg = IR_REG_NONE;
	pos = 0;
	IR_REGSET_FOREACH(available, i) {
		if (freeUntilPos[i] > pos) {
			pos = freeUntilPos[i];
			reg = i;
		} else if (freeUntilPos[i] == pos
				&& !IR_REGSET_IN(IR_REGSET_SCRATCH, reg)
				&& IR_REGSET_IN(IR_REGSET_SCRATCH, i)) {
			/* prefer caller-saved registers to avoid save/restore in prologue/epilogue */
			pos = freeUntilPos[i];
			reg = i;
		}
	} IR_REGSET_FOREACH_END();

	if (!pos) {
		/* no register available without spilling */
		return IR_REG_NONE;
	} else if (ir_ival_end(ival) <= pos) {
		/* register available for the whole interval */
		ival->reg = reg;
		IR_LOG_LSRA_ASSIGN(" ---- Assign", ival, " (available without spilling)");
		ival->list_next = *active;
		*active = ival;
		return reg;
	} else if (pos > ival->range.start) {
		/* register available for the first part of the interval */
		/* split current before freeUntilPos[reg] */
		ir_live_pos split_pos = ir_last_use_pos_before(ival, pos,
			IR_USE_MUST_BE_IN_REG | IR_USE_SHOULD_BE_IN_REG);
		if (split_pos > ival->range.start) {
			ir_reg pref_reg;

			split_pos = ir_find_optimal_split_position(ctx, ival, split_pos, pos, 0);
			other = ir_split_interval_at(ctx, ival, split_pos);
			/* re-check hints: splitting may have changed the interval's end,
			 * so a hinted register may now cover the (shorter) first part */
			pref_reg = ir_try_allocate_preferred_reg(ctx, ival, available, freeUntilPos);
			if (pref_reg != IR_REG_NONE) {
				ival->reg = pref_reg;
			} else {
				ival->reg = reg;
			}
			IR_LOG_LSRA_ASSIGN(" ---- Assign", ival, " (available without spilling for the first part)");
			ival->list_next = *active;
			*active = ival;
			/* the split-off tail goes back into the work list */
			ir_add_to_unhandled(unhandled, other);
			IR_LOG_LSRA(" ---- Queue", other, "");
			return reg;
		}
	}
	return IR_REG_NONE;
}
|
|
|
|
|
2022-05-12 16:43:08 +02:00
|
|
|
/*
 * LSRA "allocate blocked register" step: no register is free for the
 * whole interval, so either spill the current interval or evict others.
 *
 * Computes nextUsePos[r] (next use of r's current holder after the
 * start of 'ival') and blockPos[r] (position where r is hard-blocked by
 * a fixed/temp interval), picks the register with the highest
 * nextUsePos, then:
 *   - if the current interval's first must-be-in-reg use comes later
 *     than that, spill/split the current interval itself;
 *   - otherwise split the active/inactive intervals holding the chosen
 *     register and assign it to 'ival'.
 * Returns the assigned register or IR_REG_NONE when 'ival' was spilled.
 */
static ir_reg ir_allocate_blocked_reg(ir_ctx *ctx, ir_live_interval *ival, ir_live_interval **active, ir_live_interval *inactive, ir_live_interval **unhandled)
{
	ir_live_pos nextUsePos[IR_REG_NUM];
	ir_live_pos blockPos[IR_REG_NUM];
	int i, reg;
	ir_live_pos pos, next_use_pos;
	ir_live_interval *other, *prev;
	ir_use_pos *use_pos;
	ir_regset available;

	if (!(ival->flags & IR_LIVE_INTERVAL_TEMP)) {
		/* find the first use position that really requires a register */
		use_pos = ival->use_pos;
		while (use_pos && !(use_pos->flags & IR_USE_MUST_BE_IN_REG)) {
			use_pos = use_pos->next;
		}
		if (!use_pos) {
			/* spill */
			IR_LOG_LSRA(" ---- Spill", ival, " (no use pos that must be in reg)");
			return IR_REG_NONE;
		}
		next_use_pos = use_pos->pos;
	} else {
		/* temporary intervals have no use positions; they need the
		 * register for their entire (short) range */
		next_use_pos = ival->range.end;
	}

	if (IR_IS_TYPE_FP(ival->type)) {
		available = IR_REGSET_FP;
		/* set nextUsePos of all physical registers to maxInt */
		for (i = IR_REG_FP_FIRST; i <= IR_REG_FP_LAST; i++) {
			nextUsePos[i] = 0x7fffffff;
			blockPos[i] = 0x7fffffff;
		}
	} else {
		available = IR_REGSET_GP;
		if (ctx->flags & IR_USE_FRAME_POINTER) {
			IR_REGSET_EXCL(available, IR_REG_FRAME_POINTER);
		}
		/* set nextUsePos of all physical registers to maxInt */
		for (i = IR_REG_GP_FIRST; i <= IR_REG_GP_LAST; i++) {
			nextUsePos[i] = 0x7fffffff;
			blockPos[i] = 0x7fffffff;
		}
	}

	available = IR_REGSET_DIFFERENCE(available, (ir_regset)ctx->fixed_regset);

	if (IR_REGSET_IS_EMPTY(available)) {
		fprintf(stderr, "LSRA Internal Error: No registers available. Allocation is not possible\n");
		IR_ASSERT(0);
		exit(-1);
	}

	/* for each interval it in active */
	other = *active;
	while (other) {
		/* nextUsePos[it.reg] = next use of it after start of current */
		reg = other->reg;
		IR_ASSERT(reg >= 0);
		if (reg == IR_REG_NUM) {
			/* NOTE(review): sentinel that appears to block every scratch
			 * register at once (e.g. around a call) — confirm */
			ir_regset regset = IR_REGSET_INTERSECTION(available, IR_REGSET_SCRATCH);

			IR_REGSET_FOREACH(regset, reg) {
				blockPos[reg] = nextUsePos[reg] = 0;
			} IR_REGSET_FOREACH_END();
		} else if (IR_REGSET_IN(available, reg)) {
			if (other->flags & (IR_LIVE_INTERVAL_FIXED|IR_LIVE_INTERVAL_TEMP)) {
				/* fixed/temp intervals cannot be evicted at all */
				blockPos[reg] = nextUsePos[reg] = 0;
			} else {
				pos = ir_first_use_pos_after(other, ival->range.start,
					IR_USE_MUST_BE_IN_REG /* | IR_USE_SHOULD_BE_IN_REG */); // TODO: ???
				if (pos < nextUsePos[reg]) {
					nextUsePos[reg] = pos;
				}
			}
		}
		other = other->list_next;
	}

	/* for each interval it in inactive intersecting with current */
	other = inactive;
	while (other) {
		/* freeUntilPos[it.reg] = next intersection of it with current */
		reg = other->reg;
		IR_ASSERT(reg >= 0);
		if (reg == IR_REG_NUM) {
			ir_live_pos overlap = ir_ivals_overlap(&ival->range, other->current_range);

			if (overlap) {
				ir_regset regset = IR_REGSET_INTERSECTION(available, IR_REGSET_SCRATCH);

				IR_REGSET_FOREACH(regset, reg) {
					if (overlap < nextUsePos[reg]) {
						nextUsePos[reg] = overlap;
					}
					if (overlap < blockPos[reg]) {
						blockPos[reg] = overlap;
					}
				} IR_REGSET_FOREACH_END();
			}
		} else if (IR_REGSET_IN(available, reg)) {
			ir_live_pos overlap = ir_ivals_overlap(&ival->range, other->current_range);

			if (overlap) {
				if (other->flags & (IR_LIVE_INTERVAL_FIXED|IR_LIVE_INTERVAL_TEMP)) {
					if (overlap < nextUsePos[reg]) {
						nextUsePos[reg] = overlap;
					}
					if (overlap < blockPos[reg]) {
						blockPos[reg] = overlap;
					}
				} else {
					pos = ir_first_use_pos_after(other, ival->range.start,
						IR_USE_MUST_BE_IN_REG /* | IR_USE_SHOULD_BE_IN_REG */); // TODO: ???
					if (pos < nextUsePos[reg]) {
						nextUsePos[reg] = pos;
					}
				}
			}
		}
		other = other->list_next;
	}

	/* register hinting */
	reg = ir_get_preferred_reg(ctx, ival, available);
	if (reg == IR_REG_NONE) {
		reg = IR_REGSET_FIRST(available);
	}

	/* reg = register with highest nextUsePos */
	IR_REGSET_EXCL(available, reg);
	pos = nextUsePos[reg];
	IR_REGSET_FOREACH(available, i) {
		if (nextUsePos[i] > pos) {
			pos = nextUsePos[i];
			reg = i;
		}
	} IR_REGSET_FOREACH_END();

	/* if first usage of current is after nextUsePos[reg] then */
	if (next_use_pos > pos && !(ival->flags & IR_LIVE_INTERVAL_TEMP)) {
		/* all other intervals are used before current, so it is best to spill current itself */
		/* assign spill slot to current */
		/* split current before its first use position that requires a register */
		ir_live_pos split_pos;

		if (next_use_pos == ival->range.start) {
			IR_ASSERT(ival->use_pos && ival->use_pos->op_num == 0);
			/* split right after definition */
			split_pos = next_use_pos + 1;
		} else {
			split_pos = ir_find_optimal_split_position(ctx, ival, ival->range.start, next_use_pos - 1, 1);
		}

		if (split_pos > ival->range.start) {
			IR_LOG_LSRA(" ---- Conflict with others", ival, " (all others are used before)");
			other = ir_split_interval_at(ctx, ival, split_pos);
			IR_LOG_LSRA(" ---- Spill", ival, "");
			ir_add_to_unhandled(unhandled, other);
			IR_LOG_LSRA(" ---- Queue", other, "");
			return IR_REG_NONE;
		}
	}

	if (ir_ival_end(ival) > blockPos[reg]) {
		/* spilling make a register free only for the first part of current */
		IR_LOG_LSRA(" ---- Conflict with others", ival, " (spilling make a register free only for the first part)");
		/* split current at optimal position before block_pos[reg] */
		ir_live_pos split_pos = ir_last_use_pos_before(ival, blockPos[reg] + 1,
			IR_USE_MUST_BE_IN_REG | IR_USE_SHOULD_BE_IN_REG);
		split_pos = ir_find_optimal_split_position(ctx, ival, split_pos, blockPos[reg], 1);
		other = ir_split_interval_at(ctx, ival, split_pos);
		ir_add_to_unhandled(unhandled, other);
		IR_LOG_LSRA(" ---- Queue", other, "");
	}

	/* spill intervals that currently block reg */
	prev = NULL;
	other = *active;
	while (other) {
		ir_live_pos split_pos;

		if (reg == other->reg) {
			/* split active interval for reg at position */
			ir_live_pos overlap = ir_ivals_overlap(&ival->range, other->current_range);

			if (overlap) {
				ir_live_interval *child, *child2;

				IR_ASSERT(other->type != IR_VOID);
				IR_LOG_LSRA_CONFLICT(" ---- Conflict with active", other, overlap);

				split_pos = ir_last_use_pos_before(other, ival->range.start, IR_USE_MUST_BE_IN_REG | IR_USE_SHOULD_BE_IN_REG);
				if (split_pos == 0) {
					split_pos = ival->range.start;
				}
				split_pos = ir_find_optimal_split_position(ctx, other, split_pos, ival->range.start, 1);
				if (split_pos > other->range.start) {
					/* the holder keeps reg for its first part; the tail is split off */
					child = ir_split_interval_at(ctx, other, split_pos);
					IR_LOG_LSRA(" ---- Finish", other, "");
				} else {
					/* cannot split before: the whole holder loses the register */
					if (ir_first_use_pos_after(other, other->range.start, IR_USE_MUST_BE_IN_REG) < ir_ival_end(other)) {
						fprintf(stderr, "LSRA Internal Error: Unsolvable conflict. Allocation is not possible\n");
						IR_ASSERT(0);
						exit(-1);
					}
					child = other;
					other->reg = IR_REG_NONE;
					/* unlink the evicted interval from the active list */
					if (prev) {
						prev->list_next = other->list_next;
					} else {
						*active = other->list_next;
					}
					IR_LOG_LSRA(" ---- Spill and Finish", other, " (it must not be in reg)");
				}

				split_pos = ir_first_use_pos_after(child, ival->range.start, IR_USE_MUST_BE_IN_REG | IR_USE_SHOULD_BE_IN_REG) - 1; // TODO: ???
				if (split_pos > child->range.start && split_pos < ir_ival_end(child)) {
					/* spill the middle part; re-queue the remainder */
					split_pos = ir_find_optimal_split_position(ctx, child, ival->range.start, split_pos, 1);
					child2 = ir_split_interval_at(ctx, child, split_pos);
					IR_LOG_LSRA(" ---- Spill", child, "");
					ir_add_to_unhandled(unhandled, child2);
					IR_LOG_LSRA(" ---- Queue", child2, "");
				} else if (child != other) {
					// TODO: this may cause endless loop
					ir_add_to_unhandled(unhandled, child);
					IR_LOG_LSRA(" ---- Queue", child, "");
				}
			}
			break;
		}
		prev = other;
		other = other->list_next;
	}

	/* split any inactive interval for reg at the end of its lifetime hole */
	other = inactive;
	while (other) {
		/* freeUntilPos[it.reg] = next intersection of it with current */
		if (reg == other->reg) {
			ir_live_pos overlap = ir_ivals_overlap(&ival->range, other->current_range);

			if (overlap) {
				IR_ASSERT(other->type != IR_VOID);
				IR_LOG_LSRA_CONFLICT(" ---- Conflict with inactive", other, overlap);
				// TODO: optimal split position (this case is not tested)
				ir_split_interval_at(ctx, other, overlap);
			}
		}
		other = other->list_next;
	}

	/* current.reg = reg */
	ival->reg = reg;
	IR_LOG_LSRA_ASSIGN(" ---- Assign", ival, " (after splitting others)");
	ival->list_next = *active;
	*active = ival;

	return reg;
}
|
|
|
|
|
2022-05-06 15:19:57 +02:00
|
|
|
/* Callback for ir_gen_dessa_moves(): reserve temporary registers needed
 * to implement DESSA (de-SSA) parallel moves in the block stored in
 * ctx->data.
 *
 * Temp slot numbering: 0 = int cycle-break temp (to == 0), 1 = int
 * memory-move temp, 2/3 = the FP counterparts.  A move from a constant
 * (from == 0 with to != 0) needs no temporary.  Returns 1 to continue
 * enumeration (0 only on an unexpected value type).
 */
static int ir_fix_dessa_tmps(ir_ctx *ctx, uint8_t type, ir_ref from, ir_ref to)
{
	ir_block *bb = ctx->data;
	ir_tmp_reg tmp_reg;

	if (to != 0 && from == 0) {
		/* constant source: no temporary register required */
		return 1;
	}

	if (IR_IS_TYPE_INT(type)) {
		tmp_reg.num = (to == 0) ? 0 : 1;
	} else if (IR_IS_TYPE_FP(type)) {
		tmp_reg.num = (to == 0) ? 2 : 3;
	} else {
		IR_ASSERT(0);
		return 0;
	}
	tmp_reg.type = type;
	tmp_reg.start = IR_DEF_SUB_REF;
	tmp_reg.end = IR_SAVE_SUB_REF;

	/* register each temp slot at the block end only once */
	if (!ir_has_tmp(ctx, bb->end, tmp_reg.num)) {
		ir_add_tmp(ctx, bb->end, tmp_reg);
	}
	return 1;
}
|
|
|
|
|
2022-05-27 12:18:04 +02:00
|
|
|
/*
 * Decide whether an interval can avoid a register entirely because its
 * value can be (re)loaded directly from memory at each use.
 *
 * Two cases:
 *   - IR_LIVE_INTERVAL_MEM_PARAM: a PARAM kept in its stack-argument
 *     slot; allowed when there is at most one remaining use, it does
 *     not require a register, and it is not inside a loop.
 *   - IR_LIVE_INTERVAL_MEM_LOAD: a VLOAD from a VAR; same use
 *     restrictions (a use in the load's own block is tolerated even in
 *     a loop), plus the interval's spill slot is aliased to the VAR's
 *     slot (allocating one if the VAR has none yet).
 * Returns 1 if the interval was handled (caller must not queue it for
 * register allocation), 0 otherwise.
 */
static bool ir_ival_spill_for_fuse_load(ir_ctx *ctx, ir_live_interval *ival, ir_reg_alloc_data *data)
{
	ir_use_pos *use_pos = ival->use_pos;
	ir_insn *insn;

	if (ival->flags & IR_LIVE_INTERVAL_MEM_PARAM) {
		IR_ASSERT(ival->top == ival && !ival->next && use_pos && use_pos->op_num == 0);
		insn = &ctx->ir_base[IR_LIVE_POS_TO_REF(use_pos->pos)];
		IR_ASSERT(insn->op == IR_PARAM);
		/* skip the defining use; inspect the remaining uses */
		use_pos =use_pos->next;
		if (use_pos && (use_pos->next || (use_pos->flags & IR_USE_MUST_BE_IN_REG))) {
			/* more than one use, or the use needs a register: keep it allocatable */
			return 0;
		}

		if (use_pos) {
			ir_block *bb = ir_block_from_live_pos(ctx, use_pos->pos);
			/* reloading inside a loop would repeat the memory access */
			if (bb->loop_depth) {
				return 0;
			}
		}

		return 1;
	} else if (ival->flags & IR_LIVE_INTERVAL_MEM_LOAD) {
		insn = &ctx->ir_base[IR_LIVE_POS_TO_REF(use_pos->pos)];
		IR_ASSERT(insn->op == IR_VLOAD);
		/* skip the defining use; inspect the remaining uses */
		use_pos =use_pos->next;
		if (use_pos && (use_pos->next || (use_pos->flags & IR_USE_MUST_BE_IN_REG))) {
			return 0;
		}

		if (use_pos) {
			ir_block *bb = ir_block_from_live_pos(ctx, use_pos->pos);
			/* a loop use is acceptable only in the same block as the load itself */
			if (bb->loop_depth && bb != ir_block_from_live_pos(ctx, ival->use_pos->pos)) {
				return 0;
			}
		}

		/* alias this interval's spill slot with the loaded VAR's slot */
		IR_ASSERT(ctx->ir_base[insn->op2].op == IR_VAR);
		if (ctx->live_intervals[ctx->vregs[insn->op2]]->stack_spill_pos != -1) {
			ival->stack_spill_pos =
				ctx->live_intervals[ctx->vregs[insn->op2]]->stack_spill_pos;
		} else {
			ival->stack_spill_pos = ir_allocate_spill_slot(ctx, ival->type, data);
			ctx->live_intervals[ctx->vregs[insn->op2]]->stack_spill_pos =
				ival->stack_spill_pos;
		}

		return 1;
	}
	return 0;
}
|
|
|
|
|
2022-04-05 23:19:23 +02:00
|
|
|
/*
 * Main linear-scan register allocation driver.
 *
 * Prepares fixed temp intervals for DESSA moves, pre-assigns spill
 * slots to VAR intervals, filters out intervals whose loads can be
 * fused (ir_ival_spill_for_fuse_load), builds the sorted unhandled
 * work list, then runs the classic LSRA loop: maintain active/inactive
 * lists per position, try a free register, otherwise allocate a
 * blocked one.  Finally assigns spill slots to intervals that were
 * split or left unallocated.  Returns 1 on success, 0 when there are
 * no live intervals to allocate.
 */
static int ir_linear_scan(ir_ctx *ctx)
{
	int b;
	ir_block *bb;
	ir_live_interval *unhandled = NULL;
	ir_live_interval *active = NULL;
	ir_live_interval *inactive = NULL;
	ir_live_interval *ival, *other, *prev;
	int j;
	ir_live_pos position;
	ir_reg reg;
	ir_reg_alloc_data data;

	if (!ctx->live_intervals) {
		return 0;
	}

	/* Add fixed intervals for temporary registers used for DESSA moves */
	for (b = 1, bb = &ctx->cfg_blocks[1]; b <= ctx->cfg_blocks_count; b++, bb++) {
		if (bb->flags & IR_BB_UNREACHABLE) {
			continue;
		}
		if (bb->flags & IR_BB_DESSA_MOVES) {
			ctx->data = bb;
			ir_gen_dessa_moves(ctx, b, ir_fix_dessa_tmps);
		}
	}

	ctx->data = &data;
	data.stack_frame_size = 0;
	data.unused_slot_4 = 0;
	data.unused_slot_2 = 0;
	data.unused_slot_1 = 0;

	/* VAR intervals always live in memory: give each a spill slot up front */
	for (j = 0; j <= ctx->vregs_count; j++) {
		ival = ctx->live_intervals[j];
		if (ival) {
			if (ival->flags & IR_LIVE_INTERVAL_VAR) {
				if (ival->stack_spill_pos == -1) {
					ival->stack_spill_pos = ir_allocate_spill_slot(ctx, ival->type, &data);
				}
			}
		}
	}

	/* Build the unhandled list (reverse vreg order so equal-start ties
	 * resolve deterministically in ir_add_to_unhandled) */
	for (j = ctx->vregs_count; j != 0; j--) {
		ival = ctx->live_intervals[j];
		if (ival) {
			if (ival->flags & IR_LIVE_INTERVAL_VAR) {
				/* pass */
			} else if (ival->flags & IR_LIVE_INTERVAL_REG_LOAD) {
				/* pre-allocated fixed register */
			} else if (!(ival->flags & (IR_LIVE_INTERVAL_MEM_PARAM|IR_LIVE_INTERVAL_MEM_LOAD))
					|| !ir_ival_spill_for_fuse_load(ctx, ival, &data)) {
				ir_add_to_unhandled(&unhandled, ival);
			}
		}
	}

	/* index 0 chains the temp intervals; they go through the same queue */
	ival = ctx->live_intervals[0];
	while (ival) {
		ir_add_to_unhandled(&unhandled, ival);
		ival = ival->next;
	}

	/* fixed physical-register intervals start out inactive */
	for (j = ctx->vregs_count + 1; j <= ctx->vregs_count + IR_REG_NUM + 1; j++) {
		ival = ctx->live_intervals[j];
		if (ival) {
			ival->current_range = &ival->range;
			ival->list_next = inactive;
			inactive = ival;
		}
	}

#ifdef IR_DEBUG
	if (ctx->flags & IR_DEBUG_RA) {
		fprintf(stderr, "----\n");
		ir_dump_live_ranges(ctx, stderr);
		fprintf(stderr, "---- Start LSRA\n");
	}
#endif

	while (unhandled) {
		ival = unhandled;
		ival->current_range = &ival->range;
		unhandled = ival->list_next;
		position = ival->range.start;

		IR_LOG_LSRA(" ---- Processing", ival, "...");

		/* for each interval i in active */
		other = active;
		prev = NULL;
		while (other) {
			ir_live_range *r = other->current_range;

			/* advance the cached current_range past ranges that ended */
			if (r && r->end <= position) {
				do {
					r = r->next;
				} while (r && r->end <= position);
				other->current_range = r;
			}
			/* if (ir_ival_end(other) <= position) {*/
			if (!r) {
				/* move i from active to handled */
				if (prev) {
					prev->list_next = other->list_next;
				} else {
					active = other->list_next;
				}
			} else if (position < r->start) {
				/* move i from active to inactive */
				if (prev) {
					prev->list_next = other->list_next;
				} else {
					active = other->list_next;
				}
				other->list_next = inactive;
				inactive = other;
			} else {
				prev = other;
			}
			other = prev ? prev->list_next : active;
		}

		/* for each interval i in inactive */
		other = inactive;
		prev = NULL;
		while (other) {
			ir_live_range *r = other->current_range;

			if (r && r->end <= position) {
				do {
					r = r->next;
				} while (r && r->end <= position);
				other->current_range = r;
			}
			/* if (ir_ival_end(other) <= position) {*/
			if (!r) {
				/* move i from inactive to handled */
				if (prev) {
					prev->list_next = other->list_next;
				} else {
					inactive = other->list_next;
				}
			/* } else if (ir_ival_covers(other, position)) {*/
			} else if (position >= r->start) {
				/* move i from active to inactive */
				if (prev) {
					prev->list_next = other->list_next;
				} else {
					inactive = other->list_next;
				}
				other->list_next = active;
				active = other;
			} else {
				prev = other;
			}
			other = prev ? prev->list_next : inactive;
		}

		reg = ir_try_allocate_free_reg(ctx, ival, &active, inactive, &unhandled);
		if (reg == IR_REG_NONE) {
			reg = ir_allocate_blocked_reg(ctx, ival, &active, inactive, &unhandled);
		}
	}

#if 0 //def IR_DEBUG
	/* all intervals must be processed */
	ival = active;
	while (ival) {
		IR_ASSERT(!ival->next);
		ival = ival->list_next;
	}
	ival = inactive;
	while (ival) {
		IR_ASSERT(!ival->next);
		ival = ival->list_next;
	}
#endif

	/* split or register-less intervals need a spill slot */
	for (j = 1; j <= ctx->vregs_count; j++) {
		ival = ctx->live_intervals[j];
		if (ival && ival->stack_spill_pos == -1 && !(ival->flags & IR_LIVE_INTERVAL_MEM_PARAM)) {
			if (ival->next || ival->reg == IR_REG_NONE) {
				ival->stack_spill_pos = ir_allocate_spill_slot(ctx, ival->type, &data);
			}
		}
	}

#ifdef IR_DEBUG
	if (ctx->flags & IR_DEBUG_RA) {
		fprintf(stderr, "---- Finish LSRA\n");
		ir_dump_live_ranges(ctx, stderr);
		fprintf(stderr, "----\n");
	}
#endif

	return 1;
}
|
|
|
|
|
2022-05-05 21:35:39 +02:00
|
|
|
/* Materialize the results of linear-scan allocation: for every allocated
 * live interval, record the selected physical register at each of its use
 * positions in the per-instruction operand-register table ctx->regs.
 * Uses of intervals that also have a stack spill slot are tagged with
 * IR_REG_SPILL_STORE (definitions) or IR_REG_SPILL_LOAD (uses) so the code
 * generator emits the corresponding memory moves.  Finally, the temporary
 * registers requested by individual instructions (interval list at
 * ctx->live_intervals[0]) are recorded as well.
 */
static void assign_regs(ir_ctx *ctx)
{
	uint32_t i;
	ir_live_interval *ival;
	ir_use_pos *use_pos;
	int8_t reg;
	ir_ref ref;

	ctx->regs = ir_mem_malloc(sizeof(ir_regs) * ctx->insns_count);
	memset(ctx->regs, IR_REG_NONE, sizeof(ir_regs) * ctx->insns_count);

	for (i = 1; i <= ctx->vregs_count; i++) {
		ival = ctx->live_intervals[i];
		if (ival) {
			/* Walk the split chain of this virtual register. */
			do {
				if (ival->reg >= 0) {
					use_pos = ival->use_pos;
					while (use_pos) {
						reg = ival->reg;
						if (ival->top->stack_spill_pos != -1) {
							/* TODO: Insert spill loads and stores in optimal positions (resolution) */
							if (use_pos->op_num == 0) {
								/* op_num 0 is the definition: store to the spill slot after it. */
								reg |= IR_REG_SPILL_STORE;
							} else {
								reg |= IR_REG_SPILL_LOAD;
							}
						}
						/* PHI uses are recorded on the PHI instruction itself
						 * (via hint_ref); other uses map back to the
						 * instruction owning the use position. */
						if (use_pos->flags & IR_PHI_USE) {
							IR_ASSERT(use_pos->hint_ref > 0);
							ref = use_pos->hint_ref;
						} else {
							ref = IR_LIVE_POS_TO_REF(use_pos->pos);
						}
						IR_ASSERT(use_pos->op_num <= IR_MAX(3, ir_input_edges_count(ctx, &ctx->ir_base[ref])));
						ctx->regs[ref][use_pos->op_num] = reg;
						use_pos = use_pos->next;
					}
				}
				ival = ival->next;
			} while (ival);
		}
	}

	/* Temporary registers */
	ival = ctx->live_intervals[0];
	if (ival) {
		do {
			IR_ASSERT(ival->reg != IR_REG_NONE);
			/* The low flag bits select which temporary slot of the
			 * instruction this register fills. */
			ctx->regs[IR_LIVE_POS_TO_REF(ival->range.start)][ival->flags & IR_LIVE_INTERVAL_TEMP_NUM_MASK] = ival->reg;
			ival = ival->next;
		} while (ival);
	}
}
|
|
|
|
|
2022-05-16 10:53:10 +02:00
|
|
|
/* Attach a register hint to the use position(s) of "ref"'s live interval
 * located exactly at "pos".  An already-present hint is never overwritten. */
static void ir_add_hint(ir_ctx *ctx, ir_ref ref, ir_live_pos pos, ir_reg hint)
{
	ir_live_interval *ival = ctx->live_intervals[ctx->vregs[ref]];
	ir_use_pos *p;

	for (p = ival->use_pos; p; p = p->next) {
		if (p->pos == pos && p->hint == IR_REG_NONE) {
			p->hint = hint;
		}
	}
}
|
|
|
|
|
|
|
|
/* Propagate physical-register hints backwards along use chains: when a use
 * position with a concrete hint follows one that references another
 * instruction (hint_ref), copy the hint onto that referenced position so the
 * allocator can try to keep the value in the same register across the copy. */
static void ir_hint_propagation(ir_ctx *ctx)
{
	int i;
	ir_live_interval *ival;
	ir_use_pos *use_pos;
	ir_use_pos *hint_use_pos;

	for (i = 1; i <= ctx->vregs_count; i++) {
		ival = ctx->live_intervals[i];
		if (!ival) {
			continue;
		}
		hint_use_pos = NULL;
		for (use_pos = ival->use_pos; use_pos; use_pos = use_pos->next) {
			if (use_pos->hint_ref) {
				/* Remember the most recent position that points at
				 * another instruction; a later concrete hint may be
				 * propagated to it. */
				hint_use_pos = use_pos;
			} else if (use_pos->hint != IR_REG_NONE && hint_use_pos) {
				/* Don't propagate into definitions (op_num == 0). */
				if (use_pos->op_num != 0) {
					ir_add_hint(ctx, hint_use_pos->hint_ref, hint_use_pos->pos, use_pos->hint);
				}
				hint_use_pos = NULL;
			}
		}
	}
}
|
|
|
|
|
2022-04-05 23:19:23 +02:00
|
|
|
/* Top-level register allocation entry point: propagate hints, run the
 * linear-scan allocator, and on success write the chosen registers into
 * ctx->regs.  Returns 1 on success, 0 on failure. */
int ir_reg_alloc(ir_ctx *ctx)
{
	ir_hint_propagation(ctx);
	if (!ir_linear_scan(ctx)) {
		return 0;
	}
	assign_regs(ctx);
	return 1;
}
|