Use IR_HIOP for generalized two-register returns.

Sponsored by OpenResty Inc.
Mike Pall 2021-09-19 17:47:11 +02:00
parent c6f5ef649b
commit 986bb406ad
8 changed files with 84 additions and 96 deletions
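Instead of per-architecture special cases, every backend now resolves a two-register call result through the same IR_HIOP convention. A minimal sketch of that convention, assuming the opcode names from lj_ir.h (is_tworeg_call is a hypothetical helper, not part of this commit):

#include "lj_ir.h"  /* IRIns, IROp and the IR_* opcode names */

/* IR_HIOP always immediately follows its loword op, so a hiword handler
   can reach the loword as ir-1. After this commit all four call variants
   may carry a two-register return, not just IR_CALLN/IR_CALLXS. */
static int is_tworeg_call(IRIns *ir)
{
  IROp o = (IROp)(ir-1)->o;
  return ir->o == IR_HIOP &&
         (o == IR_CALLN || o == IR_CALLL || o == IR_CALLS || o == IR_CALLXS);
}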

src/lj_asm.c

@@ -818,11 +818,11 @@ static void ra_leftov(ASMState *as, Reg dest, IRRef lref)
}
#endif
#if !LJ_64
/* Force a RID_RETLO/RID_RETHI destination register pair (marked as free). */
static void ra_destpair(ASMState *as, IRIns *ir)
{
Reg destlo = ir->r, desthi = (ir+1)->r;
IRIns *irx = (LJ_64 && !irt_is64(ir->t)) ? ir+1 : ir;
/* First spill unrelated refs blocking the destination registers. */
if (!rset_test(as->freeset, RID_RETLO) &&
destlo != RID_RETLO && desthi != RID_RETLO)
@@ -846,29 +846,28 @@ static void ra_destpair(ASMState *as, IRIns *ir)
/* Check for conflicts and shuffle the registers as needed. */
if (destlo == RID_RETHI) {
if (desthi == RID_RETLO) {
#if LJ_TARGET_X86
*--as->mcp = XI_XCHGa + RID_RETHI;
#if LJ_TARGET_X86ORX64
*--as->mcp = REX_64IR(irx, XI_XCHGa + RID_RETHI);
#else
emit_movrr(as, ir, RID_RETHI, RID_TMP);
emit_movrr(as, ir, RID_RETLO, RID_RETHI);
emit_movrr(as, ir, RID_TMP, RID_RETLO);
emit_movrr(as, irx, RID_RETHI, RID_TMP);
emit_movrr(as, irx, RID_RETLO, RID_RETHI);
emit_movrr(as, irx, RID_TMP, RID_RETLO);
#endif
} else {
emit_movrr(as, ir, RID_RETHI, RID_RETLO);
if (desthi != RID_RETHI) emit_movrr(as, ir, desthi, RID_RETHI);
emit_movrr(as, irx, RID_RETHI, RID_RETLO);
if (desthi != RID_RETHI) emit_movrr(as, irx, desthi, RID_RETHI);
}
} else if (desthi == RID_RETLO) {
emit_movrr(as, ir, RID_RETLO, RID_RETHI);
if (destlo != RID_RETLO) emit_movrr(as, ir, destlo, RID_RETLO);
emit_movrr(as, irx, RID_RETLO, RID_RETHI);
if (destlo != RID_RETLO) emit_movrr(as, irx, destlo, RID_RETLO);
} else {
if (desthi != RID_RETHI) emit_movrr(as, ir, desthi, RID_RETHI);
if (destlo != RID_RETLO) emit_movrr(as, ir, destlo, RID_RETLO);
if (desthi != RID_RETHI) emit_movrr(as, irx, desthi, RID_RETHI);
if (destlo != RID_RETLO) emit_movrr(as, irx, destlo, RID_RETLO);
}
/* Restore spill slots (if any). */
if (ra_hasspill((ir+1)->s)) ra_save(as, ir+1, RID_RETHI);
if (ra_hasspill(ir->s)) ra_save(as, ir, RID_RETLO);
}
#endif
/* -- Snapshot handling --------------------------------------------------- */
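For reference, these are the conflict cases ra_destpair() untangles, rewritten in execution order as a standalone sketch (plain ints stand in for the RID_* registers; the real allocator emits machine code backwards, so the emit_movrr() calls above appear in reverse). The new irx merely selects which instruction carries the operand width, so REX_64IR() can widen the XCHG/MOVs on x64:

#include <stdio.h>

enum { RETLO = 0, RETHI = 1, TMP = 2 };  /* stand-ins for RID_* */

static void mov(int dst, int src) { printf("mov r%d, r%d\n", dst, src); }

/* Values start in RETLO/RETHI; move them to destlo/desthi without
   clobbering either half. Mirrors the three cases above. */
static void destpair_sketch(int destlo, int desthi)
{
  if (destlo == RETHI) {
    if (desthi == RETLO) {  /* Full swap: via TMP, or one XCHG on x86. */
      mov(TMP, RETLO); mov(RETLO, RETHI); mov(RETHI, TMP);
    } else {  /* Save the hiword before RETHI is overwritten. */
      if (desthi != RETHI) mov(desthi, RETHI);
      mov(RETHI, RETLO);
    }
  } else if (desthi == RETLO) {  /* Save the loword first. */
    if (destlo != RETLO) mov(destlo, RETLO);
    mov(RETLO, RETHI);
  } else {  /* No conflict: at most two independent moves. */
    if (destlo != RETLO) mov(destlo, RETLO);
    if (desthi != RETHI) mov(desthi, RETHI);
  }
}

int main(void)
{
  destpair_sketch(RETHI, RETLO);  /* worst case: full swap */
  return 0;
}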
@@ -2234,7 +2233,6 @@ static void asm_setup_regsp(ASMState *as)
(RSET_SCRATCH & ~RSET_FPR) : RSET_SCRATCH;
continue;
}
#if LJ_SOFTFP || (LJ_32 && LJ_HASFFI)
case IR_HIOP:
switch ((ir-1)->o) {
#if LJ_SOFTFP && LJ_TARGET_ARM
@@ -2245,7 +2243,7 @@ static void asm_setup_regsp(ASMState *as)
}
break;
#endif
#if !LJ_SOFTFP && LJ_NEED_FP64
#if !LJ_SOFTFP && LJ_NEED_FP64 && LJ_32 && LJ_HASFFI
case IR_CONV:
if (irt_isfp((ir-1)->t)) {
ir->prev = REGSP_HINT(RID_FPRET);
@@ -2253,7 +2251,7 @@ static void asm_setup_regsp(ASMState *as)
}
#endif
/* fallthrough */
case IR_CALLN: case IR_CALLXS:
case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS:
#if LJ_SOFTFP
case IR_MIN: case IR_MAX:
#endif
@@ -2264,7 +2262,6 @@ static void asm_setup_regsp(ASMState *as)
break;
}
break;
#endif
#if LJ_SOFTFP
case IR_MIN: case IR_MAX:
if ((ir+1)->o != IR_HIOP) break;

src/lj_asm_arm.h

@@ -1885,15 +1885,15 @@ static void asm_int64comp(ASMState *as, IRIns *ir)
}
#endif
/* -- Support for 64 bit ops in 32 bit mode ------------------------------- */
/* -- Split register ops -------------------------------------------------- */
/* Hiword op of a split 64 bit op. Previous op must be the loword op. */
/* Hiword op of a split 32/32 bit op. Previous op is the loword op. */
static void asm_hiop(ASMState *as, IRIns *ir)
{
#if LJ_HASFFI || LJ_SOFTFP
/* HIOP is marked as a store because it needs its own DCE logic. */
int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */
if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
#if LJ_HASFFI || LJ_SOFTFP
if ((ir-1)->o <= IR_NE) { /* 64 bit integer or FP comparisons. ORDER IR. */
as->curins--; /* Always skip the loword comparison. */
#if LJ_SOFTFP
@@ -1924,6 +1924,7 @@ static void asm_hiop(ASMState *as, IRIns *ir)
asm_xstore_(as, ir, 4);
return;
}
#endif
if (!usehi) return; /* Skip unused hiword op for all remaining ops. */
switch ((ir-1)->o) {
#if LJ_HASFFI
@@ -1942,6 +1943,9 @@ static void asm_hiop(ASMState *as, IRIns *ir)
asm_intneg(as, ir, ARMI_RSC);
asm_intneg(as, ir-1, ARMI_RSB|ARMI_S);
break;
case IR_CNEWI:
/* Nothing to do here. Handled by lo op itself. */
break;
#endif
#if LJ_SOFTFP
case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD:
@@ -1949,25 +1953,16 @@ static void asm_hiop(ASMState *as, IRIns *ir)
if (!uselo)
ra_allocref(as, ir->op1, RSET_GPR); /* Mark lo op as used. */
break;
case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
/* Nothing to do here. Handled by lo op itself. */
break;
#endif
case IR_CALLN:
case IR_CALLS:
case IR_CALLXS:
case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS:
if (!uselo)
ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */
break;
#if LJ_SOFTFP
case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
#endif
case IR_CNEWI:
/* Nothing to do here. Handled by lo op itself. */
break;
default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break;
}
#else
/* Unused without SOFTFP or FFI. */
UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP");
#endif
}
/* -- Profiling ----------------------------------------------------------- */
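At the ABI level the pair handled here is fixed by the ARM EABI: a 64-bit integer result returns in r0 (RID_RETLO) and r1 (RID_RETHI). A plain C illustration of a function whose result occupies exactly that pair:

#include <stdint.h>

/* On 32-bit ARM EABI, the uint64_t result comes back in r0 (lo) and
   r1 (hi) -- exactly the RID_RETLO/RID_RETHI pair asm_hiop() consumes. */
uint64_t swap_halves(uint64_t x)
{
  return (x << 32) | (x >> 32);
}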

src/lj_asm_arm64.h

@@ -460,8 +460,11 @@ static void asm_gencall(ASMState *as, const CCallInfo *ci, IRRef *args)
static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
{
RegSet drop = RSET_SCRATCH;
int hiop = ((ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t));
if (ra_hasreg(ir->r))
rset_clear(drop, ir->r); /* Dest reg handled below. */
if (hiop && ra_hasreg((ir+1)->r))
rset_clear(drop, (ir+1)->r); /* Dest reg handled below. */
ra_evictset(as, drop); /* Evictions must be performed first. */
if (ra_used(ir)) {
lj_assertA(!irt_ispri(ir->t), "PRI dest");
@@ -473,6 +476,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
} else {
ra_destreg(as, ir, RID_FPRET);
}
} else if (hiop) {
ra_destpair(as, ir);
} else {
ra_destreg(as, ir, RID_RET);
}
@@ -1720,13 +1725,25 @@ static void asm_comp(ASMState *as, IRIns *ir)
#define asm_equal(as, ir) asm_comp(as, ir)
/* -- Support for 64 bit ops in 32 bit mode ------------------------------- */
/* -- Split register ops -------------------------------------------------- */
/* Hiword op of a split 64 bit op. Previous op must be the loword op. */
/* Hiword op of a split 64/64 bit op. Previous op is the loword op. */
static void asm_hiop(ASMState *as, IRIns *ir)
{
UNUSED(as); UNUSED(ir);
lj_assertA(0, "unexpected HIOP"); /* Unused on 64 bit. */
/* HIOP is marked as a store because it needs its own DCE logic. */
int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */
if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
if (!usehi) return; /* Skip unused hiword op for all remaining ops. */
switch ((ir-1)->o) {
case IR_CALLN:
case IR_CALLL:
case IR_CALLS:
case IR_CALLXS:
if (!uselo)
ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */
break;
default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break;
}
}
/* -- Profiling ----------------------------------------------------------- */
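The arm64 backend used to assert on any HIOP; it now accepts the call cases, enabling two-register returns in x0/x1. Under AAPCS64 a 16-byte struct comes back in that pair; a hedged illustration (pair128 is a hypothetical type, not part of the commit):

#include <stdint.h>

typedef struct { uint64_t lo, hi; } pair128;  /* hypothetical type */

/* AAPCS64 returns this 16-byte struct in x0 (lo) and x1 (hi), matching
   the new RID_RETLO/RID_RETHI definitions for arm64. */
pair128 make_pair(uint64_t a, uint64_t b)
{
  pair128 p; p.lo = a; p.hi = b; return p;
}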

src/lj_asm_mips.h

@@ -351,19 +351,15 @@ static void asm_gencall(ASMState *as, const CCallInfo *ci, IRRef *args)
static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
{
RegSet drop = RSET_SCRATCH;
#if LJ_32
int hiop = ((ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t));
#endif
#if !LJ_SOFTFP
if ((ci->flags & CCI_NOFPRCLOBBER))
drop &= ~RSET_FPR;
#endif
if (ra_hasreg(ir->r))
rset_clear(drop, ir->r); /* Dest reg handled below. */
#if LJ_32
if (hiop && ra_hasreg((ir+1)->r))
rset_clear(drop, (ir+1)->r); /* Dest reg handled below. */
#endif
ra_evictset(as, drop); /* Evictions must be performed first. */
if (ra_used(ir)) {
lj_assertA(!irt_ispri(ir->t), "PRI dest");
@@ -392,10 +388,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
} else {
ra_destreg(as, ir, RID_FPRET);
}
#if LJ_32
} else if (hiop) {
ra_destpair(as, ir);
#endif
} else {
ra_destreg(as, ir, RID_RET);
}
@@ -2393,15 +2387,15 @@ static void asm_comp64eq(ASMState *as, IRIns *ir)
}
#endif
/* -- Support for 64 bit ops in 32 bit mode ------------------------------- */
/* -- Split register ops -------------------------------------------------- */
/* Hiword op of a split 64 bit op. Previous op must be the loword op. */
/* Hiword op of a split 32/32 or 64/64 bit op. Previous op is the loword op. */
static void asm_hiop(ASMState *as, IRIns *ir)
{
#if LJ_32 && (LJ_HASFFI || LJ_SOFTFP)
/* HIOP is marked as a store because it needs its own DCE logic. */
int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */
if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
#if LJ_32 && (LJ_HASFFI || LJ_SOFTFP)
if ((ir-1)->o == IR_CONV) { /* Conversions to/from 64 bit. */
as->curins--; /* Always skip the CONV. */
#if LJ_HASFFI && !LJ_SOFTFP
@@ -2448,38 +2442,33 @@ static void asm_hiop(ASMState *as, IRIns *ir)
}
return;
}
#endif
if (!usehi) return; /* Skip unused hiword op for all remaining ops. */
switch ((ir-1)->o) {
#if LJ_HASFFI
#if LJ_32 && LJ_HASFFI
case IR_ADD: as->curins--; asm_add64(as, ir); break;
case IR_SUB: as->curins--; asm_sub64(as, ir); break;
case IR_NEG: as->curins--; asm_neg64(as, ir); break;
case IR_CNEWI:
/* Nothing to do here. Handled by lo op itself. */
break;
#endif
#if LJ_SOFTFP
#if LJ_32 && LJ_SOFTFP
case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD:
case IR_STRTO:
if (!uselo)
ra_allocref(as, ir->op1, RSET_GPR); /* Mark lo op as used. */
break;
case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
/* Nothing to do here. Handled by lo op itself. */
break;
#endif
case IR_CALLN:
case IR_CALLS:
case IR_CALLXS:
case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS:
if (!uselo)
ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */
break;
#if LJ_SOFTFP
case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
#endif
case IR_CNEWI:
/* Nothing to do here. Handled by lo op itself. */
break;
default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break;
}
#else
/* Unused on MIPS64 or without SOFTFP or FFI. */
UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP");
#endif
}
/* -- Profiling ----------------------------------------------------------- */
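The comment now reads "32/32 or 64/64" because one handler serves both MIPS32, where a 64-bit value is split across two 32-bit registers, and MIPS64, where a 128-bit result is split across two 64-bit ones. Either way both halves land in $v0/$v1; a hedged illustration (pair128 is a hypothetical type):

#include <stdint.h>

typedef struct { uint64_t lo, hi; } pair128;  /* hypothetical type */

/* The MIPS n64 ABI returns this 16-byte aggregate in $v0 (lo) and
   $v1 (hi); on MIPS32 the o32 ABI splits a plain uint64_t across
   $v0/$v1 the same way. */
pair128 widen_pair(uint64_t x)
{
  pair128 p; p.lo = x; p.hi = 0; return p;
}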

src/lj_asm_ppc.h

@@ -340,10 +340,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
} else {
ra_destreg(as, ir, RID_FPRET);
}
#if LJ_32
} else if (hiop) {
ra_destpair(as, ir);
#endif
} else {
ra_destreg(as, ir, RID_RET);
}
@@ -1942,15 +1940,15 @@ static void asm_comp64(ASMState *as, IRIns *ir)
}
#endif
/* -- Support for 64 bit ops in 32 bit mode ------------------------------- */
/* -- Split register ops -------------------------------------------------- */
/* Hiword op of a split 64 bit op. Previous op must be the loword op. */
/* Hiword op of a split 32/32 bit op. Previous op is the loword op. */
static void asm_hiop(ASMState *as, IRIns *ir)
{
#if LJ_HASFFI || LJ_SOFTFP
/* HIOP is marked as a store because it needs its own DCE logic. */
int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */
if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
#if LJ_HASFFI || LJ_SOFTFP
if ((ir-1)->o == IR_CONV) { /* Conversions to/from 64 bit. */
as->curins--; /* Always skip the CONV. */
#if LJ_HASFFI && !LJ_SOFTFP
@@ -1985,12 +1983,16 @@ static void asm_hiop(ASMState *as, IRIns *ir)
}
return;
}
#endif
if (!usehi) return; /* Skip unused hiword op for all remaining ops. */
switch ((ir-1)->o) {
#if LJ_HASFFI
case IR_ADD: as->curins--; asm_add64(as, ir); break;
case IR_SUB: as->curins--; asm_sub64(as, ir); break;
case IR_NEG: as->curins--; asm_neg64(as, ir); break;
case IR_CNEWI:
/* Nothing to do here. Handled by lo op itself. */
break;
#endif
#if LJ_SOFTFP
case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD:
@@ -1998,25 +2000,16 @@ static void asm_hiop(ASMState *as, IRIns *ir)
if (!uselo)
ra_allocref(as, ir->op1, RSET_GPR); /* Mark lo op as used. */
break;
case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
/* Nothing to do here. Handled by lo op itself. */
break;
#endif
case IR_CALLN:
case IR_CALLS:
case IR_CALLXS:
case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS:
if (!uselo)
ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */
break;
#if LJ_SOFTFP
case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
#endif
case IR_CNEWI:
/* Nothing to do here. Handled by lo op itself. */
break;
default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break;
}
#else
/* Unused without SOFTFP or FFI. */
UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP");
#endif
}
/* -- Profiling ----------------------------------------------------------- */
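A note on the uselo/usehi logic shared by all of these handlers: HIOP is classed as a store so generic DCE leaves it alone, and the handler checks liveness itself. If only the hiword of a call result is consumed, the loword ref is still force-allocated to RID_RETLO so the call itself is not dropped. A hypothetical C pattern that would exercise exactly that path:

#include <stdint.h>

/* Only the high half of the 64-bit result is used; the call must still
   happen, which is what ra_allocref(..., RID2RSET(RID_RETLO)) ensures. */
static uint32_t hi_word(uint64_t (*f)(void))
{
  return (uint32_t)(f() >> 32);
}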

src/lj_asm_x86.h

@@ -659,7 +659,7 @@ static void asm_gencall(ASMState *as, const CCallInfo *ci, IRRef *args)
static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
{
RegSet drop = RSET_SCRATCH;
int hiop = (LJ_32 && (ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t));
int hiop = ((ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t));
if ((ci->flags & CCI_NOFPRCLOBBER))
drop &= ~RSET_FPR;
if (ra_hasreg(ir->r))
@@ -699,10 +699,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
irt_isnum(ir->t) ? XOg_FSTPq : XOg_FSTPd, RID_ESP, ofs);
}
#endif
#if LJ_32
} else if (hiop) {
ra_destpair(as, ir);
#endif
} else {
lj_assertA(!irt_ispri(ir->t), "PRI dest");
ra_destreg(as, ir, RID_RET);
@@ -2611,15 +2609,15 @@ static void asm_comp_int64(ASMState *as, IRIns *ir)
}
#endif
/* -- Support for 64 bit ops in 32 bit mode ------------------------------- */
/* -- Split register ops -------------------------------------------------- */
/* Hiword op of a split 64 bit op. Previous op must be the loword op. */
/* Hiword op of a split 32/32 or 64/64 bit op. Previous op is the loword op. */
static void asm_hiop(ASMState *as, IRIns *ir)
{
#if LJ_32 && LJ_HASFFI
/* HIOP is marked as a store because it needs its own DCE logic. */
int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */
if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
#if LJ_32 && LJ_HASFFI
if ((ir-1)->o == IR_CONV) { /* Conversions to/from 64 bit. */
as->curins--; /* Always skip the CONV. */
if (usehi || uselo)
@@ -2633,8 +2631,10 @@ static void asm_hiop(ASMState *as, IRIns *ir)
asm_fxstore(as, ir);
return;
}
#endif
if (!usehi) return; /* Skip unused hiword op for all remaining ops. */
switch ((ir-1)->o) {
#if LJ_32 && LJ_HASFFI
case IR_ADD:
as->flagmcp = NULL;
as->curins--;
@@ -2657,20 +2657,16 @@
asm_neg_not(as, ir-1, XOg_NEG);
break;
}
case IR_CALLN:
case IR_CALLXS:
if (!uselo)
ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */
break;
case IR_CNEWI:
/* Nothing to do here. Handled by CNEWI itself. */
break;
#endif
case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS:
if (!uselo)
ra_allocref(as, ir->op1, RID2RSET(RID_RETLO)); /* Mark lo op as used. */
break;
default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break;
}
#else
/* Unused on x64 or without FFI. */
UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP");
#endif
}
/* -- Profiling ----------------------------------------------------------- */
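On x86-64 the generalized pair is RAX (RID_RETLO) and RDX (RID_RETHI), and REX_64IR() in ra_destpair() widens the shuffle MOVs/XCHG when the halves are 64 bit. The SysV ABI returns an unsigned __int128 in exactly this pair (a GCC/Clang extension, shown purely for illustration):

#include <stdint.h>

/* SysV x86-64 returns the 128-bit product in RDX:RAX, i.e. the
   RID_RETHI:RID_RETLO pair the backend now handles via IR_HIOP. */
unsigned __int128 mul_wide(uint64_t a, uint64_t b)
{
  return (unsigned __int128)a * b;
}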

src/lj_target_arm64.h

@@ -31,6 +31,8 @@ enum {
/* Calling conventions. */
RID_RET = RID_X0,
RID_RETLO = RID_X0,
RID_RETHI = RID_X1,
RID_FPRET = RID_D0,
/* These definitions must match with the *.dasc file(s): */

src/lj_target_x86.h

@@ -38,10 +38,9 @@ enum {
RID_RET = RID_EAX,
#if LJ_64
RID_FPRET = RID_XMM0,
#else
#endif
RID_RETLO = RID_EAX,
RID_RETHI = RID_EDX,
#endif
/* These definitions must match with the *.dasc file(s): */
RID_BASE = RID_EDX, /* Interpreter BASE. */
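This header change makes RID_RETLO/RID_RETHI available on x64 too (they were previously defined only in the !LJ_64 branch). On 32-bit x86 the same pair is EDX:EAX, which is where a uint64_t return lands; on x64 the names map to RDX:RAX. An illustrative example:

#include <stdint.h>

/* On 32-bit x86 (cdecl), the uint64_t result is returned in EDX:EAX --
   RID_RETHI:RID_RETLO in the enum above. */
uint64_t combine(uint32_t lo, uint32_t hi)
{
  return ((uint64_t)hi << 32) | lo;
}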