Mirror of https://github.com/LuaJIT/LuaJIT.git (synced 2025-02-07 23:24:09 +00:00)
Use IR_HIOP for generalized two-register returns.
Sponsored by OpenResty Inc.
commit 986bb406ad
parent c6f5ef649b
src/lj_asm.c | 31
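For orientation, here is a small standalone sketch of the lo/hi split that IR_HIOP models: the loword op and its trailing HIOP are consecutive IR instructions, and the backend assigns them the fixed return pair RID_RETLO/RID_RETHI (ra_destpair in the diff below). The C program is illustrative only and not part of the patch; the names in it (PairResult, split_result) are invented for the example.

#include <stdint.h>
#include <stdio.h>

/* Conceptual model only: a result that spans two registers is treated as a
** loword half plus an IR_HIOP hiword half. On a 32-bit target the halves are
** the low/high 32 bits of a 64-bit value; the commit generalizes the same
** pairing to two-register returns on 64-bit targets as well. */
typedef struct PairResult { uint32_t lo, hi; } PairResult;

static PairResult split_result(uint64_t v)
{
  PairResult r;
  r.lo = (uint32_t)v;          /* loword op -> RID_RETLO */
  r.hi = (uint32_t)(v >> 32);  /* IR_HIOP   -> RID_RETHI */
  return r;
}

int main(void)
{
  PairResult r = split_result(0x1122334455667788ULL);
  printf("lo=%08x hi=%08x\n", r.lo, r.hi);  /* prints lo=55667788 hi=11223344 */
  return 0;
}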
@@ -818,11 +818,11 @@ static void ra_leftov(ASMState *as, Reg dest, IRRef lref)
 }
 #endif

-#if !LJ_64
 /* Force a RID_RETLO/RID_RETHI destination register pair (marked as free). */
 static void ra_destpair(ASMState *as, IRIns *ir)
 {
   Reg destlo = ir->r, desthi = (ir+1)->r;
+  IRIns *irx = (LJ_64 && !irt_is64(ir->t)) ? ir+1 : ir;
   /* First spill unrelated refs blocking the destination registers. */
   if (!rset_test(as->freeset, RID_RETLO) &&
       destlo != RID_RETLO && desthi != RID_RETLO)
@@ -846,29 +846,28 @@ static void ra_destpair(ASMState *as, IRIns *ir)
   /* Check for conflicts and shuffle the registers as needed. */
   if (destlo == RID_RETHI) {
     if (desthi == RID_RETLO) {
-#if LJ_TARGET_X86
-      *--as->mcp = XI_XCHGa + RID_RETHI;
+#if LJ_TARGET_X86ORX64
+      *--as->mcp = REX_64IR(irx, XI_XCHGa + RID_RETHI);
 #else
-      emit_movrr(as, ir, RID_RETHI, RID_TMP);
-      emit_movrr(as, ir, RID_RETLO, RID_RETHI);
-      emit_movrr(as, ir, RID_TMP, RID_RETLO);
+      emit_movrr(as, irx, RID_RETHI, RID_TMP);
+      emit_movrr(as, irx, RID_RETLO, RID_RETHI);
+      emit_movrr(as, irx, RID_TMP, RID_RETLO);
 #endif
     } else {
-      emit_movrr(as, ir, RID_RETHI, RID_RETLO);
-      if (desthi != RID_RETHI) emit_movrr(as, ir, desthi, RID_RETHI);
+      emit_movrr(as, irx, RID_RETHI, RID_RETLO);
+      if (desthi != RID_RETHI) emit_movrr(as, irx, desthi, RID_RETHI);
     }
   } else if (desthi == RID_RETLO) {
-    emit_movrr(as, ir, RID_RETLO, RID_RETHI);
-    if (destlo != RID_RETLO) emit_movrr(as, ir, destlo, RID_RETLO);
+    emit_movrr(as, irx, RID_RETLO, RID_RETHI);
+    if (destlo != RID_RETLO) emit_movrr(as, irx, destlo, RID_RETLO);
   } else {
-    if (desthi != RID_RETHI) emit_movrr(as, ir, desthi, RID_RETHI);
-    if (destlo != RID_RETLO) emit_movrr(as, ir, destlo, RID_RETLO);
+    if (desthi != RID_RETHI) emit_movrr(as, irx, desthi, RID_RETHI);
+    if (destlo != RID_RETLO) emit_movrr(as, irx, destlo, RID_RETLO);
   }
   /* Restore spill slots (if any). */
   if (ra_hasspill((ir+1)->s)) ra_save(as, ir+1, RID_RETHI);
   if (ra_hasspill(ir->s)) ra_save(as, ir, RID_RETLO);
 }
-#endif

 /* -- Snapshot handling --------------------------------------------------- */

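The shuffle above has to cope with the allocator having picked the destination pair in swapped order. As a rough standalone illustration (plain integers stand in for registers; this is not the emitted machine code, whose instructions the backwards assembler produces in reverse), the fully-swapped case amounts to an exchange, done with XCHG on x86/x64 and with three moves through RID_TMP elsewhere:

#include <assert.h>

enum { RETLO = 0, RETHI = 1, TMP = 2 };  /* stand-ins for the RID_* registers */

/* Exchange the value pair when destlo/desthi came out exactly swapped. */
static void swap_pair(unsigned *reg)
{
  reg[TMP]   = reg[RETLO];  /* mov TMP, RETLO   */
  reg[RETLO] = reg[RETHI];  /* mov RETLO, RETHI */
  reg[RETHI] = reg[TMP];    /* mov RETHI, TMP   */
}

int main(void)
{
  unsigned reg[3] = { 0x1111u, 0x2222u, 0u };
  swap_pair(reg);
  assert(reg[RETLO] == 0x2222u && reg[RETHI] == 0x1111u);
  return 0;
}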
@@ -2234,7 +2233,6 @@ static void asm_setup_regsp(ASMState *as)
		      (RSET_SCRATCH & ~RSET_FPR) : RSET_SCRATCH;
       continue;
       }
-#if LJ_SOFTFP || (LJ_32 && LJ_HASFFI)
     case IR_HIOP:
       switch ((ir-1)->o) {
 #if LJ_SOFTFP && LJ_TARGET_ARM
@@ -2245,7 +2243,7 @@ static void asm_setup_regsp(ASMState *as)
 	}
 	break;
 #endif
-#if !LJ_SOFTFP && LJ_NEED_FP64
+#if !LJ_SOFTFP && LJ_NEED_FP64 && LJ_32 && LJ_HASFFI
       case IR_CONV:
 	if (irt_isfp((ir-1)->t)) {
 	  ir->prev = REGSP_HINT(RID_FPRET);
@@ -2253,7 +2251,7 @@ static void asm_setup_regsp(ASMState *as)
 	}
 #endif
 	/* fallthrough */
-      case IR_CALLN: case IR_CALLXS:
+      case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS:
 #if LJ_SOFTFP
       case IR_MIN: case IR_MAX:
 #endif
@@ -2264,7 +2262,6 @@ static void asm_setup_regsp(ASMState *as)
 	break;
       }
       break;
-#endif
 #if LJ_SOFTFP
     case IR_MIN: case IR_MAX:
       if ((ir+1)->o != IR_HIOP) break;
@@ -1885,15 +1885,15 @@ static void asm_int64comp(ASMState *as, IRIns *ir)
 }
 #endif

-/* -- Support for 64 bit ops in 32 bit mode ------------------------------- */
+/* -- Split register ops -------------------------------------------------- */

-/* Hiword op of a split 64 bit op. Previous op must be the loword op. */
+/* Hiword op of a split 32/32 bit op. Previous op is the loword op. */
 static void asm_hiop(ASMState *as, IRIns *ir)
 {
-#if LJ_HASFFI || LJ_SOFTFP
   /* HIOP is marked as a store because it needs its own DCE logic. */
   int uselo = ra_used(ir-1), usehi = ra_used(ir);  /* Loword/hiword used? */
   if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
+#if LJ_HASFFI || LJ_SOFTFP
   if ((ir-1)->o <= IR_NE) {  /* 64 bit integer or FP comparisons. ORDER IR. */
     as->curins--;  /* Always skip the loword comparison. */
 #if LJ_SOFTFP
@@ -1924,6 +1924,7 @@ static void asm_hiop(ASMState *as, IRIns *ir)
       asm_xstore_(as, ir, 4);
     return;
   }
+#endif
   if (!usehi) return;  /* Skip unused hiword op for all remaining ops. */
   switch ((ir-1)->o) {
 #if LJ_HASFFI
@@ -1942,6 +1943,9 @@ static void asm_hiop(ASMState *as, IRIns *ir)
     asm_intneg(as, ir, ARMI_RSC);
     asm_intneg(as, ir-1, ARMI_RSB|ARMI_S);
     break;
+  case IR_CNEWI:
+    /* Nothing to do here. Handled by lo op itself. */
+    break;
 #endif
 #if LJ_SOFTFP
   case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD:
@@ -1949,25 +1953,16 @@ static void asm_hiop(ASMState *as, IRIns *ir)
     if (!uselo)
       ra_allocref(as, ir->op1, RSET_GPR);  /* Mark lo op as used. */
     break;
+  case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
+    /* Nothing to do here. Handled by lo op itself. */
+    break;
 #endif
-  case IR_CALLN:
-  case IR_CALLS:
-  case IR_CALLXS:
+  case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS:
     if (!uselo)
       ra_allocref(as, ir->op1, RID2RSET(RID_RETLO));  /* Mark lo op as used. */
     break;
-#if LJ_SOFTFP
-  case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
-#endif
-  case IR_CNEWI:
-    /* Nothing to do here. Handled by lo op itself. */
-    break;
   default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break;
   }
-#else
-  /* Unused without SOFTFP or FFI. */
-  UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP");
-#endif
 }

 /* -- Profiling ----------------------------------------------------------- */
@@ -460,8 +460,11 @@ static void asm_gencall(ASMState *as, const CCallInfo *ci, IRRef *args)
 static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
 {
   RegSet drop = RSET_SCRATCH;
+  int hiop = ((ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t));
   if (ra_hasreg(ir->r))
     rset_clear(drop, ir->r);  /* Dest reg handled below. */
+  if (hiop && ra_hasreg((ir+1)->r))
+    rset_clear(drop, (ir+1)->r);  /* Dest reg handled below. */
   ra_evictset(as, drop);  /* Evictions must be performed first. */
   if (ra_used(ir)) {
     lj_assertA(!irt_ispri(ir->t), "PRI dest");
@@ -473,6 +476,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
     } else {
       ra_destreg(as, ir, RID_FPRET);
     }
+  } else if (hiop) {
+    ra_destpair(as, ir);
   } else {
     ra_destreg(as, ir, RID_RET);
   }
@@ -1720,13 +1725,25 @@ static void asm_comp(ASMState *as, IRIns *ir)

 #define asm_equal(as, ir)	asm_comp(as, ir)

-/* -- Support for 64 bit ops in 32 bit mode ------------------------------- */
+/* -- Split register ops -------------------------------------------------- */

-/* Hiword op of a split 64 bit op. Previous op must be the loword op. */
+/* Hiword op of a split 64/64 bit op. Previous op is the loword op. */
 static void asm_hiop(ASMState *as, IRIns *ir)
 {
-  UNUSED(as); UNUSED(ir);
-  lj_assertA(0, "unexpected HIOP");  /* Unused on 64 bit. */
+  /* HIOP is marked as a store because it needs its own DCE logic. */
+  int uselo = ra_used(ir-1), usehi = ra_used(ir);  /* Loword/hiword used? */
+  if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
+  if (!usehi) return;  /* Skip unused hiword op for all remaining ops. */
+  switch ((ir-1)->o) {
+  case IR_CALLN:
+  case IR_CALLL:
+  case IR_CALLS:
+  case IR_CALLXS:
+    if (!uselo)
+      ra_allocref(as, ir->op1, RID2RSET(RID_RETLO));  /* Mark lo op as used. */
+    break;
+  default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break;
+  }
 }

 /* -- Profiling ----------------------------------------------------------- */
@@ -351,19 +351,15 @@ static void asm_gencall(ASMState *as, const CCallInfo *ci, IRRef *args)
 static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
 {
   RegSet drop = RSET_SCRATCH;
-#if LJ_32
   int hiop = ((ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t));
-#endif
 #if !LJ_SOFTFP
   if ((ci->flags & CCI_NOFPRCLOBBER))
     drop &= ~RSET_FPR;
 #endif
   if (ra_hasreg(ir->r))
     rset_clear(drop, ir->r);  /* Dest reg handled below. */
-#if LJ_32
   if (hiop && ra_hasreg((ir+1)->r))
     rset_clear(drop, (ir+1)->r);  /* Dest reg handled below. */
-#endif
   ra_evictset(as, drop);  /* Evictions must be performed first. */
   if (ra_used(ir)) {
     lj_assertA(!irt_ispri(ir->t), "PRI dest");
@@ -392,10 +388,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
     } else {
       ra_destreg(as, ir, RID_FPRET);
     }
-#if LJ_32
   } else if (hiop) {
     ra_destpair(as, ir);
-#endif
   } else {
     ra_destreg(as, ir, RID_RET);
   }
@@ -2393,15 +2387,15 @@ static void asm_comp64eq(ASMState *as, IRIns *ir)
 }
 #endif

-/* -- Support for 64 bit ops in 32 bit mode ------------------------------- */
+/* -- Split register ops -------------------------------------------------- */

-/* Hiword op of a split 64 bit op. Previous op must be the loword op. */
+/* Hiword op of a split 32/32 or 64/64 bit op. Previous op is the loword op. */
 static void asm_hiop(ASMState *as, IRIns *ir)
 {
-#if LJ_32 && (LJ_HASFFI || LJ_SOFTFP)
   /* HIOP is marked as a store because it needs its own DCE logic. */
   int uselo = ra_used(ir-1), usehi = ra_used(ir);  /* Loword/hiword used? */
   if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
+#if LJ_32 && (LJ_HASFFI || LJ_SOFTFP)
   if ((ir-1)->o == IR_CONV) {  /* Conversions to/from 64 bit. */
     as->curins--;  /* Always skip the CONV. */
 #if LJ_HASFFI && !LJ_SOFTFP
@@ -2448,38 +2442,33 @@ static void asm_hiop(ASMState *as, IRIns *ir)
     }
     return;
   }
+#endif
   if (!usehi) return;  /* Skip unused hiword op for all remaining ops. */
   switch ((ir-1)->o) {
-#if LJ_HASFFI
+#if LJ_32 && LJ_HASFFI
   case IR_ADD: as->curins--; asm_add64(as, ir); break;
   case IR_SUB: as->curins--; asm_sub64(as, ir); break;
   case IR_NEG: as->curins--; asm_neg64(as, ir); break;
+  case IR_CNEWI:
+    /* Nothing to do here. Handled by lo op itself. */
+    break;
 #endif
-#if LJ_SOFTFP
+#if LJ_32 && LJ_SOFTFP
   case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD:
   case IR_STRTO:
     if (!uselo)
       ra_allocref(as, ir->op1, RSET_GPR);  /* Mark lo op as used. */
     break;
+  case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
+    /* Nothing to do here. Handled by lo op itself. */
+    break;
 #endif
-  case IR_CALLN:
-  case IR_CALLS:
-  case IR_CALLXS:
+  case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS:
     if (!uselo)
       ra_allocref(as, ir->op1, RID2RSET(RID_RETLO));  /* Mark lo op as used. */
     break;
-#if LJ_SOFTFP
-  case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
-#endif
-  case IR_CNEWI:
-    /* Nothing to do here. Handled by lo op itself. */
-    break;
   default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break;
   }
-#else
-  /* Unused on MIPS64 or without SOFTFP or FFI. */
-  UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP");
-#endif
 }

 /* -- Profiling ----------------------------------------------------------- */
@@ -340,10 +340,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
     } else {
       ra_destreg(as, ir, RID_FPRET);
     }
-#if LJ_32
   } else if (hiop) {
     ra_destpair(as, ir);
-#endif
   } else {
     ra_destreg(as, ir, RID_RET);
   }
@@ -1942,15 +1940,15 @@ static void asm_comp64(ASMState *as, IRIns *ir)
 }
 #endif

-/* -- Support for 64 bit ops in 32 bit mode ------------------------------- */
+/* -- Split register ops -------------------------------------------------- */

-/* Hiword op of a split 64 bit op. Previous op must be the loword op. */
+/* Hiword op of a split 32/32 bit op. Previous op is the loword op. */
 static void asm_hiop(ASMState *as, IRIns *ir)
 {
-#if LJ_HASFFI || LJ_SOFTFP
   /* HIOP is marked as a store because it needs its own DCE logic. */
   int uselo = ra_used(ir-1), usehi = ra_used(ir);  /* Loword/hiword used? */
   if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
+#if LJ_HASFFI || LJ_SOFTFP
   if ((ir-1)->o == IR_CONV) {  /* Conversions to/from 64 bit. */
     as->curins--;  /* Always skip the CONV. */
 #if LJ_HASFFI && !LJ_SOFTFP
@@ -1985,12 +1983,16 @@ static void asm_hiop(ASMState *as, IRIns *ir)
     }
     return;
   }
+#endif
   if (!usehi) return;  /* Skip unused hiword op for all remaining ops. */
   switch ((ir-1)->o) {
 #if LJ_HASFFI
   case IR_ADD: as->curins--; asm_add64(as, ir); break;
   case IR_SUB: as->curins--; asm_sub64(as, ir); break;
   case IR_NEG: as->curins--; asm_neg64(as, ir); break;
+  case IR_CNEWI:
+    /* Nothing to do here. Handled by lo op itself. */
+    break;
 #endif
 #if LJ_SOFTFP
   case IR_SLOAD: case IR_ALOAD: case IR_HLOAD: case IR_ULOAD: case IR_VLOAD:
@@ -1998,25 +2000,16 @@ static void asm_hiop(ASMState *as, IRIns *ir)
     if (!uselo)
       ra_allocref(as, ir->op1, RSET_GPR);  /* Mark lo op as used. */
     break;
+  case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
+    /* Nothing to do here. Handled by lo op itself. */
+    break;
 #endif
-  case IR_CALLN:
-  case IR_CALLS:
-  case IR_CALLXS:
+  case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS:
     if (!uselo)
      ra_allocref(as, ir->op1, RID2RSET(RID_RETLO));  /* Mark lo op as used. */
     break;
-#if LJ_SOFTFP
-  case IR_ASTORE: case IR_HSTORE: case IR_USTORE: case IR_TOSTR: case IR_TMPREF:
-#endif
-  case IR_CNEWI:
-    /* Nothing to do here. Handled by lo op itself. */
-    break;
   default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break;
   }
-#else
-  /* Unused without SOFTFP or FFI. */
-  UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP");
-#endif
 }

 /* -- Profiling ----------------------------------------------------------- */
@@ -659,7 +659,7 @@ static void asm_gencall(ASMState *as, const CCallInfo *ci, IRRef *args)
 static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
 {
   RegSet drop = RSET_SCRATCH;
-  int hiop = (LJ_32 && (ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t));
+  int hiop = ((ir+1)->o == IR_HIOP && !irt_isnil((ir+1)->t));
   if ((ci->flags & CCI_NOFPRCLOBBER))
     drop &= ~RSET_FPR;
   if (ra_hasreg(ir->r))
@@ -699,10 +699,8 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
 		  irt_isnum(ir->t) ? XOg_FSTPq : XOg_FSTPd, RID_ESP, ofs);
     }
 #endif
-#if LJ_32
   } else if (hiop) {
     ra_destpair(as, ir);
-#endif
   } else {
     lj_assertA(!irt_ispri(ir->t), "PRI dest");
     ra_destreg(as, ir, RID_RET);
@@ -2611,15 +2609,15 @@ static void asm_comp_int64(ASMState *as, IRIns *ir)
 }
 #endif

-/* -- Support for 64 bit ops in 32 bit mode ------------------------------- */
+/* -- Split register ops -------------------------------------------------- */

-/* Hiword op of a split 64 bit op. Previous op must be the loword op. */
+/* Hiword op of a split 32/32 or 64/64 bit op. Previous op is the loword op. */
 static void asm_hiop(ASMState *as, IRIns *ir)
 {
-#if LJ_32 && LJ_HASFFI
   /* HIOP is marked as a store because it needs its own DCE logic. */
   int uselo = ra_used(ir-1), usehi = ra_used(ir);  /* Loword/hiword used? */
   if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
+#if LJ_32 && LJ_HASFFI
   if ((ir-1)->o == IR_CONV) {  /* Conversions to/from 64 bit. */
     as->curins--;  /* Always skip the CONV. */
     if (usehi || uselo)
@@ -2633,8 +2631,10 @@ static void asm_hiop(ASMState *as, IRIns *ir)
       asm_fxstore(as, ir);
     return;
   }
+#endif
   if (!usehi) return;  /* Skip unused hiword op for all remaining ops. */
   switch ((ir-1)->o) {
+#if LJ_32 && LJ_HASFFI
   case IR_ADD:
     as->flagmcp = NULL;
     as->curins--;
@@ -2657,20 +2657,16 @@ static void asm_hiop(ASMState *as, IRIns *ir)
     asm_neg_not(as, ir-1, XOg_NEG);
     break;
     }
-  case IR_CALLN:
-  case IR_CALLXS:
-    if (!uselo)
-      ra_allocref(as, ir->op1, RID2RSET(RID_RETLO));  /* Mark lo op as used. */
-    break;
   case IR_CNEWI:
     /* Nothing to do here. Handled by CNEWI itself. */
     break;
+#endif
+  case IR_CALLN: case IR_CALLL: case IR_CALLS: case IR_CALLXS:
+    if (!uselo)
+      ra_allocref(as, ir->op1, RID2RSET(RID_RETLO));  /* Mark lo op as used. */
+    break;
   default: lj_assertA(0, "bad HIOP for op %d", (ir-1)->o); break;
   }
-#else
-  /* Unused on x64 or without FFI. */
-  UNUSED(as); UNUSED(ir); lj_assertA(0, "unexpected HIOP");
-#endif
 }

 /* -- Profiling ----------------------------------------------------------- */
@@ -31,6 +31,8 @@ enum {

   /* Calling conventions. */
   RID_RET = RID_X0,
+  RID_RETLO = RID_X0,
+  RID_RETHI = RID_X1,
   RID_FPRET = RID_D0,

   /* These definitions must match with the *.dasc file(s): */
@@ -38,10 +38,9 @@ enum {
   RID_RET = RID_EAX,
 #if LJ_64
   RID_FPRET = RID_XMM0,
-#else
+#endif
   RID_RETLO = RID_EAX,
   RID_RETHI = RID_EDX,
-#endif

   /* These definitions must match with the *.dasc file(s): */
   RID_BASE = RID_EDX,		/* Interpreter BASE. */
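The register definitions above give 64-bit targets a RETLO/RETHI pair as well (X0/X1 on AArch64, and EAX/EDX on x86 regardless of LJ_64 after the #else becomes #endif). That matches how common C ABIs return a small aggregate in two registers. A standalone example of such a callee follows; it is illustration only, and whether any particular FFI call is compiled this way is not shown in the diff.

#include <stdint.h>
#include <stdio.h>

/* Small two-word aggregate: returned in a register pair by the usual C ABIs
** (RAX:RDX on x86-64 SysV, X0:X1 on AArch64). */
typedef struct Pair { uint64_t lo, hi; } Pair;

static Pair make_pair(uint64_t a, uint64_t b)
{
  Pair p = { a, b };
  return p;  /* lo -> first return register, hi -> second */
}

int main(void)
{
  Pair p = make_pair(1, 2);
  printf("%llu %llu\n", (unsigned long long)p.lo, (unsigned long long)p.hi);
  return 0;
}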