Reorganize generic operations common to all assembler backends.

Mike Pall 2013-04-22 17:34:36 +02:00
parent 5f1781a127
commit 988e183965
5 changed files with 219 additions and 461 deletions
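The pattern, visible across the five files below: helpers that each backend duplicated (asm_call, asm_callid, asm_tostr, asm_newref, asm_conv64, asm_fpjoin_pow) move into the generic part of the assembler, which in turn relies on a handful of per-target primitives (asm_setupresult, asm_gencall, asm_tvptr, asm_fppow). A standalone toy in plain C sketching that layering follows; every identifier in it is invented for illustration and none of it is LuaJIT source.

#include <stdio.h>

typedef struct CallInfo { const char *name; int nargs; } CallInfo;

/* Per-target hooks: the only part each backend has to provide. */
typedef struct Backend {
  void (*setupresult)(const CallInfo *ci);
  void (*gencall)(const CallInfo *ci, const int *args);
} Backend;

/* Generic part, written once for all backends: collect the two operands
** and defer the target-specific work to the backend hooks. */
static void generic_callid(const Backend *be, const CallInfo *ci,
                           int op1, int op2)
{
  int args[2] = { op1, op2 };
  be->setupresult(ci);    /* where the result lands is target-specific */
  be->gencall(ci, args);  /* how the call is emitted is target-specific */
}

/* One mock backend. */
static void x86_setupresult(const CallInfo *ci)
{
  printf("x86: result of %s -> return register\n", ci->name);
}
static void x86_gencall(const CallInfo *ci, const int *args)
{
  printf("x86: call %s(%d, %d)\n", ci->name, args[0], args[1]);
}

int main(void)
{
  Backend x86 = { x86_setupresult, x86_gencall };
  CallInfo modi = { "lj_vm_modi", 2 };
  generic_callid(&x86, &modi, 17, 5);  /* cf. IR_MOD lowering via asm_callid */
  return 0;
}

The real tree composes these statically, by including the backend-specific code into the generic file, rather than through function pointers as in the toy; the sketch just makes the boundary explicit.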

View File

@@ -949,44 +949,6 @@ static void asm_snap_prep(ASMState *as)
/* -- Miscellaneous helpers ----------------------------------------------- */
/* Collect arguments from CALL* and CARG instructions. */
static void asm_collectargs(ASMState *as, IRIns *ir,
const CCallInfo *ci, IRRef *args)
{
uint32_t n = CCI_NARGS(ci);
lua_assert(n <= CCI_NARGS_MAX);
if ((ci->flags & CCI_L)) { *args++ = ASMREF_L; n--; }
while (n-- > 1) {
ir = IR(ir->op1);
lua_assert(ir->o == IR_CARG);
args[n] = ir->op2 == REF_NIL ? 0 : ir->op2;
}
args[0] = ir->op1 == REF_NIL ? 0 : ir->op1;
lua_assert(IR(ir->op1)->o != IR_CARG);
}
/* Reconstruct CCallInfo flags for CALLX*. */
static uint32_t asm_callx_flags(ASMState *as, IRIns *ir)
{
uint32_t nargs = 0;
if (ir->op1 != REF_NIL) { /* Count number of arguments first. */
IRIns *ira = IR(ir->op1);
nargs++;
while (ira->o == IR_CARG) { nargs++; ira = IR(ira->op1); }
}
#if LJ_HASFFI
if (IR(ir->op2)->o == IR_CARG) { /* Copy calling convention info. */
CTypeID id = (CTypeID)IR(IR(ir->op2)->op2)->i;
CType *ct = ctype_get(ctype_ctsG(J2G(as->J)), id);
nargs |= ((ct->info & CTF_VARARG) ? CCI_VARARG : 0);
#if LJ_TARGET_X86
nargs |= (ctype_cconv(ct->info) << CCI_CC_SHIFT);
#endif
}
#endif
return (nargs | (ir->t.irt << CCI_OTSHIFT));
}
/* Calculate stack adjustment. */
static int32_t asm_stack_adjust(ASMState *as)
{
@@ -1071,7 +1033,9 @@ static void asm_gcstep(ASMState *as, IRIns *ir)
as->gcsteps = 0x80000000; /* Prevent implicit GC check further up. */
}
/* -- Buffer handling ----------------------------------------------------- */
/* -- Buffer operations --------------------------------------------------- */
static void asm_tvptr(ASMState *as, Reg dest, IRRef ref);
static void asm_bufhdr(ASMState *as, IRIns *ir)
{
@@ -1091,10 +1055,6 @@ static void asm_bufhdr(ASMState *as, IRIns *ir)
}
}
#if !LJ_TARGET_X86ORX64
static void asm_tvptr(ASMState *as, Reg dest, IRRef ref);
#endif
static void asm_bufput(ASMState *as, IRIns *ir)
{
const CCallInfo *ci;
@@ -1115,14 +1075,8 @@ static void asm_bufput(ASMState *as, IRIns *ir)
}
asm_setupresult(as, ir, ci); /* SBuf * */
asm_gencall(as, ci, args);
if (args[1] == ASMREF_TMP1) {
#if LJ_TARGET_X86ORX64
emit_rmro(as, XO_LEA, ra_releasetmp(as, ASMREF_TMP1)|REX_64,
RID_ESP, ra_spill(as, IR(ir->op2)));
#else
if (args[1] == ASMREF_TMP1)
asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);
#endif
}
}
static void asm_bufstr(ASMState *as, IRIns *ir)
@@ -1135,6 +1089,161 @@ static void asm_bufstr(ASMState *as, IRIns *ir)
asm_gencall(as, ci, args);
}
/* -- Type conversions ---------------------------------------------------- */
static void asm_tostr(ASMState *as, IRIns *ir)
{
IRRef args[2];
args[0] = ASMREF_L;
as->gcsteps++;
if (irt_isnum(IR(ir->op1)->t) || (LJ_SOFTFP && (ir+1)->o == IR_HIOP)) {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
args[1] = ASMREF_TMP1; /* const lua_Number * */
asm_setupresult(as, ir, ci); /* GCstr * */
asm_gencall(as, ci, args);
asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op1);
} else {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
args[1] = ir->op1; /* int32_t k */
asm_setupresult(as, ir, ci); /* GCstr * */
asm_gencall(as, ci, args);
}
}
#if LJ_32 && LJ_HASFFI && !LJ_SOFTFP && !LJ_TARGET_X86
static void asm_conv64(ASMState *as, IRIns *ir)
{
IRType st = (IRType)((ir-1)->op2 & IRCONV_SRCMASK);
IRType dt = (((ir-1)->op2 & IRCONV_DSTMASK) >> IRCONV_DSH);
IRCallID id;
IRRef args[2];
lua_assert((ir-1)->o == IR_CONV && ir->o == IR_HIOP);
args[LJ_BE] = (ir-1)->op1;
args[LJ_LE] = ir->op1;
if (st == IRT_NUM || st == IRT_FLOAT) {
id = IRCALL_fp64_d2l + ((st == IRT_FLOAT) ? 2 : 0) + (dt - IRT_I64);
ir--;
} else {
id = IRCALL_fp64_l2d + ((dt == IRT_FLOAT) ? 2 : 0) + (st - IRT_I64);
}
{
#if LJ_TARGET_ARM && !LJ_ABI_SOFTFP
CCallInfo cim = lj_ir_callinfo[id], *ci = &cim;
cim.flags |= CCI_VARARG; /* These calls don't use the hard-float ABI! */
#else
const CCallInfo *ci = &lj_ir_callinfo[id];
#endif
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
}
#endif
/* -- Memory references --------------------------------------------------- */
static void asm_newref(ASMState *as, IRIns *ir)
{
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
IRRef args[3];
if (ir->r == RID_SINK)
return;
args[0] = ASMREF_L; /* lua_State *L */
args[1] = ir->op1; /* GCtab *t */
args[2] = ASMREF_TMP1; /* cTValue *key */
asm_setupresult(as, ir, ci); /* TValue * */
asm_gencall(as, ci, args);
asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);
}
/* -- Calls --------------------------------------------------------------- */
/* Collect arguments from CALL* and CARG instructions. */
static void asm_collectargs(ASMState *as, IRIns *ir,
const CCallInfo *ci, IRRef *args)
{
uint32_t n = CCI_NARGS(ci);
lua_assert(n <= CCI_NARGS_MAX);
if ((ci->flags & CCI_L)) { *args++ = ASMREF_L; n--; }
while (n-- > 1) {
ir = IR(ir->op1);
lua_assert(ir->o == IR_CARG);
args[n] = ir->op2 == REF_NIL ? 0 : ir->op2;
}
args[0] = ir->op1 == REF_NIL ? 0 : ir->op1;
lua_assert(IR(ir->op1)->o != IR_CARG);
}
/* Reconstruct CCallInfo flags for CALLX*. */
static uint32_t asm_callx_flags(ASMState *as, IRIns *ir)
{
uint32_t nargs = 0;
if (ir->op1 != REF_NIL) { /* Count number of arguments first. */
IRIns *ira = IR(ir->op1);
nargs++;
while (ira->o == IR_CARG) { nargs++; ira = IR(ira->op1); }
}
#if LJ_HASFFI
if (IR(ir->op2)->o == IR_CARG) { /* Copy calling convention info. */
CTypeID id = (CTypeID)IR(IR(ir->op2)->op2)->i;
CType *ct = ctype_get(ctype_ctsG(J2G(as->J)), id);
nargs |= ((ct->info & CTF_VARARG) ? CCI_VARARG : 0);
#if LJ_TARGET_X86
nargs |= (ctype_cconv(ct->info) << CCI_CC_SHIFT);
#endif
}
#endif
return (nargs | (ir->t.irt << CCI_OTSHIFT));
}
static void asm_callid(ASMState *as, IRIns *ir, IRCallID id)
{
const CCallInfo *ci = &lj_ir_callinfo[id];
IRRef args[2];
args[0] = ir->op1;
args[1] = ir->op2;
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
static void asm_call(ASMState *as, IRIns *ir)
{
IRRef args[CCI_NARGS_MAX];
const CCallInfo *ci = &lj_ir_callinfo[ir->op2];
asm_collectargs(as, ir, ci, args);
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
#if !LJ_SOFTFP
static void asm_fppow(ASMState *as, IRIns *ir, IRRef lref, IRRef rref);
#if !LJ_TARGET_X86ORX64
static void asm_fppow(ASMState *as, IRIns *ir, IRRef lref, IRRef rref)
{
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_pow];
IRRef args[2];
args[0] = lref;
args[1] = rref;
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
#endif
static int asm_fpjoin_pow(ASMState *as, IRIns *ir)
{
IRIns *irp = IR(ir->op1);
if (irp == ir-1 && irp->o == IR_MUL && !ra_used(irp)) {
IRIns *irpp = IR(irp->op1);
if (irpp == ir-2 && irpp->o == IR_FPMATH &&
irpp->op2 == IRFPM_LOG2 && !ra_used(irpp)) {
asm_fppow(as, ir, irpp->op1, irp->op2);
return 1;
}
}
return 0;
}
#endif
/* -- PHI and loop handling ----------------------------------------------- */
/* Break a PHI cycle by renaming to a free register (evict if needed). */

View File

@@ -453,15 +453,6 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
UNUSED(ci);
}
static void asm_call(ASMState *as, IRIns *ir)
{
IRRef args[CCI_NARGS_MAX];
const CCallInfo *ci = &lj_ir_callinfo[ir->op2];
asm_collectargs(as, ir, ci, args);
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
static void asm_callx(ASMState *as, IRIns *ir)
{
IRRef args[CCI_NARGS_MAX];
@@ -600,31 +591,6 @@ static void asm_conv(ASMState *as, IRIns *ir)
}
}
#if !LJ_SOFTFP && LJ_HASFFI
static void asm_conv64(ASMState *as, IRIns *ir)
{
IRType st = (IRType)((ir-1)->op2 & IRCONV_SRCMASK);
IRType dt = (((ir-1)->op2 & IRCONV_DSTMASK) >> IRCONV_DSH);
IRCallID id;
CCallInfo ci;
IRRef args[2];
args[0] = (ir-1)->op1;
args[1] = ir->op1;
if (st == IRT_NUM || st == IRT_FLOAT) {
id = IRCALL_fp64_d2l + ((st == IRT_FLOAT) ? 2 : 0) + (dt - IRT_I64);
ir--;
} else {
id = IRCALL_fp64_l2d + ((dt == IRT_FLOAT) ? 2 : 0) + (st - IRT_I64);
}
ci = lj_ir_callinfo[id];
#if !LJ_ABI_SOFTFP
ci.flags |= CCI_VARARG; /* These calls don't use the hard-float ABI! */
#endif
asm_setupresult(as, ir, &ci);
asm_gencall(as, &ci, args);
}
#endif
static void asm_strto(ASMState *as, IRIns *ir)
{
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_strscan_num];
@@ -688,6 +654,8 @@ static void asm_strto(ASMState *as, IRIns *ir)
emit_opk(as, ARMI_ADD, tmp, RID_SP, ofs, RSET_GPR);
}
/* -- Memory references --------------------------------------------------- */
/* Get pointer to TValue. */
static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
{
@@ -713,7 +681,7 @@ static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
Reg src = ra_alloc1(as, ref, allow);
emit_lso(as, ARMI_STR, src, RID_SP, 0);
}
if ((ir+1)->o == IR_HIOP)
if (LJ_SOFTFP && (ir+1)->o == IR_HIOP)
type = ra_alloc1(as, ref+1, allow);
else
type = ra_allock(as, irt_toitype(ir->t), allow);
@@ -721,27 +689,6 @@ static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
}
}
static void asm_tostr(ASMState *as, IRIns *ir)
{
IRRef args[2];
args[0] = ASMREF_L;
as->gcsteps++;
if (irt_isnum(IR(ir->op1)->t) || (ir+1)->o == IR_HIOP) {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
args[1] = ASMREF_TMP1; /* const lua_Number * */
asm_setupresult(as, ir, ci); /* GCstr * */
asm_gencall(as, ci, args);
asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op1);
} else {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
args[1] = ir->op1; /* int32_t k */
asm_setupresult(as, ir, ci); /* GCstr * */
asm_gencall(as, ci, args);
}
}
/* -- Memory references --------------------------------------------------- */
static void asm_aref(ASMState *as, IRIns *ir)
{
Reg dest = ra_dest(as, ir, RSET_GPR);
@@ -959,20 +906,6 @@ static void asm_hrefk(ASMState *as, IRIns *ir)
emit_opk(as, ARMI_ADD, dest, node, ofs, RSET_GPR);
}
static void asm_newref(ASMState *as, IRIns *ir)
{
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
IRRef args[3];
if (ir->r == RID_SINK)
return;
args[0] = ASMREF_L; /* lua_State *L */
args[1] = ir->op1; /* GCtab *t */
args[2] = ASMREF_TMP1; /* cTValue *key */
asm_setupresult(as, ir, ci); /* TValue * */
asm_gencall(as, ci, args);
asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);
}
static void asm_uref(ASMState *as, IRIns *ir)
{
/* NYI: Check that UREFO is still open and not aliasing a slot. */
@@ -1391,25 +1324,6 @@ static void asm_fpunary(ASMState *as, IRIns *ir, ARMIns ai)
Reg left = ra_hintalloc(as, ir->op1, dest, RSET_FPR);
emit_dm(as, ai, (dest & 15), (left & 15));
}
static int asm_fpjoin_pow(ASMState *as, IRIns *ir)
{
IRIns *irp = IR(ir->op1);
if (irp == ir-1 && irp->o == IR_MUL && !ra_used(irp)) {
IRIns *irpp = IR(irp->op1);
if (irpp == ir-2 && irpp->o == IR_FPMATH &&
irpp->op2 == IRFPM_LOG2 && !ra_used(irpp)) {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_pow];
IRRef args[2];
args[0] = irpp->op1;
args[1] = irp->op2;
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
return 1;
}
}
return 0;
}
#endif
static int asm_swapops(ASMState *as, IRRef lref, IRRef rref)
@@ -1561,16 +1475,6 @@ static void asm_neg(ASMState *as, IRIns *ir)
asm_intneg(as, ir, ARMI_RSB);
}
static void asm_callid(ASMState *as, IRIns *ir, IRCallID id)
{
const CCallInfo *ci = &lj_ir_callinfo[id];
IRRef args[2];
args[0] = ir->op1;
args[1] = ir->op2;
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
#if !LJ_SOFTFP
static void asm_callround(ASMState *as, IRIns *ir, int id)
{

View File

@@ -326,15 +326,6 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
}
}
static void asm_call(ASMState *as, IRIns *ir)
{
IRRef args[CCI_NARGS_MAX];
const CCallInfo *ci = &lj_ir_callinfo[ir->op2];
asm_collectargs(as, ir, ci, args);
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
static void asm_callx(ASMState *as, IRIns *ir)
{
IRRef args[CCI_NARGS_MAX];
@@ -362,16 +353,6 @@ static void asm_callx(ASMState *as, IRIns *ir)
asm_gencall(as, &ci, args);
}
static void asm_callid(ASMState *as, IRIns *ir, IRCallID id)
{
const CCallInfo *ci = &lj_ir_callinfo[id];
IRRef args[2];
args[0] = ir->op1;
args[1] = ir->op2;
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
static void asm_callround(ASMState *as, IRIns *ir, IRCallID id)
{
/* The modified regs must match with the *.dasc implementation. */
@@ -519,28 +500,6 @@ static void asm_conv(ASMState *as, IRIns *ir)
}
}
#if LJ_HASFFI
static void asm_conv64(ASMState *as, IRIns *ir)
{
IRType st = (IRType)((ir-1)->op2 & IRCONV_SRCMASK);
IRType dt = (((ir-1)->op2 & IRCONV_DSTMASK) >> IRCONV_DSH);
IRCallID id;
const CCallInfo *ci;
IRRef args[2];
args[LJ_BE?0:1] = ir->op1;
args[LJ_BE?1:0] = (ir-1)->op1;
if (st == IRT_NUM || st == IRT_FLOAT) {
id = IRCALL_fp64_d2l + ((st == IRT_FLOAT) ? 2 : 0) + (dt - IRT_I64);
ir--;
} else {
id = IRCALL_fp64_l2d + ((dt == IRT_FLOAT) ? 2 : 0) + (st - IRT_I64);
}
ci = &lj_ir_callinfo[id];
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
#endif
static void asm_strto(ASMState *as, IRIns *ir)
{
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_strscan_num];
@@ -557,6 +516,8 @@ static void asm_strto(ASMState *as, IRIns *ir)
RID_SP, sps_scale(ir->s));
}
/* -- Memory references --------------------------------------------------- */
/* Get pointer to TValue. */
static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
{
@@ -580,27 +541,6 @@ static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
}
}
static void asm_tostr(ASMState *as, IRIns *ir)
{
IRRef args[2];
args[0] = ASMREF_L;
as->gcsteps++;
if (irt_isnum(IR(ir->op1)->t) || (ir+1)->o == IR_HIOP) {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
args[1] = ASMREF_TMP1; /* const lua_Number * */
asm_setupresult(as, ir, ci); /* GCstr * */
asm_gencall(as, ci, args);
asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op1);
} else {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
args[1] = ir->op1; /* int32_t k */
asm_setupresult(as, ir, ci); /* GCstr * */
asm_gencall(as, ci, args);
}
}
/* -- Memory references --------------------------------------------------- */
static void asm_aref(ASMState *as, IRIns *ir)
{
Reg dest = ra_dest(as, ir, RSET_GPR);
@@ -776,20 +716,6 @@ nolo:
emit_tsi(as, MIPSI_ADDU, dest, node, ra_allock(as, ofs, allow));
}
static void asm_newref(ASMState *as, IRIns *ir)
{
if (ir->r != RID_SINK) {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
IRRef args[3];
args[0] = ASMREF_L; /* lua_State *L */
args[1] = ir->op1; /* GCtab *t */
args[2] = ASMREF_TMP1; /* cTValue *key */
asm_setupresult(as, ir, ci); /* TValue * */
asm_gencall(as, ci, args);
asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);
}
}
static void asm_uref(ASMState *as, IRIns *ir)
{
/* NYI: Check that UREFO is still open and not aliasing a slot. */
@@ -1152,25 +1078,6 @@ static void asm_fpunary(ASMState *as, IRIns *ir, MIPSIns mi)
emit_fg(as, mi, dest, left);
}
static int asm_fpjoin_pow(ASMState *as, IRIns *ir)
{
IRIns *irp = IR(ir->op1);
if (irp == ir-1 && irp->o == IR_MUL && !ra_used(irp)) {
IRIns *irpp = IR(irp->op1);
if (irpp == ir-2 && irpp->o == IR_FPMATH &&
irpp->op2 == IRFPM_LOG2 && !ra_used(irpp)) {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_pow];
IRRef args[2];
args[0] = irpp->op1;
args[1] = irp->op2;
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
return 1;
}
}
return 0;
}
static void asm_add(ASMState *as, IRIns *ir)
{
if (irt_isnum(ir->t)) {

View File

@@ -329,15 +329,6 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
}
}
static void asm_call(ASMState *as, IRIns *ir)
{
IRRef args[CCI_NARGS_MAX];
const CCallInfo *ci = &lj_ir_callinfo[ir->op2];
asm_collectargs(as, ir, ci, args);
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
static void asm_callx(ASMState *as, IRIns *ir)
{
IRRef args[CCI_NARGS_MAX];
@@ -361,16 +352,6 @@ static void asm_callx(ASMState *as, IRIns *ir)
asm_gencall(as, &ci, args);
}
static void asm_callid(ASMState *as, IRIns *ir, IRCallID id)
{
const CCallInfo *ci = &lj_ir_callinfo[id];
IRRef args[2];
args[0] = ir->op1;
args[1] = ir->op2;
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
/* -- Returns ------------------------------------------------------------- */
/* Return to lower frame. Guard that it goes to the right spot. */
@@ -510,28 +491,6 @@ static void asm_conv(ASMState *as, IRIns *ir)
}
}
#if LJ_HASFFI
static void asm_conv64(ASMState *as, IRIns *ir)
{
IRType st = (IRType)((ir-1)->op2 & IRCONV_SRCMASK);
IRType dt = (((ir-1)->op2 & IRCONV_DSTMASK) >> IRCONV_DSH);
IRCallID id;
const CCallInfo *ci;
IRRef args[2];
args[0] = ir->op1;
args[1] = (ir-1)->op1;
if (st == IRT_NUM || st == IRT_FLOAT) {
id = IRCALL_fp64_d2l + ((st == IRT_FLOAT) ? 2 : 0) + (dt - IRT_I64);
ir--;
} else {
id = IRCALL_fp64_l2d + ((dt == IRT_FLOAT) ? 2 : 0) + (st - IRT_I64);
}
ci = &lj_ir_callinfo[id];
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
#endif
static void asm_strto(ASMState *as, IRIns *ir)
{
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_strscan_num];
@@ -550,6 +509,8 @@ static void asm_strto(ASMState *as, IRIns *ir)
emit_tai(as, PPCI_ADDI, ra_releasetmp(as, ASMREF_TMP1), RID_SP, ofs);
}
/* -- Memory references --------------------------------------------------- */
/* Get pointer to TValue. */
static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
{
@@ -573,27 +534,6 @@ static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
}
}
static void asm_tostr(ASMState *as, IRIns *ir)
{
IRRef args[2];
args[0] = ASMREF_L;
as->gcsteps++;
if (irt_isnum(IR(ir->op1)->t) || (ir+1)->o == IR_HIOP) {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
args[1] = ASMREF_TMP1; /* const lua_Number * */
asm_setupresult(as, ir, ci); /* GCstr * */
asm_gencall(as, ci, args);
asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op1);
} else {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
args[1] = ir->op1; /* int32_t k */
asm_setupresult(as, ir, ci); /* GCstr * */
asm_gencall(as, ci, args);
}
}
/* -- Memory references --------------------------------------------------- */
static void asm_aref(ASMState *as, IRIns *ir)
{
Reg dest = ra_dest(as, ir, RSET_GPR);
@@ -770,20 +710,6 @@ static void asm_hrefk(ASMState *as, IRIns *ir)
}
}
static void asm_newref(ASMState *as, IRIns *ir)
{
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
IRRef args[3];
if (ir->r == RID_SINK)
return;
args[0] = ASMREF_L; /* lua_State *L */
args[1] = ir->op1; /* GCtab *t */
args[2] = ASMREF_TMP1; /* cTValue *key */
asm_setupresult(as, ir, ci); /* TValue * */
asm_gencall(as, ci, args);
asm_tvptr(as, ra_releasetmp(as, ASMREF_TMP1), ir->op2);
}
static void asm_uref(ASMState *as, IRIns *ir)
{
/* NYI: Check that UREFO is still open and not aliasing a slot. */
@@ -1194,25 +1120,6 @@ static void asm_fpunary(ASMState *as, IRIns *ir, PPCIns pi)
emit_fb(as, pi, dest, left);
}
static int asm_fpjoin_pow(ASMState *as, IRIns *ir)
{
IRIns *irp = IR(ir->op1);
if (irp == ir-1 && irp->o == IR_MUL && !ra_used(irp)) {
IRIns *irpp = IR(irp->op1);
if (irpp == ir-2 && irpp->o == IR_FPMATH &&
irpp->op2 == IRFPM_LOG2 && !ra_used(irpp)) {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_pow];
IRRef args[2];
args[0] = irpp->op1;
args[1] = irp->op2;
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
return 1;
}
}
return 0;
}
static void asm_add(ASMState *as, IRIns *ir)
{
if (irt_isnum(ir->t)) {

View File

@@ -576,15 +576,6 @@ static void asm_setupresult(ASMState *as, IRIns *ir, const CCallInfo *ci)
}
}
static void asm_call(ASMState *as, IRIns *ir)
{
IRRef args[CCI_NARGS_MAX];
const CCallInfo *ci = &lj_ir_callinfo[ir->op2];
asm_collectargs(as, ir, ci, args);
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
/* Return a constant function pointer or NULL for indirect calls. */
static void *asm_callx_func(ASMState *as, IRIns *irf, IRRef func)
{
@@ -891,6 +882,14 @@ static void asm_conv_int64_fp(ASMState *as, IRIns *ir)
st == IRT_NUM ? XOg_FLDq: XOg_FLDd,
asm_fuseload(as, ir->op1, RSET_EMPTY));
}
static void asm_conv64(ASMState *as, IRIns *ir)
{
if (irt_isfp(ir->t))
asm_conv_fp_int64(as, ir);
else
asm_conv_int64_fp(as, ir);
}
#endif
static void asm_strto(ASMState *as, IRIns *ir)
@@ -912,29 +911,32 @@ static void asm_strto(ASMState *as, IRIns *ir)
RID_ESP, sps_scale(ir->s));
}
static void asm_tostr(ASMState *as, IRIns *ir)
{
IRIns *irl = IR(ir->op1);
IRRef args[2];
args[0] = ASMREF_L;
as->gcsteps++;
if (irt_isnum(irl->t)) {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromnum];
args[1] = ASMREF_TMP1; /* const lua_Number * */
asm_setupresult(as, ir, ci); /* GCstr * */
asm_gencall(as, ci, args);
emit_rmro(as, XO_LEA, ra_releasetmp(as, ASMREF_TMP1)|REX_64,
RID_ESP, ra_spill(as, irl));
} else {
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_str_fromint];
args[1] = ir->op1; /* int32_t k */
asm_setupresult(as, ir, ci); /* GCstr * */
asm_gencall(as, ci, args);
}
}
/* -- Memory references --------------------------------------------------- */
/* Get pointer to TValue. */
static void asm_tvptr(ASMState *as, Reg dest, IRRef ref)
{
IRIns *ir = IR(ref);
if (irt_isnum(ir->t)) {
/* For numbers use the constant itself or a spill slot as a TValue. */
if (irref_isk(ref))
emit_loada(as, dest, ir_knum(ir));
else
emit_rmro(as, XO_LEA, dest|REX_64, RID_ESP, ra_spill(as, ir));
} else {
/* Otherwise use g->tmptv to hold the TValue. */
if (!irref_isk(ref)) {
Reg src = ra_alloc1(as, ref, rset_exclude(RSET_GPR, dest));
emit_movtomro(as, REX_64IR(ir, src), dest, 0);
} else if (!irt_ispri(ir->t)) {
emit_movmroi(as, dest, 0, ir->i);
}
if (!(LJ_64 && irt_islightud(ir->t)))
emit_movmroi(as, dest, 4, irt_toitype(ir->t));
emit_loada(as, dest, &J2G(as->J)->tmptv);
}
}
static void asm_aref(ASMState *as, IRIns *ir)
{
Reg dest = ra_dest(as, ir, RSET_GPR);
@@ -1163,41 +1165,6 @@ static void asm_hrefk(ASMState *as, IRIns *ir)
#endif
}
static void asm_newref(ASMState *as, IRIns *ir)
{
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_tab_newkey];
IRRef args[3];
IRIns *irkey;
Reg tmp;
if (ir->r == RID_SINK)
return;
args[0] = ASMREF_L; /* lua_State *L */
args[1] = ir->op1; /* GCtab *t */
args[2] = ASMREF_TMP1; /* cTValue *key */
asm_setupresult(as, ir, ci); /* TValue * */
asm_gencall(as, ci, args);
tmp = ra_releasetmp(as, ASMREF_TMP1);
irkey = IR(ir->op2);
if (irt_isnum(irkey->t)) {
/* For numbers use the constant itself or a spill slot as a TValue. */
if (irref_isk(ir->op2))
emit_loada(as, tmp, ir_knum(irkey));
else
emit_rmro(as, XO_LEA, tmp|REX_64, RID_ESP, ra_spill(as, irkey));
} else {
/* Otherwise use g->tmptv to hold the TValue. */
if (!irref_isk(ir->op2)) {
Reg src = ra_alloc1(as, ir->op2, rset_exclude(RSET_GPR, tmp));
emit_movtomro(as, REX_64IR(irkey, src), tmp, 0);
} else if (!irt_ispri(irkey->t)) {
emit_movmroi(as, tmp, 0, irkey->i);
}
if (!(LJ_64 && irt_islightud(irkey->t)))
emit_movmroi(as, tmp, 4, irt_toitype(irkey->t));
emit_loada(as, tmp, &J2G(as->J)->tmptv);
}
}
static void asm_uref(ASMState *as, IRIns *ir)
{
/* NYI: Check that UREFO is still open and not aliasing a slot. */
@@ -1621,14 +1588,8 @@ static void asm_x87load(ASMState *as, IRRef ref)
}
}
/* Try to rejoin pow from EXP2, MUL and LOG2 (if still unsplit). */
static int fpmjoin_pow(ASMState *as, IRIns *ir)
static void asm_fppow(ASMState *as, IRIns *ir, IRRef lref, IRRef rref)
{
IRIns *irp = IR(ir->op1);
if (irp == ir-1 && irp->o == IR_MUL && !ra_used(irp)) {
IRIns *irpp = IR(irp->op1);
if (irpp == ir-2 && irpp->o == IR_FPMATH &&
irpp->op2 == IRFPM_LOG2 && !ra_used(irpp)) {
/* The modified regs must match with the *.dasc implementation. */
RegSet drop = RSET_RANGE(RID_XMM0, RID_XMM2+1)|RID2RSET(RID_EAX);
IRIns *irx;
@@ -1637,15 +1598,11 @@ static int fpmjoin_pow(ASMState *as, IRIns *ir)
ra_evictset(as, drop);
ra_destreg(as, ir, RID_XMM0);
emit_call(as, lj_vm_pow_sse);
irx = IR(irpp->op1);
irx = IR(lref);
if (ra_noreg(irx->r) && ra_gethint(irx->r) == RID_XMM1)
irx->r = RID_INIT; /* Avoid allocating xmm1 for x. */
ra_left(as, RID_XMM0, irpp->op1);
ra_left(as, RID_XMM1, irp->op2);
return 1;
}
}
return 0;
ra_left(as, RID_XMM0, lref);
ra_left(as, RID_XMM1, rref);
}
static void asm_fpmath(ASMState *as, IRIns *ir)
@@ -1681,7 +1638,7 @@ static void asm_fpmath(ASMState *as, IRIns *ir)
fpm == IRFPM_CEIL ? lj_vm_ceil_sse : lj_vm_trunc_sse);
ra_left(as, RID_XMM0, ir->op1);
}
} else if (fpm == IRFPM_EXP2 && fpmjoin_pow(as, ir)) {
} else if (fpm == IRFPM_EXP2 && asm_fpjoin_pow(as, ir)) {
/* Rejoined to pow(). */
} else { /* Handle x87 ops. */
int32_t ofs = sps_scale(ir->s); /* Use spill slot or temp slots. */
@@ -1741,28 +1698,6 @@ static void asm_fppowi(ASMState *as, IRIns *ir)
ra_left(as, RID_EAX, ir->op2);
}
#if LJ_64 && LJ_HASFFI
static void asm_arith64(ASMState *as, IRIns *ir, IRCallID id)
{
const CCallInfo *ci = &lj_ir_callinfo[id];
IRRef args[2];
args[0] = ir->op1;
args[1] = ir->op2;
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
#endif
static void asm_intmod(ASMState *as, IRIns *ir)
{
const CCallInfo *ci = &lj_ir_callinfo[IRCALL_lj_vm_modi];
IRRef args[2];
args[0] = ir->op1;
args[1] = ir->op2;
asm_setupresult(as, ir, ci);
asm_gencall(as, ci, args);
}
static int asm_swapops(ASMState *as, IRIns *ir)
{
IRIns *irl = IR(ir->op1);
@@ -2268,13 +2203,9 @@ static void asm_hiop(ASMState *as, IRIns *ir)
int uselo = ra_used(ir-1), usehi = ra_used(ir); /* Loword/hiword used? */
if (LJ_UNLIKELY(!(as->flags & JIT_F_OPT_DCE))) uselo = usehi = 1;
if ((ir-1)->o == IR_CONV) { /* Conversions to/from 64 bit. */
if (usehi || uselo) {
if (irt_isfp(ir->t))
asm_conv_fp_int64(as, ir);
else
asm_conv_int64_fp(as, ir);
}
as->curins--; /* Always skip the CONV. */
if (usehi || uselo)
asm_conv64(as, ir);
return;
} else if ((ir-1)->o <= IR_NE) { /* 64 bit integer comparisons. ORDER IR. */
asm_comp_int64(as, ir);
@@ -2627,7 +2558,7 @@ static void asm_ir(ASMState *as, IRIns *ir)
case IR_DIV:
#if LJ_64 && LJ_HASFFI
if (!irt_isnum(ir->t))
asm_arith64(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_divi64 :
asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_divi64 :
IRCALL_lj_carith_divu64);
else
#endif
@@ -2636,11 +2567,11 @@ static void asm_ir(ASMState *as, IRIns *ir)
case IR_MOD:
#if LJ_64 && LJ_HASFFI
if (!irt_isint(ir->t))
asm_arith64(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_modi64 :
asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_modi64 :
IRCALL_lj_carith_modu64);
else
#endif
asm_intmod(as, ir);
asm_callid(as, ir, IRCALL_lj_vm_modi);
break;
case IR_NEG:
@@ -2670,7 +2601,7 @@ static void asm_ir(ASMState *as, IRIns *ir)
case IR_POW:
#if LJ_64 && LJ_HASFFI
if (!irt_isnum(ir->t))
asm_arith64(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_powi64 :
asm_callid(as, ir, irt_isi64(ir->t) ? IRCALL_lj_carith_powi64 :
IRCALL_lj_carith_powu64);
else
#endif