Use same HREF+EQ/NE optimization in all assembler backends.

Author: Mike Pall
Date:   2013-04-22 20:47:39 +02:00
Commit: 2cd4ce6141
Parent: 988e183965

2 changed files with 47 additions and 47 deletions
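
Background (summarizing the diff plus general recorder behavior): HREF computes the address of a hash slot, and the recorder typically guards a table access with a non-constant key by following the HREF with an EQ or NE comparison against niltv (the canonical nil TValue), depending on whether the slot was nil when the trace was recorded. Previously only the x86 backend fused this pair, by pattern-matching and patching the already-emitted compare in merge_href_niltv(); with this commit each backend's asm_ir() dispatch detects an EQ/NE whose operand is the immediately preceding HREF and hands the comparison to asm_href(), so the not-found path (for a merged NE) or the key-found path (for a merged EQ) branches straight to the snapshot exit instead of loading niltv and comparing again. A rough Lua sketch of code that can record such a merged guard (hypothetical example; the exact IR depends on the recorded trace):

  -- A lookup with a non-constant key that is absent at recording time
  -- typically records HREF(t, k) followed by an EQ guard against niltv;
  -- with this change the guard is folded into the hash-chain search.
  local function is_missing(t, k)
    return t[k] == nil
  end

Deciding the merge up front in the IR dispatch also removes the need to pattern-match and rewrite already-emitted machine code, which is what the deleted merge_href_niltv() did on x86.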

src/lj_asm_mips.h

@@ -570,7 +570,7 @@ static void asm_aref(ASMState *as, IRIns *ir)
 ** } while ((n = nextnode(n)));
 ** return niltv(L);
 */
-static void asm_href(ASMState *as, IRIns *ir)
+static void asm_href(ASMState *as, IRIns *ir, IROp merge)
 {
   RegSet allow = RSET_GPR;
   int destused = ra_used(ir);
@@ -596,37 +596,42 @@ static void asm_href(ASMState *as, IRIns *ir)
   tmp2 = ra_scratch(as, allow);
   rset_clear(allow, tmp2);
-  /* Key not found in chain: load niltv. */
+  /* Key not found in chain: jump to exit (if merged) or load niltv. */
   l_end = emit_label(as);
-  if (destused)
+  as->invmcp = NULL;
+  if (merge == IR_NE)
+    asm_guard(as, MIPSI_B, RID_ZERO, RID_ZERO);
+  else if (destused)
     emit_loada(as, dest, niltvg(J2G(as->J)));
+  else
+    *--as->mcp = MIPSI_NOP;
   /* Follow hash chain until the end. */
-  emit_move(as, dest, tmp1);
+  emit_move(as, dest, tmp2);
   l_loop = --as->mcp;
-  emit_tsi(as, MIPSI_LW, tmp1, dest, (int32_t)offsetof(Node, next));
+  emit_tsi(as, MIPSI_LW, tmp2, dest, (int32_t)offsetof(Node, next));
   l_next = emit_label(as);
   /* Type and value comparison. */
+  if (merge == IR_EQ) { /* Must match asm_guard(). */
+    emit_ti(as, MIPSI_LI, RID_TMP, as->snapno);
+    l_end = asm_exitstub_addr(as);
+  }
   if (irt_isnum(kt)) {
     emit_branch(as, MIPSI_BC1T, 0, 0, l_end);
     emit_fgh(as, MIPSI_C_EQ_D, 0, tmpnum, key);
-    emit_tg(as, MIPSI_MFC1, tmp1, key+1);
-    emit_branch(as, MIPSI_BEQ, tmp1, RID_ZERO, l_next);
-    emit_tsi(as, MIPSI_SLTIU, tmp1, tmp1, (int32_t)LJ_TISNUM);
+    *--as->mcp = MIPSI_NOP; /* Avoid NaN comparison overhead. */
+    emit_branch(as, MIPSI_BEQ, tmp2, RID_ZERO, l_next);
+    emit_tsi(as, MIPSI_SLTIU, tmp2, tmp2, (int32_t)LJ_TISNUM);
     emit_hsi(as, MIPSI_LDC1, tmpnum, dest, (int32_t)offsetof(Node, key.n));
   } else {
     if (irt_ispri(kt)) {
-      emit_branch(as, MIPSI_BEQ, tmp1, type, l_end);
+      emit_branch(as, MIPSI_BEQ, tmp2, type, l_end);
     } else {
-      emit_branch(as, MIPSI_BEQ, tmp2, key, l_end);
-      emit_tsi(as, MIPSI_LW, tmp2, dest, (int32_t)offsetof(Node, key.gcr));
-      emit_branch(as, MIPSI_BNE, tmp1, type, l_next);
+      emit_branch(as, MIPSI_BEQ, tmp1, key, l_end);
+      emit_tsi(as, MIPSI_LW, tmp1, dest, (int32_t)offsetof(Node, key.gcr));
+      emit_branch(as, MIPSI_BNE, tmp2, type, l_next);
     }
   }
-  emit_tsi(as, MIPSI_LW, tmp1, dest, (int32_t)offsetof(Node, key.it));
-  *l_loop = MIPSI_BNE | MIPSF_S(tmp1) | ((as->mcp-l_loop-1) & 0xffffu);
+  emit_tsi(as, MIPSI_LW, tmp2, dest, (int32_t)offsetof(Node, key.it));
+  *l_loop = MIPSI_BNE | MIPSF_S(tmp2) | ((as->mcp-l_loop-1) & 0xffffu);
   /* Load main position relative to tab->node into dest. */
   khash = irref_isk(refkey) ? ir_khash(irkey) : 1;
@@ -1694,7 +1699,14 @@ static void asm_ir(ASMState *as, IRIns *ir)
   case IR_GCSTEP: asm_gcstep(as, ir); break;
   /* Guarded assertions. */
-  case IR_EQ: case IR_NE: asm_compeq(as, ir); break;
+  case IR_EQ: case IR_NE:
+    if ((ir-1)->o == IR_HREF && ir->op1 == as->curins-1) {
+      as->curins--;
+      asm_href(as, ir-1, (IROp)ir->o);
+      break;
+    }
+    asm_compeq(as, ir);
+    break;
   case IR_LT: case IR_GE: case IR_LE: case IR_GT:
   case IR_ULT: case IR_UGE: case IR_ULE: case IR_UGT:
   case IR_ABC:
@@ -1749,7 +1761,7 @@ static void asm_ir(ASMState *as, IRIns *ir)
   /* Memory references. */
   case IR_AREF: asm_aref(as, ir); break;
-  case IR_HREF: asm_href(as, ir); break;
+  case IR_HREF: asm_href(as, ir, 0); break;
   case IR_HREFK: asm_hrefk(as, ir); break;
   case IR_NEWREF: asm_newref(as, ir); break;
   case IR_UREFO: case IR_UREFC: asm_uref(as, ir); break;

src/lj_asm_x86.h

@@ -947,23 +947,6 @@ static void asm_aref(ASMState *as, IRIns *ir)
   emit_rr(as, XO_MOV, dest, as->mrm.base);
 }
-/* Merge NE(HREF, niltv) check. */
-static MCode *merge_href_niltv(ASMState *as, IRIns *ir)
-{
-  /* Assumes nothing else generates NE of HREF. */
-  if ((ir[1].o == IR_NE || ir[1].o == IR_EQ) && ir[1].op1 == as->curins &&
-      ra_hasreg(ir->r)) {
-    MCode *p = as->mcp;
-    p += (LJ_64 && *p != XI_ARITHi) ? 7+6 : 6+6;
-    /* Ensure no loop branch inversion happened. */
-    if (p[-6] == 0x0f && p[-5] == XI_JCCn+(CC_NE^(ir[1].o & 1))) {
-      as->mcp = p; /* Kill cmp reg, imm32 + jz exit. */
-      return p + *(int32_t *)(p-4); /* Return exit address. */
-    }
-  }
-  return NULL;
-}
 /* Inlined hash lookup. Specialized for key type and for const keys.
 ** The equivalent C code is:
 ** Node *n = hashkey(t, key);
@@ -972,10 +955,10 @@ static MCode *merge_href_niltv(ASMState *as, IRIns *ir)
 ** } while ((n = nextnode(n)));
 ** return niltv(L);
 */
-static void asm_href(ASMState *as, IRIns *ir)
+static void asm_href(ASMState *as, IRIns *ir, IROp merge)
 {
-  MCode *nilexit = merge_href_niltv(as, ir); /* Do this before any restores. */
   RegSet allow = RSET_GPR;
+  int destused = ra_used(ir);
   Reg dest = ra_dest(as, ir, allow);
   Reg tab = ra_alloc1(as, ir->op1, rset_clear(allow, dest));
   Reg key = RID_NONE, tmp = RID_NONE;
@@ -992,14 +975,12 @@ static void asm_href(ASMState *as, IRIns *ir)
     tmp = ra_scratch(as, rset_exclude(allow, key));
   }
-  /* Key not found in chain: jump to exit (if merged with NE) or load niltv. */
+  /* Key not found in chain: jump to exit (if merged) or load niltv. */
   l_end = emit_label(as);
-  if (nilexit && ir[1].o == IR_NE) {
-    emit_jcc(as, CC_E, nilexit); /* XI_JMP is not found by lj_asm_patchexit. */
-    nilexit = NULL;
-  } else {
+  if (merge == IR_NE)
+    asm_guardcc(as, CC_E); /* XI_JMP is not found by lj_asm_patchexit. */
+  else if (destused)
     emit_loada(as, dest, niltvg(J2G(as->J)));
-  }
   /* Follow hash chain until the end. */
   l_loop = emit_sjcc_label(as, CC_NZ);
@@ -1008,8 +989,8 @@ static void asm_href(ASMState *as, IRIns *ir)
   l_next = emit_label(as);
   /* Type and value comparison. */
-  if (nilexit)
-    emit_jcc(as, CC_E, nilexit);
+  if (merge == IR_EQ)
+    asm_guardcc(as, CC_E);
   else
     emit_sjcc(as, CC_E, l_end);
   if (irt_isnum(kt)) {
@@ -2519,9 +2500,16 @@ static void asm_ir(ASMState *as, IRIns *ir)
   case IR_GCSTEP: asm_gcstep(as, ir); break;
   /* Guarded assertions. */
+  case IR_EQ: case IR_NE:
+    if ((ir-1)->o == IR_HREF && ir->op1 == as->curins-1) {
+      as->curins--;
+      asm_href(as, ir-1, (IROp)ir->o);
+      break;
+    }
+    /* fallthrough */
   case IR_LT: case IR_GE: case IR_LE: case IR_GT:
   case IR_ULT: case IR_UGE: case IR_ULE: case IR_UGT:
-  case IR_EQ: case IR_NE: case IR_ABC:
+  case IR_ABC:
     asm_comp(as, ir, asm_compmap[ir->o]);
     break;
@@ -2615,7 +2603,7 @@ static void asm_ir(ASMState *as, IRIns *ir)
   /* Memory references. */
   case IR_AREF: asm_aref(as, ir); break;
-  case IR_HREF: asm_href(as, ir); break;
+  case IR_HREF: asm_href(as, ir, 0); break;
   case IR_HREFK: asm_hrefk(as, ir); break;
   case IR_NEWREF: asm_newref(as, ir); break;
   case IR_UREFO: case IR_UREFC: asm_uref(as, ir); break;