Mirror of https://github.com/LuaJIT/LuaJIT.git (synced 2025-03-13 05:55:18 +00:00)

commit 38628d93b8
parent 23b5c56d41

Improve FOLD/CSE of field loads and array/hash refs across NEWREF.
@@ -111,6 +111,7 @@ LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_uload(jit_State *J);
 LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_fload(jit_State *J);
 LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_tab_len(jit_State *J);
 LJ_FUNC int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J);
+LJ_FUNC int LJ_FASTCALL lj_opt_fwd_tptr(jit_State *J, IRRef lim);
 LJ_FUNC int lj_opt_fwd_wasnonnil(jit_State *J, IROpT loadop, IRRef xref);
 
 /* Dead-store elimination. */
@@ -1233,23 +1233,15 @@ LJFOLDF(fwd_href_tdup)
 }
 
 /* We can safely FOLD/CSE array/hash refs and field loads, since there
-** are no corresponding stores. But NEWREF may invalidate all of them.
-** Lacking better disambiguation for table references, these optimizations
-** are simply disabled across any NEWREF.
+** are no corresponding stores. But we need to check for any NEWREF with
+** an aliased table, as it may invalidate all of the pointers and fields.
 ** Only HREF needs the NEWREF check -- AREF and HREFK already depend on
 ** FLOADs. And NEWREF itself is treated like a store (see below).
 */
-LJFOLD(HREF any any)
-LJFOLDF(cse_href)
-{
-  TRef tr = lj_opt_cse(J);
-  return tref_ref(tr) < J->chain[IR_NEWREF] ? EMITFOLD : tr;
-}
-
 LJFOLD(FLOAD TNEW IRFL_TAB_ASIZE)
 LJFOLDF(fload_tab_tnew_asize)
 {
-  if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && fins->op1 > J->chain[IR_NEWREF])
+  if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && lj_opt_fwd_tptr(J, fins->op1))
     return INTFOLD(fleft->op1);
   return NEXTFOLD;
 }
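Note (not part of the diff): the policy change above can be illustrated with a toy model. All names below (ToyIns, tables_may_alias, old_guard, new_guard) are hypothetical; the sketch only shows why walking the NEWREF chain with an alias check admits more CSE than the old test against the position of the most recent NEWREF.

/* Toy model, not LuaJIT source: contrasts the old and new CSE guards. */
#include <stdio.h>

enum { TOY_OTHER = 0, TOY_NEWREF = 1 };

typedef struct ToyIns {
  int op;     /* TOY_NEWREF or TOY_OTHER. */
  int table;  /* Id of the table the instruction operates on. */
  int prev;   /* Previous instruction of the same opcode (chain), 0 = none. */
} ToyIns;

/* Hypothetical stand-in for table disambiguation: here two table ids
** alias only when they are equal. */
static int tables_may_alias(int ta, int tb) { return ta == tb; }

/* Old policy: any NEWREF emitted after the candidate blocks the CSE. */
static int old_guard(int candidate_ref, int last_newref)
{
  return candidate_ref > last_newref;
}

/* New policy (shape of lj_opt_fwd_tptr): walk the NEWREF chain back to
** the candidate and only give up when a NEWREF may touch the same table. */
static int new_guard(const ToyIns *ir, int candidate_ref, int candidate_table,
                     int last_newref)
{
  int ref = last_newref;
  while (ref > candidate_ref) {
    if (tables_may_alias(candidate_table, ir[ref].table))
      return 0;  /* Conflict. */
    ref = ir[ref].prev;
  }
  return 1;  /* Safe to FOLD/CSE. */
}

int main(void)
{
  /* Slot 1 references table #1; slot 2 is a NEWREF on unrelated table #2. */
  ToyIns ir[3] = { {TOY_OTHER, 0, 0}, {TOY_OTHER, 1, 0}, {TOY_NEWREF, 2, 0} };
  int last_newref = 2;
  printf("old guard: %d\n", old_guard(1, last_newref));         /* 0: blocked. */
  printf("new guard: %d\n", new_guard(ir, 1, 1, last_newref));  /* 1: allowed. */
  return 0;
}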
@@ -1257,7 +1249,7 @@ LJFOLDF(fload_tab_tnew_asize)
 LJFOLD(FLOAD TNEW IRFL_TAB_HMASK)
 LJFOLDF(fload_tab_tnew_hmask)
 {
-  if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && fins->op1 > J->chain[IR_NEWREF])
+  if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && lj_opt_fwd_tptr(J, fins->op1))
     return INTFOLD((1 << fleft->op2)-1);
   return NEXTFOLD;
 }
@@ -1265,7 +1257,7 @@ LJFOLDF(fload_tab_tnew_hmask)
 LJFOLD(FLOAD TDUP IRFL_TAB_ASIZE)
 LJFOLDF(fload_tab_tdup_asize)
 {
-  if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && fins->op1 > J->chain[IR_NEWREF])
+  if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && lj_opt_fwd_tptr(J, fins->op1))
     return INTFOLD((int32_t)ir_ktab(IR(fleft->op1))->asize);
   return NEXTFOLD;
 }
@@ -1273,11 +1265,12 @@ LJFOLDF(fload_tab_tdup_asize)
 LJFOLD(FLOAD TDUP IRFL_TAB_HMASK)
 LJFOLDF(fload_tab_tdup_hmask)
 {
-  if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && fins->op1 > J->chain[IR_NEWREF])
+  if (LJ_LIKELY(J->flags & JIT_F_OPT_FOLD) && lj_opt_fwd_tptr(J, fins->op1))
     return INTFOLD((int32_t)ir_ktab(IR(fleft->op1))->hmask);
   return NEXTFOLD;
 }
 
+LJFOLD(HREF any any)
 LJFOLD(FLOAD any IRFL_TAB_ARRAY)
 LJFOLD(FLOAD any IRFL_TAB_NODE)
 LJFOLD(FLOAD any IRFL_TAB_ASIZE)
@@ -1285,7 +1278,7 @@ LJFOLD(FLOAD any IRFL_TAB_HMASK)
 LJFOLDF(fload_tab_ah)
 {
   TRef tr = lj_opt_cse(J);
-  return tref_ref(tr) < J->chain[IR_NEWREF] ? EMITFOLD : tr;
+  return lj_opt_fwd_tptr(J, tref_ref(tr)) ? tr : EMITFOLD;
 }
 
 /* Strings are immutable, so we can safely FOLD/CSE the related FLOAD. */
@@ -277,6 +277,20 @@ int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J)
   return 1;  /* No conflict. Can fold to niltv. */
 }
 
+/* Check whether there's no aliasing NEWREF for the left operand. */
+int LJ_FASTCALL lj_opt_fwd_tptr(jit_State *J, IRRef lim)
+{
+  IRRef ta = fins->op1;
+  IRRef ref = J->chain[IR_NEWREF];
+  while (ref > lim) {
+    IRIns *newref = IR(ref);
+    if (ta == newref->op1 || aa_table(J, ta, newref->op1) != ALIAS_NO)
+      return 0;  /* Conflict. */
+    ref = newref->prev;
+  }
+  return 1;  /* No conflict. Can safely FOLD/CSE. */
+}
+
 /* ASTORE/HSTORE elimination. */
 TRef LJ_FASTCALL lj_opt_dse_ahstore(jit_State *J)
 {
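Note (not part of the diff): lj_opt_fwd_tptr defers the actual disambiguation to aa_table(), whose body is outside this commit. The sketch below is a hypothetical, simplified stand-in (toy_table_alias and its types are invented for illustration), assuming the usual argument that two distinct tables allocated inside the trace can never be the same object, while references to pre-existing tables must be treated as possibly aliasing.

/* Hypothetical sketch of a table-alias predicate in the spirit of the
** aa_table() call above; the real function is not part of this diff. */
#include <stdio.h>

typedef enum { TOYALIAS_NO, TOYALIAS_MAY, TOYALIAS_MUST } ToyAliasRet;

typedef struct ToyTabRef {
  int ref;             /* Position of the table reference in the trace. */
  int alloc_in_trace;  /* Nonzero if created by TNEW/TDUP inside the trace. */
} ToyTabRef;

static ToyAliasRet toy_table_alias(ToyTabRef a, ToyTabRef b)
{
  if (a.ref == b.ref)
    return TOYALIAS_MUST;  /* Same reference: trivially the same table. */
  if (a.alloc_in_trace && b.alloc_in_trace)
    return TOYALIAS_NO;    /* Two distinct in-trace allocations never alias. */
  if ((a.alloc_in_trace && b.ref < a.ref) ||
      (b.alloc_in_trace && a.ref < b.ref))
    return TOYALIAS_NO;    /* The older reference predates the allocation. */
  return TOYALIAS_MAY;     /* Otherwise be conservative. */
}

int main(void)
{
  ToyTabRef t1 = { 5, 1 };  /* Allocated inside the trace at ref 5. */
  ToyTabRef t2 = { 9, 1 };  /* A second in-trace allocation at ref 9. */
  ToyTabRef t3 = { 2, 0 };  /* Pre-existing table loaded at ref 2. */
  ToyTabRef t4 = { 3, 0 };  /* Another pre-existing table loaded at ref 3. */
  printf("%d %d\n", toy_table_alias(t1, t2),   /* 0: cannot alias. */
                    toy_table_alias(t3, t4));  /* 1: may alias. */
  return 0;
}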