#if defined(IR_TARGET_X86)
# define IR_REG_SP 4 /* IR_REG_RSP */
+ # define IR_REG_FP 5 /* IR_REG_RBP */
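+ /* Note: IR_REG_FP names the native frame-pointer register (used when the IR context
+  * runs with IR_USE_FRAME_POINTER), as opposed to ZREG_FP, which holds the VM frame
+  * (execute_data). */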
# define ZREG_FP 6 /* IR_REG_RSI */
# define ZREG_IP 7 /* IR_REG_RDI */
# define ZREG_FIRST_FPR 8
# define IR_REGSET_PRESERVED ((1<<3) | (1<<5) | (1<<6) | (1<<7)) /* all preserved registers */
#elif defined(IR_TARGET_X64)
# define IR_REG_SP 4 /* IR_REG_RSP */
+ # define IR_REG_FP 5 /* IR_REG_RBP */
# define ZREG_FP 14 /* IR_REG_R14 */
# define ZREG_IP 15 /* IR_REG_R15 */
# define ZREG_FIRST_FPR 16
# endif
#elif defined(IR_TARGET_AARCH64)
# define IR_REG_SP 31 /* IR_REG_RSP */
+ # define IR_REG_FP 29 /* IR_REG_X29 */
# define ZREG_FP 27 /* IR_REG_X27 */
# define ZREG_IP 28 /* IR_REG_X28 */
# define ZREG_FIRST_FPR 32
@@ -676,9 +679,17 @@ void *zend_jit_snapshot_handler(ir_ctx *ctx, ir_ref snapshot_ref, ir_insn *snaps
if (ref > 0) {
if (reg != ZREG_NONE) {
- t->stack_map[t->exit_info[exit_point].stack_offset + var].reg = IR_REG_NUM(reg);
if (reg & IR_REG_SPILL_LOAD) {
- t->stack_map[t->exit_info[exit_point].stack_offset + var].flags |= ZREG_LOAD;
+ ZEND_ASSERT(!(reg & IR_REG_SPILL_SPECIAL));
+ /* spill slot on a CPU stack */
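+ /* Record the IR reference for now; zend_jit_finish() rewrites it into the actual
+  * frame offset once code generation is done and the spill location is known
+  * (see the ir_get_spill_slot_offset() call below). */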
+ t->stack_map[t->exit_info[exit_point].stack_offset + var].ref = ref;
+ t->stack_map[t->exit_info[exit_point].stack_offset + var].reg = ZREG_NONE;
+ t->stack_map[t->exit_info[exit_point].stack_offset + var].flags |= ZREG_SPILL_SLOT;
+ } else if (reg & IR_REG_SPILL_SPECIAL) {
+ /* spill slot on a VM stack */
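+ /* The value presumably already lives in its own VM stack slot, so the side exit
+  * only needs to restore the type information. */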
+ t->stack_map[t->exit_info[exit_point].stack_offset + var].flags = ZREG_TYPE_ONLY;
+ } else {
+ t->stack_map[t->exit_info[exit_point].stack_offset + var].reg = IR_REG_NUM(reg);
}
} else {
t->stack_map[t->exit_info[exit_point].stack_offset + var].flags = ZREG_TYPE_ONLY;
@@ -1149,6 +1160,29 @@ static ir_ref jit_Z_DVAL_ref(zend_jit_ctx *jit, ir_ref ref)
return ir_LOAD_D(ref);
}

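+ /* Returns 1 when binding this SSA value to its VM stack slot via ir_bind() (see
+  * zend_jit_def_reg() below) might conflict with the register allocator's own spill
+  * decisions.  The commented-out checks sketch cases that are presumably safe
+  * (deoptimization RLOADs, reloads from the same CV slot); for now the check stays
+  * conservative and always reports a possible conflict. */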
+ static bool zend_jit_spilling_may_cause_conflict(zend_jit_ctx *jit, int var, ir_ref val)
+ {
+ // if (jit->ctx.ir_base[val].op == IR_RLOAD) {
+ // /* Deoptimization */
+ // return 0;
+ // }
+ // if (jit->ctx.ir_base[val].op == IR_LOAD
+ // && jit->ctx.ir_base[jit->ctx.ir_base[val].op2].op == IR_ADD
+ // && jit->ctx.ir_base[jit->ctx.ir_base[jit->ctx.ir_base[val].op2].op1].op == IR_RLOAD
+ // && jit->ctx.ir_base[jit->ctx.ir_base[jit->ctx.ir_base[val].op2].op1].op2 == ZREG_FP
+ // && IR_IS_CONST_REF(jit->ctx.ir_base[jit->ctx.ir_base[val].op2].op2)
+ // && jit->ctx.ir_base[jit->ctx.ir_base[jit->ctx.ir_base[val].op2].op2].val.addr == (uintptr_t)EX_NUM_TO_VAR(jit->ssa->vars[var].var)) {
+ // /* LOAD from the same location (the LOAD is pinned) */
+ // // TODO: should be anti-dependent with the following stores ???
+ // return 0;
+ // }
+ // if (jit->ssa->vars[var].var < jit->current_op_array->last_var) {
+ // /* IS_CV */
+ // return 0;
+ // }
+ return 1;
+ }
+
static void zend_jit_def_reg(zend_jit_ctx *jit, zend_jit_addr addr, ir_ref val)
{
int var;
@@ -1161,46 +1195,8 @@ static void zend_jit_def_reg(zend_jit_ctx *jit, zend_jit_addr addr, ir_ref val)
}
ZEND_ASSERT(jit->ra && jit->ra[var].ref == IR_NULL);

- /* Disable CSE for temporary variables */
- /* TODO: This is a workarounf to fix ext/standard/tests/strings/htmlentities20.phpt failure with tracing JIT ??? */
- if (0 && val > 0 && jit->ssa->vars[var].var >= jit->current_op_array->last_var) {
- ir_insn *insn = &jit->ctx.ir_base[val];
- ir_op op = insn->op;
-
- if (op <= IR_LAST_FOLDABLE_OP && jit->ctx.prev_insn_chain[op]) {
- if (jit->ctx.prev_insn_chain[op] == val) {
- if (insn->prev_insn_offset) {
- jit->ctx.prev_insn_chain[op] = val - (ir_ref)(uint32_t)insn->prev_insn_offset;
- } else {
- jit->ctx.prev_insn_chain[op] = IR_UNUSED;
- }
- } else {
- ir_ref prev = jit->ctx.prev_insn_chain[op];
- ir_ref tmp;
-
- while (prev) {
- insn = &jit->ctx.ir_base[prev];
- if (!insn->prev_insn_offset) {
- break;
- }
- tmp = prev - (ir_ref)(uint32_t)insn->prev_insn_offset;
- if (tmp == val) {
- ir_ref offset = jit->ctx.ir_base[tmp].prev_insn_offset;
-
- if (!offset || prev - tmp + offset > 0xffff) {
- insn->prev_insn_offset = 0;
- } else {
- insn->prev_insn_offset = prev - tmp + offset;
- }
- }
- prev = tmp;
- }
- }
- }
- }
-
/* Negative "var" has special meaning for IR */
- if (val > 0 && jit->ssa->vars[var].var < jit->current_op_array->last_var) {
+ if (val > 0 && !zend_jit_spilling_may_cause_conflict(jit, var, val)) {
val = ir_bind(&jit->ctx, -EX_NUM_TO_VAR(jit->ssa->vars[var].var), val);
}
jit->ra[var].ref = val;
@@ -2623,7 +2619,7 @@ static void zend_jit_init_ctx(zend_jit_ctx *jit, uint32_t flags)
jit->ctx.fixed_call_stack_size = 16;
#endif
#if defined(IR_TARGET_X86) || defined(IR_TARGET_X64)
- jit->ctx.fixed_regset |= (1<<5); /* prevent %rbp (%r5) usage */
+ jit->ctx.fixed_regset |= (1<<IR_REG_FP); /* prevent %rbp (%r5) usage */
#endif
}
}
@@ -4148,6 +4144,43 @@ static int zend_jit_store_reg(zend_jit_ctx *jit, uint32_t info, int var, int8_t
return 1;
}

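+ /* Load a value that the register allocator kept in a spill slot at reg+offset on the
+  * native stack and materialize it in the VM variable EX_NUM_TO_VAR(var), setting the
+  * zval type only when the recorded stack type does not already match; broadly mirrors
+  * zend_jit_store_reg() above, except that the source is a memory slot rather than a register. */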
+ static int zend_jit_store_spill_slot(zend_jit_ctx *jit, uint32_t info, int var, int8_t reg, int32_t offset, bool set_type)
+ {
+ zend_jit_addr src;
+ zend_jit_addr dst = ZEND_ADDR_MEM_ZVAL(ZREG_FP, EX_NUM_TO_VAR(var));
+
+ if ((info & MAY_BE_ANY) == MAY_BE_LONG) {
+ src = ir_LOAD_L(ir_ADD_OFFSET(ir_RLOAD_A(reg), offset));
+ if (jit->ra && jit->ra[var].ref == IR_NULL) {
+ zend_jit_def_reg(jit, ZEND_ADDR_REG(var), src);
+ } else {
+ jit_set_Z_LVAL(jit, dst, src);
+ if (set_type &&
+ (Z_REG(dst) != ZREG_FP ||
+ !JIT_G(current_frame) ||
+ STACK_MEM_TYPE(JIT_G(current_frame)->stack, EX_VAR_TO_NUM(Z_OFFSET(dst))) != IS_LONG)) {
+ jit_set_Z_TYPE_INFO(jit, dst, IS_LONG);
+ }
+ }
+ } else if ((info & MAY_BE_ANY) == MAY_BE_DOUBLE) {
+ src = ir_LOAD_D(ir_ADD_OFFSET(ir_RLOAD_A(reg), offset));
+ if (jit->ra && jit->ra[var].ref == IR_NULL) {
+ zend_jit_def_reg(jit, ZEND_ADDR_REG(var), src);
+ } else {
+ jit_set_Z_DVAL(jit, dst, src);
+ if (set_type &&
+ (Z_REG(dst) != ZREG_FP ||
+ !JIT_G(current_frame) ||
+ STACK_MEM_TYPE(JIT_G(current_frame)->stack, EX_VAR_TO_NUM(Z_OFFSET(dst))) != IS_DOUBLE)) {
+ jit_set_Z_TYPE_INFO(jit, dst, IS_DOUBLE);
+ }
+ }
+ } else {
+ ZEND_UNREACHABLE();
+ }
+ return 1;
+ }
+
static int zend_jit_store_var_type(zend_jit_ctx *jit, int var, uint32_t type)
{
zend_jit_addr dst = ZEND_ADDR_MEM_ZVAL(ZREG_FP, EX_NUM_TO_VAR(var));
@@ -6169,6 +6202,9 @@ static int zend_jit_assign_to_variable(zend_jit_ctx *jit,
phi = ir_PHI_N(res_inputs->count, res_inputs->refs);
}
if (Z_MODE(var_addr) == IS_REG) {
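+ /* When the old value may need a destructor call (refcounted types) or ref_addr is
+  * involved, wrap the PHI result in an explicit IR_COPY (op2 = 1 seemingly marks the
+  * copy as non-removable), apparently to give the cached value its own definition and
+  * live range rather than reusing the PHI result directly. */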
+ if ((var_info & (MAY_BE_REF|MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_OBJECT|MAY_BE_RESOURCE)) || ref_addr) {
+ phi = ir_emit2(&jit->ctx, IR_OPT(IR_COPY, jit->ctx.ir_base[phi].type), phi, 1);
+ }
zend_jit_def_reg(jit, var_addr, phi);
if (real_res_addr) {
if (var_def_info & MAY_BE_LONG) {
@@ -15547,6 +15583,20 @@ static void *zend_jit_finish(zend_jit_ctx *jit)
}
} else {
/* Only for tracing JIT */
+ zend_jit_trace_info *t = jit->trace;
+ zend_jit_trace_stack *stack;
+ uint32_t i;
+
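+ /* Now that native code generation is complete, fix up the ZREG_SPILL_SLOT entries
+  * recorded by zend_jit_snapshot_handler(): the base register becomes the frame or
+  * stack pointer (depending on IR_USE_FRAME_POINTER) and the stored IR reference is
+  * converted into the spill slot's offset via ir_get_spill_slot_offset(). */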
+ if (t) {
+ for (i = 0; i < t->stack_map_size; i++) {
+ stack = t->stack_map + i;
+ if (stack->flags & ZREG_SPILL_SLOT) {
+ stack->reg = (jit->ctx.flags & IR_USE_FRAME_POINTER) ? IR_REG_FP : IR_REG_SP;
+ stack->ref = ir_get_spill_slot_offset(&jit->ctx, stack->ref);
+ }
+ }
+ }
+
zend_jit_trace_add_code(entry, size);

#if ZEND_JIT_SUPPORT_CLDEMOTE
@@ -16023,12 +16073,12 @@ static int zend_jit_trace_start(zend_jit_ctx *jit,
if (STACK_FLAGS(parent_stack, i) & (ZREG_LOAD|ZREG_STORE)) {
/* op3 is used as a flag that the value is already stored in memory.
- * In case the IR framework desides to spill the result of IR_LOAD,
+ * In case the IR framework decides to spill the result of IR_LOAD,
* it doesn't have to store the value once again.
*
* See: insn->op3 check in ir_emit_rload()
*/
- ir_set_op(&jit->ctx, ref, 3, 1);
+ ir_set_op(&jit->ctx, ref, 3, EX_NUM_TO_VAR(i));
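+ /* op3 now carries the variable's VM stack location (EX_NUM_TO_VAR(i)) rather than a
+  * bare "already in memory" flag, presumably so the backend can also use that VM slot
+  * as the value's spill location instead of reserving one on the CPU stack. */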
}
}
}