@@ -83,8 +83,14 @@ static PyObject *value, *value1, *value2, *left, *right, *res, *sum, *prod, *sub
 static PyObject *container, *start, *stop, *v, *lhs, *rhs;
 static PyObject *list, *tuple, *dict;
 static PyObject *exit_func, *lasti, *val;
+static PyObject *jump;
+// Dummy variables for stack effects
+static int when_to_jump_mask;
+// Dummy opcode names for 'op' opcodes
 #define _BINARY_OP_INPLACE_ADD_UNICODE_PART_1 1
 #define _BINARY_OP_INPLACE_ADD_UNICODE_PART_2 2
+#define _COMPARE_OP_FLOAT 3
+#define _JUMP_ON_SIGN 4

 static PyObject *
 dummy_func(
@@ -2054,12 +2060,12 @@ dummy_func(
             JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
         }

-        // family(compare_op) = {
-        //     COMPARE_OP,
-        //     COMPARE_OP_FLOAT_JUMP,
-        //     COMPARE_OP_INT_JUMP,
-        //     COMPARE_OP_STR_JUMP,
-        // };
+        family(compare_op) = {
+            COMPARE_OP,
+            _COMPARE_OP_FLOAT,
+            // COMPARE_OP_INT_JUMP,
+            // COMPARE_OP_STR_JUMP,
+        };

         inst(COMPARE_OP, (unused/1, left, right, unused/1 -- res)) {
             _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
@@ -2078,36 +2084,32 @@ dummy_func(
             ERROR_IF(res == NULL, error);
         }

-        // stack effect: (__0 -- )
-        inst(COMPARE_OP_FLOAT_JUMP) {
+        // The result is an int disguised as an object pointer.
+        op(_COMPARE_OP_FLOAT, (unused/1, left, right, when_to_jump_mask/1 -- jump)) {
             assert(cframe.use_tracing == 0);
             // Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false)
-            _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
-            int when_to_jump_mask = cache->mask;
-            PyObject *right = TOP();
-            PyObject *left = SECOND();
             DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
             DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
             double dleft = PyFloat_AS_DOUBLE(left);
             double dright = PyFloat_AS_DOUBLE(right);
-            int sign = (dleft > dright) - (dleft < dright);
+            // 1 if <, 2 if ==, 4 if >; this matches when_to_jump_mask
+            int sign_ish = 2*(dleft > dright) + 2 - (dleft < dright);
             DEOPT_IF(isnan(dleft), COMPARE_OP);
             DEOPT_IF(isnan(dright), COMPARE_OP);
             STAT_INC(COMPARE_OP, hit);
-            JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
-            NEXTOPARG();
-            STACK_SHRINK(2);
             _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc);
             _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
+            jump = (PyObject *)(size_t)(sign_ish & when_to_jump_mask);
+        }
+        // The input is an int disguised as an object pointer!
+        op(_JUMP_ON_SIGN, (jump --)) {
             assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
-            int jump = (1 << (sign + 1)) & when_to_jump_mask;
-            if (!jump) {
-                next_instr++;
-            }
-            else {
-                JUMPBY(1 + oparg);
+            if (jump) {
+                JUMPBY(oparg);
             }
         }
+        // We're praying that the compiler optimizes the flags manipulations.
+        super(COMPARE_OP_FLOAT_JUMP) = _COMPARE_OP_FLOAT + _JUMP_ON_SIGN;

         // stack effect: (__0 -- )
         inst(COMPARE_OP_INT_JUMP) {
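For reference, a minimal standalone sketch (separate from the patch) of how the sign/mask encoding shared by _COMPARE_OP_FLOAT and _JUMP_ON_SIGN behaves. It assumes when_to_jump_mask uses the same values the comment above describes for sign_ish (1 for <, 2 for ==, 4 for >); the helper name should_jump and the example mask value 3 are illustrative only, not CPython API.

#include <stdio.h>

/* Returns nonzero when the combined compare-and-jump would take the branch. */
static int
should_jump(double dleft, double dright, int when_to_jump_mask)
{
    /* 1 if <, 2 if ==, 4 if >, exactly as computed in _COMPARE_OP_FLOAT. */
    int sign_ish = 2*(dleft > dright) + 2 - (dleft < dright);
    /* _JUMP_ON_SIGN jumps iff the comparison's bit is set in the mask. */
    return (sign_ish & when_to_jump_mask) != 0;
}

int
main(void)
{
    /* A mask of 1|2 == 3 would mean "jump when left <= right". */
    printf("%d\n", should_jump(1.0, 2.0, 3));   /* 1: 1.0 < 2.0 */
    printf("%d\n", should_jump(2.0, 2.0, 3));   /* 1: 2.0 == 2.0 */
    printf("%d\n", should_jump(3.0, 2.0, 3));   /* 0: 3.0 > 2.0 */
    return 0;
}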