@@ -394,7 +394,7 @@ _PyUop_Replacements[OPCODE_METADATA_SIZE] = {
 };
 
 static const uint16_t
-BRANCH_TO_GUARDS[4][2] = {
+BRANCH_TO_GUARD[4][2] = {
     [POP_JUMP_IF_FALSE - POP_JUMP_IF_FALSE][0] = _GUARD_IS_TRUE_POP,
     [POP_JUMP_IF_FALSE - POP_JUMP_IF_FALSE][1] = _GUARD_IS_FALSE_POP,
     [POP_JUMP_IF_TRUE - POP_JUMP_IF_FALSE][0] = _GUARD_IS_FALSE_POP,
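Note (not part of the diff): a minimal, self-contained sketch of how the renamed BRANCH_TO_GUARD table is meant to be consulted, indexed by the branch opcode's offset from POP_JUMP_IF_FALSE and by whether the jump is considered likely. The numeric opcode and guard-uop values below are invented placeholders; only the table shape and the three entries visible in the hunk are taken from the diff.

#include <stdint.h>
#include <stdio.h>

/* Invented placeholder values, for illustration only. */
enum { POP_JUMP_IF_FALSE = 114, POP_JUMP_IF_TRUE = 115 };
enum { _GUARD_IS_TRUE_POP = 300, _GUARD_IS_FALSE_POP = 301 };

/* Same shape as above; only the entries shown in the hunk are filled in. */
static const uint16_t
BRANCH_TO_GUARD[4][2] = {
    [POP_JUMP_IF_FALSE - POP_JUMP_IF_FALSE][0] = _GUARD_IS_TRUE_POP,
    [POP_JUMP_IF_FALSE - POP_JUMP_IF_FALSE][1] = _GUARD_IS_FALSE_POP,
    [POP_JUMP_IF_TRUE - POP_JUMP_IF_FALSE][0] = _GUARD_IS_FALSE_POP,
};

int
main(void)
{
    int opcode = POP_JUMP_IF_FALSE;
    int jump_likely = 1;  /* profiling says the branch is usually taken */
    uint16_t guard = BRANCH_TO_GUARD[opcode - POP_JUMP_IF_FALSE][jump_likely];
    /* The trace follows the likely (taken) side, so the guard emitted for
     * POP_JUMP_IF_FALSE checks that the value really is false and pops it. */
    printf("guard uop = %u\n", guard);  /* prints 301 (_GUARD_IS_FALSE_POP) */
    return 0;
}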
@@ -545,7 +545,7 @@ translate_bytecode_to_trace(
                 int counter = instr[1].cache;
                 int bitcount = _Py_popcount32(counter);
                 int jump_likely = bitcount > 8;
-                uint32_t uopcode = BRANCH_TO_GUARDS[opcode - POP_JUMP_IF_FALSE][jump_likely];
+                uint32_t uopcode = BRANCH_TO_GUARD[opcode - POP_JUMP_IF_FALSE][jump_likely];
                 _Py_CODEUNIT *next_instr = instr + 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]];
                 DPRINTF(4, "%s(%d): counter=%x, bitcount=%d, likely=%d, uopcode=%s\n",
                         uop_name(opcode), oparg,
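Note (not part of the diff): a hedged, self-contained sketch of the likelihood heuristic in this hunk, assuming the 16-bit cache counter records one bit per recent execution of the branch, set when the jump was taken, so more than 8 of 16 set bits means the jump is considered likely. __builtin_popcount stands in here for _Py_popcount32.

#include <stdint.h>
#include <stdio.h>

static int
jump_is_likely(uint16_t counter)
{
    int bitcount = __builtin_popcount(counter);  /* number of recorded "taken" outcomes */
    return bitcount > 8;                         /* taken more often than not */
}

int
main(void)
{
    printf("%d\n", jump_is_likely(0xFFFF));  /* always taken       -> 1 */
    printf("%d\n", jump_is_likely(0x00FF));  /* taken exactly half -> 0 */
    printf("%d\n", jump_is_likely(0x0000));  /* never taken        -> 0 */
    return 0;
}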
@@ -762,16 +762,6 @@ translate_bytecode_to_trace(
 #define SET_BIT(array, bit) (array[(bit)>>5] |= (1<<((bit)&31)))
 #define BIT_IS_SET(array, bit) (array[(bit)>>5] & (1<<((bit)&31)))
 
-static bool
-is_branch(int opcode) {
-    /* Currently there are no jumps in the buffer,
-     * but we expect the optimizer to add them
-     * in the future. */
-    assert(opcode != _POP_JUMP_IF_FALSE &&
-           opcode != _POP_JUMP_IF_TRUE);
-    return false;
-}
-
 /* Count the number of used uops, and mark them in the bit vector `used`.
  * This can be done in a single pass using simple reachability analysis,
  * as there are no backward jumps.
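Note (not part of the diff): a small, self-contained illustration of the SET_BIT / BIT_IS_SET helpers kept by this hunk, which store one flag per instruction index in an array of uint32_t words. The buffer size and the indices used here are arbitrary.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define SET_BIT(array, bit) (array[(bit)>>5] |= (1<<((bit)&31)))
#define BIT_IS_SET(array, bit) (array[(bit)>>5] & (1<<((bit)&31)))

int
main(void)
{
    uint32_t used[4];  /* room for 128 indices, 32 per word */
    memset(used, 0, sizeof(used));
    SET_BIT(used, 3);
    SET_BIT(used, 70);  /* lands in word 2, bit 6 */
    printf("%d %d %d\n",
           BIT_IS_SET(used, 3) != 0,    /* 1 */
           BIT_IS_SET(used, 4) != 0,    /* 0 */
           BIT_IS_SET(used, 70) != 0);  /* 1 */
    return 0;
}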