@@ -1515,18 +1515,20 @@ fold_tuple_of_constants(basicblock *bb, int i, PyObject *consts,
      ...
      LOAD_CONST cN
      LIST_APPEND/SET_ADD 1
-     [CALL_INTRINSIC_1 INTRINSIC_LIST_TO_TUPLE] <-- when intrinsic_at_i is true
+     [CALL_INTRINSIC_1 INTRINSIC_LIST_TO_TUPLE] <-- when expected_append is true
    with:
      LOAD_CONST (c1, c2, ... cN)
-   When intrinsic_at_i is true, the instruction at `i` is the LIST_TO_TUPLE
-   intrinsic and only the BUILD_LIST/LIST_APPEND form is expected. Otherwise
-   the instruction at `i` is the trailing LIST_APPEND or SET_ADD itself, and
-   the matching BUILD_LIST/BUILD_SET start is selected from it; for sets the
-   result is wrapped in a frozenset.
+   When expected_append is true, the instruction at `i` is the LIST_TO_TUPLE
+   intrinsic (so the immediately preceding non-NOP instruction is expected
+   to be a LIST_APPEND), and only the BUILD_LIST/LIST_APPEND form is
+   considered. When expected_append is false, the instruction at `i` is the
+   trailing LIST_APPEND or SET_ADD itself, the matching BUILD_LIST/BUILD_SET
+   start is selected from its opcode, and for sets the result is wrapped in
+   a frozenset.
 */
 static int
 fold_constant_seq_into_load_const(basicblock *bb, int i,
-                                  bool intrinsic_at_i,
+                                  bool expected_append,
                                   PyObject *consts, PyObject *const_cache,
                                   _Py_hashtable_t *consts_index)
 {
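For orientation, here is a minimal, self-contained sketch of the alternating backward scan the comment above describes. The `instr_t` type, the toy opcode values, and the name `find_build_list` are illustrative stand-ins, not CPython's definitions; the real function also collects the constants it walks over and handles the SET_ADD/BUILD_SET form.

```c
#include <stdbool.h>

/* Toy stand-ins for CPython's cfg_instr and opcode macros. */
enum { NOP, BUILD_LIST, LOAD_CONST, LIST_APPEND };
typedef struct { int opcode; int oparg; } instr_t;

/* Return the index of the matching BUILD_LIST 0, or -1 if the window
   behind `i` is not a clean LOAD_CONST/LIST_APPEND alternation. */
static int
find_build_list(const instr_t *code, int i, bool expected_append)
{
    bool expect_append = expected_append;
    for (int pos = i - 1; pos >= 0; pos--) {
        int op = code[pos].opcode;
        if (op == NOP) {
            continue;                  /* NOPs never break the pattern */
        }
        if (op == BUILD_LIST && code[pos].oparg == 0) {
            /* A well-formed pattern puts a LOAD_CONST right after the
               BUILD_LIST, so the scan arrives here expecting an append. */
            return expect_append ? pos : -1;
        }
        if (op != (expect_append ? LIST_APPEND : LOAD_CONST)) {
            return -1;                 /* alternation broken: give up */
        }
        expect_append = !expect_append;
    }
    return -1;                         /* ran off the start of the block */
}
```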
@@ -1536,15 +1538,15 @@ fold_constant_seq_into_load_const(basicblock *bb, int i,
     assert(i < bb->b_iused);

     cfg_instr *target = &bb->b_instr[i];
-    int append_op = intrinsic_at_i ? LIST_APPEND : target->i_opcode;
+    int append_op = expected_append ? LIST_APPEND : target->i_opcode;
     assert(append_op == LIST_APPEND || append_op == SET_ADD);
     int build_op = append_op == LIST_APPEND ? BUILD_LIST : BUILD_SET;
     int consts_found = 0;
     /* Walking backward from `i`, we expect LIST_APPEND/SET_ADD and
        LOAD_CONST to alternate. If `i` is the trailing LIST_TO_TUPLE
        intrinsic, the next instruction back is an APPEND. If `i` is the
        trailing APPEND itself, the next instruction back is a LOAD_CONST. */
-    bool expect_append = intrinsic_at_i;
+    bool expect_append = expected_append;

     for (int pos = i - 1; pos >= 0; pos--) {
         cfg_instr *instr = &bb->b_instr[pos];
@@ -1567,7 +1569,7 @@ fold_constant_seq_into_load_const(basicblock *bb, int i,
         return ERROR;
     }

-    int newpos_start = intrinsic_at_i ? i - 1 : i;
+    int newpos_start = expected_append ? i - 1 : i;
     for (int newpos = newpos_start; newpos >= pos; newpos--) {
         instr = &bb->b_instr[newpos];
         if (instr->i_opcode == NOP) {
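After a successful scan, the window is rewritten in place. A hypothetical sketch of that step, reusing the toy model above: the real loop shown in this hunk walks the window backward, skips instructions that are already NOPs, and installs the folded constant through the consts/const_cache machinery, none of which appears here.

```c
/* Turn everything from the BUILD_LIST/BUILD_SET at `start` up to (but
   not including) `end` into NOPs, then make the instruction at `end` a
   LOAD_CONST of the folded sequence. `const_idx` is assumed to be the
   index of the new constant in the consts table. */
static void
rewrite_window_to_load_const(instr_t *code, int start, int end, int const_idx)
{
    for (int pos = start; pos < end; pos++) {
        code[pos].opcode = NOP;        /* dead builds/appends become NOPs */
        code[pos].oparg = 0;
    }
    code[end].opcode = LOAD_CONST;     /* one instruction replaces them all */
    code[end].oparg = const_idx;
}
```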
@@ -2537,13 +2539,12 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts,
                 break;
             case CALL_INTRINSIC_1:
                 if (oparg == INTRINSIC_LIST_TO_TUPLE) {
-                    RETURN_IF_ERROR(fold_constant_intrinsic_list_to_tuple(bb, i, consts, const_cache, consts_index));
-                    /* If folding didn't apply, the list-to-tuple conversion
-                       is unnecessary before GET_ITER since iterating a list
-                       and iterating a tuple are equivalent. */
-                    if (inst->i_opcode == CALL_INTRINSIC_1 && nextop == GET_ITER) {
+                    if (nextop == GET_ITER) {
                         INSTR_SET_OP0(inst, NOP);
                     }
+                    else {
+                        RETURN_IF_ERROR(fold_constant_intrinsic_list_to_tuple(bb, i, consts, const_cache, consts_index));
+                    }
                 }
                 else if (oparg == INTRINSIC_UNARY_POSITIVE) {
                     RETURN_IF_ERROR(fold_const_unaryop(bb, i, consts, const_cache, consts_index));
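The reordered dispatch above tries the cheaper rewrite first: when the intrinsic feeds straight into GET_ITER, the list-to-tuple conversion is dropped outright (iterating a list and iterating a tuple yield the same items), and constant folding is attempted only otherwise. In the toy model, the intrinsic entry point reduces to glue like this; `const_idx` is assumed to have been computed from the constants collected during the scan:

```c
/* Hypothetical glue for the CALL_INTRINSIC_1 INTRINSIC_LIST_TO_TUPLE case:
   the scan starts expecting a LIST_APPEND immediately behind `i`, and on
   success the intrinsic itself becomes the LOAD_CONST. */
static int
fold_intrinsic_list_to_tuple(instr_t *code, int i, int const_idx)
{
    int start = find_build_list(code, i, /*expected_append=*/true);
    if (start < 0) {
        return -1;                     /* pattern not found: leave as-is */
    }
    rewrite_window_to_load_const(code, start, i, const_idx);
    return 0;
}
```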