@@ -2864,13 +2864,14 @@ static int compiler_addcompare(struct compiler *c, location loc,
 }
 
 static int
-compiler_jump_if(struct compiler *c, location *ploc,
+compiler_jump_if(struct compiler *c, location loc,
                  expr_ty e, jump_target_label next, int cond)
 {
     switch (e->kind) {
     case UnaryOp_kind:
-        if (e->v.UnaryOp.op == Not)
-            return compiler_jump_if(c, ploc, e->v.UnaryOp.operand, next, !cond);
+        if (e->v.UnaryOp.op == Not) {
+            return compiler_jump_if(c, loc, e->v.UnaryOp.operand, next, !cond);
+        }
         /* fallback to general implementation */
         break;
     case BoolOp_kind: {
@@ -2884,11 +2885,13 @@ compiler_jump_if(struct compiler *c, location *ploc,
             next2 = new_next2;
         }
         for (i = 0; i < n; ++i) {
-            if (!compiler_jump_if(c, ploc, (expr_ty)asdl_seq_GET(s, i), next2, cond2))
+            if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, i), next2, cond2)) {
                 return 0;
+            }
         }
-        if (!compiler_jump_if(c, ploc, (expr_ty)asdl_seq_GET(s, n), next, cond))
+        if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, n), next, cond)) {
             return 0;
+        }
         if (!SAME_LABEL(next2, next)) {
             USE_LABEL(c, next2);
         }
@@ -2897,45 +2900,46 @@ compiler_jump_if(struct compiler *c, location *ploc,
     case IfExp_kind: {
         NEW_JUMP_TARGET_LABEL(c, end);
         NEW_JUMP_TARGET_LABEL(c, next2);
-        if (!compiler_jump_if(c, ploc, e->v.IfExp.test, next2, 0))
+        if (!compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0)) {
             return 0;
-        if (!compiler_jump_if(c, ploc, e->v.IfExp.body, next, cond))
+        }
+        if (!compiler_jump_if(c, loc, e->v.IfExp.body, next, cond)) {
             return 0;
+        }
         ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
 
         USE_LABEL(c, next2);
-        if (!compiler_jump_if(c, ploc, e->v.IfExp.orelse, next, cond))
+        if (!compiler_jump_if(c, loc, e->v.IfExp.orelse, next, cond)) {
             return 0;
+        }
 
         USE_LABEL(c, end);
         return 1;
     }
     case Compare_kind: {
-        SET_LOC(c, e);
-        *ploc = LOC(e);
-        Py_ssize_t i, n = asdl_seq_LEN(e->v.Compare.ops) - 1;
+        Py_ssize_t n = asdl_seq_LEN(e->v.Compare.ops) - 1;
         if (n > 0) {
             if (!check_compare(c, e)) {
                 return 0;
             }
             NEW_JUMP_TARGET_LABEL(c, cleanup);
             VISIT(c, expr, e->v.Compare.left);
-            for (i = 0; i < n; i++) {
+            for (Py_ssize_t i = 0; i < n; i++) {
                 VISIT(c, expr,
                     (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
-                ADDOP_I(c, *ploc, SWAP, 2);
-                ADDOP_I(c, *ploc, COPY, 2);
-                ADDOP_COMPARE(c, *ploc, asdl_seq_GET(e->v.Compare.ops, i));
-                ADDOP_JUMP(c, *ploc, POP_JUMP_IF_FALSE, cleanup);
+                ADDOP_I(c, LOC(e), SWAP, 2);
+                ADDOP_I(c, LOC(e), COPY, 2);
+                ADDOP_COMPARE(c, LOC(e), asdl_seq_GET(e->v.Compare.ops, i));
+                ADDOP_JUMP(c, LOC(e), POP_JUMP_IF_FALSE, cleanup);
             }
             VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n));
-            ADDOP_COMPARE(c, *ploc, asdl_seq_GET(e->v.Compare.ops, n));
-            ADDOP_JUMP(c, *ploc, cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next);
+            ADDOP_COMPARE(c, LOC(e), asdl_seq_GET(e->v.Compare.ops, n));
+            ADDOP_JUMP(c, LOC(e), cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next);
             NEW_JUMP_TARGET_LABEL(c, end);
             ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
 
             USE_LABEL(c, cleanup);
-            ADDOP(c, *ploc, POP_TOP);
+            ADDOP(c, LOC(e), POP_TOP);
             if (!cond) {
                 ADDOP_JUMP(c, NO_LOCATION, JUMP, next);
             }
@@ -2964,8 +2968,7 @@ compiler_ifexp(struct compiler *c, expr_ty e)
     NEW_JUMP_TARGET_LABEL(c, end);
     NEW_JUMP_TARGET_LABEL(c, next);
 
-    location loc = LOC(e);
-    if (!compiler_jump_if(c, &loc, e->v.IfExp.test, next, 0)) {
+    if (!compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0)) {
         return 0;
     }
     VISIT(c, expr, e->v.IfExp.body);
@@ -3050,8 +3053,7 @@ compiler_if(struct compiler *c, stmt_ty s)
     else {
         next = end;
     }
-    location loc = LOC(s);
-    if (!compiler_jump_if(c, &loc, s->v.If.test, next, 0)) {
+    if (!compiler_jump_if(c, LOC(s), s->v.If.test, next, 0)) {
         return 0;
    }
     VISIT_SEQ(c, stmt, s->v.If.body);
@@ -3158,25 +3160,22 @@ compiler_async_for(struct compiler *c, stmt_ty s)
 static int
 compiler_while(struct compiler *c, stmt_ty s)
 {
-    location loc = LOC(s);
     NEW_JUMP_TARGET_LABEL(c, loop);
     NEW_JUMP_TARGET_LABEL(c, body);
     NEW_JUMP_TARGET_LABEL(c, end);
     NEW_JUMP_TARGET_LABEL(c, anchor);
 
     USE_LABEL(c, loop);
-    if (!compiler_push_fblock(c, loc, WHILE_LOOP, loop, end, NULL)) {
+    if (!compiler_push_fblock(c, LOC(s), WHILE_LOOP, loop, end, NULL)) {
         return 0;
     }
-    if (!compiler_jump_if(c, &loc, s->v.While.test, anchor, 0)) {
+    if (!compiler_jump_if(c, LOC(s), s->v.While.test, anchor, 0)) {
         return 0;
     }
 
     USE_LABEL(c, body);
     VISIT_SEQ(c, stmt, s->v.While.body);
-    SET_LOC(c, s);
-    loc = LOC(s);
-    if (!compiler_jump_if(c, &loc, s->v.While.test, body, 1)) {
+    if (!compiler_jump_if(c, LOC(s), s->v.While.test, body, 1)) {
         return 0;
     }
 
@@ -3977,8 +3976,7 @@ compiler_assert(struct compiler *c, stmt_ty s)
         return 1;
     }
     NEW_JUMP_TARGET_LABEL(c, end);
-    location loc = LOC(s);
-    if (!compiler_jump_if(c, &loc, s->v.Assert.test, end, 1)) {
+    if (!compiler_jump_if(c, LOC(s), s->v.Assert.test, end, 1)) {
         return 0;
     }
     ADDOP(c, LOC(s), LOAD_ASSERTION_ERROR);
@@ -4008,18 +4006,13 @@ compiler_stmt_expr(struct compiler *c, location loc, expr_ty value)
     }
 
     VISIT(c, expr, value);
-    /* Mark POP_TOP as artificial */
-    UNSET_LOC(c);
-    ADDOP(c, NO_LOCATION, POP_TOP);
+    ADDOP(c, NO_LOCATION, POP_TOP); /* artificial */
     return 1;
 }
 
 static int
 compiler_visit_stmt(struct compiler *c, stmt_ty s)
 {
-    Py_ssize_t i, n;
-    /* Always assign a lineno to the next instruction for a stmt. */
-    SET_LOC(c, s);
 
     switch (s->kind) {
     case FunctionDef_kind:
@@ -4033,12 +4026,11 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s)
         break;
     case Assign_kind:
     {
-        n = asdl_seq_LEN(s->v.Assign.targets);
+        Py_ssize_t n = asdl_seq_LEN(s->v.Assign.targets);
         VISIT(c, expr, s->v.Assign.value);
-        location loc = LOC(s);
-        for (i = 0; i < n; i++) {
+        for (Py_ssize_t i = 0; i < n; i++) {
             if (i < n - 1) {
-                ADDOP_I(c, loc, COPY, 1);
+                ADDOP_I(c, LOC(s), COPY, 1);
             }
             VISIT(c, expr,
                   (expr_ty)asdl_seq_GET(s->v.Assign.targets, i));
@@ -4059,7 +4051,7 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s)
         return compiler_match(c, s);
     case Raise_kind:
     {
-        n = 0;
+        Py_ssize_t n = 0;
         if (s->v.Raise.exc) {
             VISIT(c, expr, s->v.Raise.exc);
             n++;
@@ -4068,8 +4060,7 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s)
                 n++;
             }
         }
-        location loc = LOC(s);
-        ADDOP_I(c, loc, RAISE_VARARGS, (int)n);
+        ADDOP_I(c, LOC(s), RAISE_VARARGS, (int)n);
         break;
     }
     case Try_kind:
@@ -4087,24 +4078,20 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s)
         break;
     case Expr_kind:
     {
-        location loc = LOC(s);
-        return compiler_stmt_expr(c, loc, s->v.Expr.value);
+        return compiler_stmt_expr(c, LOC(s), s->v.Expr.value);
     }
     case Pass_kind:
     {
-        location loc = LOC(s);
-        ADDOP(c, loc, NOP);
+        ADDOP(c, LOC(s), NOP);
         break;
     }
     case Break_kind:
     {
-        location loc = LOC(s);
-        return compiler_break(c, loc);
+        return compiler_break(c, LOC(s));
     }
     case Continue_kind:
    {
-        location loc = LOC(s);
-        return compiler_continue(c, loc);
+        return compiler_continue(c, LOC(s));
     }
     case With_kind:
         return compiler_with(c, s, 0);
@@ -5266,7 +5253,7 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc,
     Py_ssize_t n = asdl_seq_LEN(gen->ifs);
     for (Py_ssize_t i = 0; i < n; i++) {
         expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
-        if (!compiler_jump_if(c, &loc, e, if_cleanup, 0)) {
+        if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) {
             return 0;
         }
     }
@@ -5365,7 +5352,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc,
     Py_ssize_t n = asdl_seq_LEN(gen->ifs);
     for (Py_ssize_t i = 0; i < n; i++) {
         expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
-        if (!compiler_jump_if(c, &loc, e, if_cleanup, 0)) {
+        if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) {
             return 0;
         }
     }
@@ -7100,7 +7087,7 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
         // NOTE: Returning macros are safe again.
         if (m->guard) {
             RETURN_IF_FALSE(ensure_fail_pop(c, pc, 0));
-            RETURN_IF_FALSE(compiler_jump_if(c, &loc, m->guard, pc->fail_pop[0], 0));
+            RETURN_IF_FALSE(compiler_jump_if(c, loc, m->guard, pc->fail_pop[0], 0));
         }
         // Success! Pop the subject off, we're done with it:
         if (i != cases - has_default - 1) {
@@ -7129,7 +7116,7 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
             ADDOP(c, loc, NOP);
         }
         if (m->guard) {
-            RETURN_IF_FALSE(compiler_jump_if(c, &loc, m->guard, end, 0));
+            RETURN_IF_FALSE(compiler_jump_if(c, loc, m->guard, end, 0));
         }
         VISIT_SEQ(c, stmt, m->body);
     }
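Aside: every hunk above applies the same change. compiler_jump_if and its callers stop threading a mutable location through SET_LOC/UNSET_LOC and a `location *ploc` out-parameter, and instead pass a location by value, computed at each call site with LOC(s)/LOC(e) (or NO_LOCATION for artificial instructions). Below is a minimal standalone sketch of that by-value pattern; the names (src_loc, NO_LOC, emit_jump_if) are illustrative placeholders, not CPython's API.

/* Sketch only: a location record passed by value instead of via shared state. */
#include <stdbool.h>
#include <stdio.h>

typedef struct {
    int lineno;
    int col_offset;
} src_loc;

/* Sentinel for synthetic instructions, analogous in spirit to NO_LOCATION. */
static const src_loc NO_LOC = {-1, -1};

/* The callee gets its own copy of the location, so it can forward it to
   helpers (or substitute another one) without mutating any global state. */
static bool emit_jump_if(src_loc loc, const char *cond_expr, int target)
{
    printf("%d:%d POP_JUMP_IF_FALSE -> %d ; %s\n",
           loc.lineno, loc.col_offset, target, cond_expr);
    return true;
}

int main(void)
{
    src_loc stmt_loc = {3055, 4};               /* caller computes the location */
    emit_jump_if(stmt_loc, "s->v.If.test", 42); /* location taken from the AST node */
    emit_jump_if(NO_LOC, "synthetic jump", 99); /* artificial, no source position */
    return 0;
}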