
[06/46] tcg/optimize: Use fold_masks_zsa in fold_and

Message ID 20241210152401.1823648-7-richard.henderson@linaro.org (mailing list archive)
State New
Series: tcg: Remove in-flight mask data from OptContext

Commit Message

Richard Henderson Dec. 10, 2024, 3:23 p.m. UTC
Avoid the use of the OptContext slots.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/optimize.c | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
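
The change computes the known-zero mask (z_mask), the sign-repetition mask (s_mask) and the affected-bits mask (a_mask) in locals and passes them to fold_masks_zsa(), a helper added earlier in the series, instead of writing them into the OptContext slots. The snippet below is a standalone sketch of that mask arithmetic with made-up example values; it is not QEMU code and only mirrors the expressions visible in the patch, so treat the concrete numbers and comments as assumptions for illustration.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* Example known-zero masks: bits that *may* be nonzero. */
    uint64_t z1 = 0x000000000000ffffull;   /* arg1: low 16 bits unknown */
    uint64_t z2 = 0x00000000000000ffull;   /* arg2: the constant 0xff   */

    /* Example sign-repetition masks for the two inputs. */
    uint64_t s1 = 0xffffffffffff8000ull;
    uint64_t s2 = 0xffffffffffffff80ull;

    uint64_t z_mask = z1 & z2;    /* result bits that may be nonzero  */
    uint64_t s_mask = s1 & s2;    /* sign repetitions common to both  */
    uint64_t a_mask = z1 & ~z2;   /* arg1 bits the AND actually clears
                                     (only valid because arg2 is a
                                     constant); a_mask == 0 would mean
                                     the AND leaves arg1 unchanged.   */

    printf("z=%#llx s=%#llx a=%#llx\n",
           (unsigned long long)z_mask,
           (unsigned long long)s_mask,
           (unsigned long long)a_mask);
    return 0;
}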

Comments

Pierrick Bouvier Dec. 17, 2024, 8:15 p.m. UTC | #1
On 12/10/24 07:23, Richard Henderson wrote:
> Avoid the use of the OptContext slots.
> 
> Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
> ---
>   tcg/optimize.c | 12 ++++++------
>   1 file changed, 6 insertions(+), 6 deletions(-)
> 
> diff --git a/tcg/optimize.c b/tcg/optimize.c
> index 135b14974f..000ac0f810 100644
> --- a/tcg/optimize.c
> +++ b/tcg/optimize.c
> @@ -1306,7 +1306,7 @@ static bool fold_add2(OptContext *ctx, TCGOp *op)
>   
>   static bool fold_and(OptContext *ctx, TCGOp *op)
>   {
> -    uint64_t z1, z2;
> +    uint64_t z1, z2, z_mask, s_mask, a_mask = -1;
>   
>       if (fold_const2_commutative(ctx, op) ||
>           fold_xi_to_i(ctx, op, 0) ||
> @@ -1317,24 +1317,24 @@ static bool fold_and(OptContext *ctx, TCGOp *op)
>   
>       z1 = arg_info(op->args[1])->z_mask;
>       z2 = arg_info(op->args[2])->z_mask;
> -    ctx->z_mask = z1 & z2;
> +    z_mask = z1 & z2;
>   
>       /*
>        * Sign repetitions are perforce all identical, whether they are 1 or 0.
>        * Bitwise operations preserve the relative quantity of the repetitions.
>        */
> -    ctx->s_mask = arg_info(op->args[1])->s_mask
> -                & arg_info(op->args[2])->s_mask;
> +    s_mask = arg_info(op->args[1])->s_mask
> +           & arg_info(op->args[2])->s_mask;
>   
>       /*
>        * Known-zeros does not imply known-ones.  Therefore unless
>        * arg2 is constant, we can't infer affected bits from it.
>        */
>       if (arg_is_const(op->args[2])) {
> -        ctx->a_mask = z1 & ~z2;
> +        a_mask = z1 & ~z2;
>       }
>   
> -    return fold_masks(ctx, op);
> +    return fold_masks_zsa(ctx, op, z_mask, s_mask, a_mask);
>   }
>   
>   static bool fold_andc(OptContext *ctx, TCGOp *op)

Call me young and naive, but having a masks struct with a, s, z fields
would be more readable IMHO, and less error-prone when telling the
"local" values apart from the "context" ones.

Reviewed-by: Pierrick Bouvier <pierrick.bouvier@linaro.org>
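
A rough sketch of the struct grouping suggested above could look like the following. The Masks type, its field names, and the and_masks() helper are all invented here for illustration; nothing in this snippet is taken from the posted series.

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

/* Hypothetical grouping of the three in-flight masks. */
typedef struct Masks {
    uint64_t z;   /* bits that may be nonzero in the result  */
    uint64_t s;   /* repeated sign bits shared by the inputs */
    uint64_t a;   /* bits of arg1 affected by the operation  */
} Masks;

/* A fold_and()-style caller could gather the three masks into one
 * local struct, which a single finishing helper could then consume,
 * instead of keeping three loose scalars in sync. */
static Masks and_masks(uint64_t z1, uint64_t s1,
                       uint64_t z2, uint64_t s2, bool arg2_const)
{
    Masks m = { .z = z1 & z2, .s = s1 & s2, .a = -1ull };
    if (arg2_const) {
        m.a = z1 & ~z2;   /* only a constant arg2 pins the affected bits */
    }
    return m;
}

int main(void)
{
    Masks m = and_masks(0xffff, -1ull << 15, 0xff, -1ull << 7, true);
    printf("z=%#llx s=%#llx a=%#llx\n",
           (unsigned long long)m.z,
           (unsigned long long)m.s,
           (unsigned long long)m.a);
    return 0;
}

Whether the grouping buys much over three locals is a style call; the patch as posted keeps plain scalars.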

Patch

diff --git a/tcg/optimize.c b/tcg/optimize.c
index 135b14974f..000ac0f810 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -1306,7 +1306,7 @@  static bool fold_add2(OptContext *ctx, TCGOp *op)
 
 static bool fold_and(OptContext *ctx, TCGOp *op)
 {
-    uint64_t z1, z2;
+    uint64_t z1, z2, z_mask, s_mask, a_mask = -1;
 
     if (fold_const2_commutative(ctx, op) ||
         fold_xi_to_i(ctx, op, 0) ||
@@ -1317,24 +1317,24 @@  static bool fold_and(OptContext *ctx, TCGOp *op)
 
     z1 = arg_info(op->args[1])->z_mask;
     z2 = arg_info(op->args[2])->z_mask;
-    ctx->z_mask = z1 & z2;
+    z_mask = z1 & z2;
 
     /*
      * Sign repetitions are perforce all identical, whether they are 1 or 0.
      * Bitwise operations preserve the relative quantity of the repetitions.
      */
-    ctx->s_mask = arg_info(op->args[1])->s_mask
-                & arg_info(op->args[2])->s_mask;
+    s_mask = arg_info(op->args[1])->s_mask
+           & arg_info(op->args[2])->s_mask;
 
     /*
      * Known-zeros does not imply known-ones.  Therefore unless
      * arg2 is constant, we can't infer affected bits from it.
      */
     if (arg_is_const(op->args[2])) {
-        ctx->a_mask = z1 & ~z2;
+        a_mask = z1 & ~z2;
     }
 
-    return fold_masks(ctx, op);
+    return fold_masks_zsa(ctx, op, z_mask, s_mask, a_mask);
 }
 
 static bool fold_andc(OptContext *ctx, TCGOp *op)