| Message ID | 20220301115828.355-9-liweiwei@iscas.ac.cn (mailing list archive) |
| --- | --- |
| State | New, archived |
| Series | support subsets of scalar crypto extension |
On Tue, Mar 1, 2022 at 10:09 PM Weiwei Li <liweiwei@iscas.ac.cn> wrote:
>
> - add sha256sig0, sha256sig1, sha256sum0 and sha256sum1 instructions
>
> Co-authored-by: Zewen Ye <lustrew@foxmail.com>
> Signed-off-by: Weiwei Li <liweiwei@iscas.ac.cn>
> Signed-off-by: Junqiang Wang <wangjunqiang@iscas.ac.cn>
> Reviewed-by: Richard Henderson <richard.henderson@linaro.org>

Reviewed-by: Alistair Francis <alistair.francis@wdc.com>

Alistair

> ---
>  target/riscv/insn32.decode              |  5 +++
>  target/riscv/insn_trans/trans_rvk.c.inc | 55 +++++++++++++++++++++++++
>  2 files changed, 60 insertions(+)
>
> diff --git a/target/riscv/insn32.decode b/target/riscv/insn32.decode
> index 0b800b4093..db28ecdd2b 100644
> --- a/target/riscv/insn32.decode
> +++ b/target/riscv/insn32.decode
> @@ -857,3 +857,8 @@ aes64esm 00 11011 ..... ..... 000 ..... 0110011 @r
>  # *** RV64 Zkne/zknd Standard Extension ***
>  aes64ks2 01 11111 ..... ..... 000 ..... 0110011 @r
>  aes64ks1i 00 11000 1.... ..... 001 ..... 0010011 @i_aes
> +# *** RV32 Zknh Standard Extension ***
> +sha256sig0 00 01000 00010 ..... 001 ..... 0010011 @r2
> +sha256sig1 00 01000 00011 ..... 001 ..... 0010011 @r2
> +sha256sum0 00 01000 00000 ..... 001 ..... 0010011 @r2
> +sha256sum1 00 01000 00001 ..... 001 ..... 0010011 @r2
> diff --git a/target/riscv/insn_trans/trans_rvk.c.inc b/target/riscv/insn_trans/trans_rvk.c.inc
> index b86f931b13..beea7f8e96 100644
> --- a/target/riscv/insn_trans/trans_rvk.c.inc
> +++ b/target/riscv/insn_trans/trans_rvk.c.inc
> @@ -29,6 +29,12 @@
>      } \
>  } while (0)
>
> +#define REQUIRE_ZKNH(ctx) do { \
> +    if (!ctx->cfg_ptr->ext_zknh) { \
> +        return false; \
> +    } \
> +} while (0)
> +
>  static bool gen_aes32_sm4(DisasContext *ctx, arg_k_aes *a,
>                            void (*func)(TCGv, TCGv, TCGv, TCGv))
>  {
> @@ -112,3 +118,52 @@ static bool trans_aes64im(DisasContext *ctx, arg_aes64im *a)
>      REQUIRE_ZKND(ctx);
>      return gen_unary(ctx, a, EXT_NONE, gen_helper_aes64im);
>  }
> +
> +static bool gen_sha256(DisasContext *ctx, arg_r2 *a, DisasExtend ext,
> +                       void (*func)(TCGv_i32, TCGv_i32, int32_t),
> +                       int32_t num1, int32_t num2, int32_t num3)
> +{
> +    TCGv dest = dest_gpr(ctx, a->rd);
> +    TCGv src1 = get_gpr(ctx, a->rs1, ext);
> +    TCGv_i32 t0 = tcg_temp_new_i32();
> +    TCGv_i32 t1 = tcg_temp_new_i32();
> +    TCGv_i32 t2 = tcg_temp_new_i32();
> +
> +    tcg_gen_trunc_tl_i32(t0, src1);
> +    tcg_gen_rotri_i32(t1, t0, num1);
> +    tcg_gen_rotri_i32(t2, t0, num2);
> +    tcg_gen_xor_i32(t1, t1, t2);
> +    func(t2, t0, num3);
> +    tcg_gen_xor_i32(t1, t1, t2);
> +    tcg_gen_ext_i32_tl(dest, t1);
> +
> +    gen_set_gpr(ctx, a->rd, dest);
> +    tcg_temp_free_i32(t0);
> +    tcg_temp_free_i32(t1);
> +    tcg_temp_free_i32(t2);
> +    return true;
> +}
> +
> +static bool trans_sha256sig0(DisasContext *ctx, arg_sha256sig0 *a)
> +{
> +    REQUIRE_ZKNH(ctx);
> +    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_shri_i32, 7, 18, 3);
> +}
> +
> +static bool trans_sha256sig1(DisasContext *ctx, arg_sha256sig1 *a)
> +{
> +    REQUIRE_ZKNH(ctx);
> +    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_shri_i32, 17, 19, 10);
> +}
> +
> +static bool trans_sha256sum0(DisasContext *ctx, arg_sha256sum0 *a)
> +{
> +    REQUIRE_ZKNH(ctx);
> +    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_rotri_i32, 2, 13, 22);
> +}
> +
> +static bool trans_sha256sum1(DisasContext *ctx, arg_sha256sum1 *a)
> +{
> +    REQUIRE_ZKNH(ctx);
> +    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_rotri_i32, 6, 11, 25);
> +}
> --
> 2.17.1
>
>
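For readers cross-checking the translation above: the (num1, num2, num3) triples passed to gen_sha256() are the rotate/shift amounts of the FIPS 180-4 sigma functions, so the emitted TCG sequence (two 32-bit rotates plus either a right shift or a third rotate, all XORed) computes exactly those functions, and the 32-bit result is then sign-extended to XLEN by tcg_gen_ext_i32_tl(). Below is a minimal host-side C sketch of the intended semantics; the helper names (ror32, ref_sha256*) are illustrative and not part of the patch or of QEMU.

```c
#include <stdint.h>

/* Rotate right by n bits, 0 < n < 32. */
static inline uint32_t ror32(uint32_t x, unsigned n)
{
    return (x >> n) | (x << (32 - n));
}

/* sigma0: matches gen_sha256(..., tcg_gen_shri_i32, 7, 18, 3) */
static inline uint32_t ref_sha256sig0(uint32_t x)
{
    return ror32(x, 7) ^ ror32(x, 18) ^ (x >> 3);
}

/* sigma1: matches gen_sha256(..., tcg_gen_shri_i32, 17, 19, 10) */
static inline uint32_t ref_sha256sig1(uint32_t x)
{
    return ror32(x, 17) ^ ror32(x, 19) ^ (x >> 10);
}

/* Sigma0: matches gen_sha256(..., tcg_gen_rotri_i32, 2, 13, 22) */
static inline uint32_t ref_sha256sum0(uint32_t x)
{
    return ror32(x, 2) ^ ror32(x, 13) ^ ror32(x, 22);
}

/* Sigma1: matches gen_sha256(..., tcg_gen_rotri_i32, 6, 11, 25) */
static inline uint32_t ref_sha256sum1(uint32_t x)
{
    return ror32(x, 6) ^ ror32(x, 11) ^ ror32(x, 25);
}
```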
diff --git a/target/riscv/insn32.decode b/target/riscv/insn32.decode
index 0b800b4093..db28ecdd2b 100644
--- a/target/riscv/insn32.decode
+++ b/target/riscv/insn32.decode
@@ -857,3 +857,8 @@ aes64esm 00 11011 ..... ..... 000 ..... 0110011 @r
 # *** RV64 Zkne/zknd Standard Extension ***
 aes64ks2 01 11111 ..... ..... 000 ..... 0110011 @r
 aes64ks1i 00 11000 1.... ..... 001 ..... 0010011 @i_aes
+# *** RV32 Zknh Standard Extension ***
+sha256sig0 00 01000 00010 ..... 001 ..... 0010011 @r2
+sha256sig1 00 01000 00011 ..... 001 ..... 0010011 @r2
+sha256sum0 00 01000 00000 ..... 001 ..... 0010011 @r2
+sha256sum1 00 01000 00001 ..... 001 ..... 0010011 @r2
diff --git a/target/riscv/insn_trans/trans_rvk.c.inc b/target/riscv/insn_trans/trans_rvk.c.inc
index b86f931b13..beea7f8e96 100644
--- a/target/riscv/insn_trans/trans_rvk.c.inc
+++ b/target/riscv/insn_trans/trans_rvk.c.inc
@@ -29,6 +29,12 @@
     } \
 } while (0)

+#define REQUIRE_ZKNH(ctx) do { \
+    if (!ctx->cfg_ptr->ext_zknh) { \
+        return false; \
+    } \
+} while (0)
+
 static bool gen_aes32_sm4(DisasContext *ctx, arg_k_aes *a,
                           void (*func)(TCGv, TCGv, TCGv, TCGv))
 {
@@ -112,3 +118,52 @@ static bool trans_aes64im(DisasContext *ctx, arg_aes64im *a)
     REQUIRE_ZKND(ctx);
     return gen_unary(ctx, a, EXT_NONE, gen_helper_aes64im);
 }
+
+static bool gen_sha256(DisasContext *ctx, arg_r2 *a, DisasExtend ext,
+                       void (*func)(TCGv_i32, TCGv_i32, int32_t),
+                       int32_t num1, int32_t num2, int32_t num3)
+{
+    TCGv dest = dest_gpr(ctx, a->rd);
+    TCGv src1 = get_gpr(ctx, a->rs1, ext);
+    TCGv_i32 t0 = tcg_temp_new_i32();
+    TCGv_i32 t1 = tcg_temp_new_i32();
+    TCGv_i32 t2 = tcg_temp_new_i32();
+
+    tcg_gen_trunc_tl_i32(t0, src1);
+    tcg_gen_rotri_i32(t1, t0, num1);
+    tcg_gen_rotri_i32(t2, t0, num2);
+    tcg_gen_xor_i32(t1, t1, t2);
+    func(t2, t0, num3);
+    tcg_gen_xor_i32(t1, t1, t2);
+    tcg_gen_ext_i32_tl(dest, t1);
+
+    gen_set_gpr(ctx, a->rd, dest);
+    tcg_temp_free_i32(t0);
+    tcg_temp_free_i32(t1);
+    tcg_temp_free_i32(t2);
+    return true;
+}
+
+static bool trans_sha256sig0(DisasContext *ctx, arg_sha256sig0 *a)
+{
+    REQUIRE_ZKNH(ctx);
+    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_shri_i32, 7, 18, 3);
+}
+
+static bool trans_sha256sig1(DisasContext *ctx, arg_sha256sig1 *a)
+{
+    REQUIRE_ZKNH(ctx);
+    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_shri_i32, 17, 19, 10);
+}
+
+static bool trans_sha256sum0(DisasContext *ctx, arg_sha256sum0 *a)
+{
+    REQUIRE_ZKNH(ctx);
+    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_rotri_i32, 2, 13, 22);
+}
+
+static bool trans_sha256sum1(DisasContext *ctx, arg_sha256sum1 *a)
+{
+    REQUIRE_ZKNH(ctx);
+    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_rotri_i32, 6, 11, 25);
+}
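As an aside on the four insn32.decode lines: they sit in the OP-IMM major opcode (0010011) with funct3 = 001, and the fixed upper bits form a 12-bit selector of 0x100-0x103 for sum0/sum1/sig0/sig1; only rs1 and rd vary. The following small C sketch (not QEMU code; the function names are made up for illustration) shows how such an encoding could be assembled and recognized, derived directly from the patterns above.

```c
#include <stdbool.h>
#include <stdint.h>

/*
 * Build one of the four Zknh SHA-256 instructions as a 32-bit word.
 * imm12: 0x100 = sha256sum0, 0x101 = sha256sum1,
 *        0x102 = sha256sig0, 0x103 = sha256sig1.
 */
static uint32_t encode_sha256_insn(uint32_t imm12, unsigned rs1, unsigned rd)
{
    return (imm12 << 20) |      /* bits 31:20, fixed selector       */
           (rs1 << 15)   |      /* bits 19:15, source register      */
           (1u << 12)    |      /* bits 14:12, funct3 = 001         */
           (rd << 7)     |      /* bits 11:7,  destination register */
           0x13;                /* bits 6:0,   OP-IMM major opcode  */
}

/* True if insn matches any of the four decode patterns above. */
static bool is_sha256_insn(uint32_t insn)
{
    uint32_t imm12 = insn >> 20;

    return (insn & 0x7f) == 0x13 &&
           ((insn >> 12) & 0x7) == 1 &&
           imm12 >= 0x100 && imm12 <= 0x103;
}
```

For example, encode_sha256_insn(0x102, 11, 10) yields 0x10259513, i.e. sha256sig0 with rs1 = x11 and rd = x10.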