diff --git a/arch/x86/crypto/serpent_avx2_glue.c b/arch/x86/crypto/serpent_avx2_glue.c
--- a/arch/x86/crypto/serpent_avx2_glue.c
+++ b/arch/x86/crypto/serpent_avx2_glue.c
@@ -27,18 +27,16 @@
#define SERPENT_AVX2_PARALLEL_BLOCKS 16
/* 16-way AVX2 parallel cipher functions */
-asmlinkage void serpent_ecb_enc_16way(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src);
-asmlinkage void serpent_ecb_dec_16way(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src);
+asmlinkage void serpent_ecb_enc_16way(void *ctx, u8 *dst, const u8 *src);
+asmlinkage void serpent_ecb_dec_16way(void *ctx, u8 *dst, const u8 *src);
asmlinkage void serpent_cbc_dec_16way(void *ctx, u128 *dst, const u128 *src);
asmlinkage void serpent_ctr_16way(void *ctx, u128 *dst, const u128 *src,
le128 *iv);
-asmlinkage void serpent_xts_enc_16way(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src, le128 *iv);
-asmlinkage void serpent_xts_dec_16way(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src, le128 *iv);
+asmlinkage void serpent_xts_enc_16way(void *ctx, u8 *dst, const u8 *src,
+ le128 *iv);
+asmlinkage void serpent_xts_dec_16way(void *ctx, u8 *dst, const u8 *src,
+ le128 *iv);
static const struct common_glue_ctx serpent_enc = {
.num_funcs = 3,
diff --git a/arch/x86/crypto/serpent_avx_glue.c b/arch/x86/crypto/serpent_avx_glue.c
--- a/arch/x86/crypto/serpent_avx_glue.c
+++ b/arch/x86/crypto/serpent_avx_glue.c
@@ -41,28 +41,25 @@
#include <asm/crypto/glue_helper.h>
/* 8-way parallel cipher functions */
-asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src);
+asmlinkage void serpent_ecb_enc_8way_avx(void *ctx, u8 *dst, const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);
-asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src);
+asmlinkage void serpent_ecb_dec_8way_avx(void *ctx, u8 *dst, const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);
-asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src);
+asmlinkage void serpent_cbc_dec_8way_avx(void *ctx, u8 *dst, const u8 *src);
EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);
-asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
+asmlinkage void serpent_ctr_8way_avx(void *ctx, u8 *dst,
const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);
-asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src, le128 *iv);
+asmlinkage void serpent_xts_enc_8way_avx(void *ctx, u8 *dst, const u8 *src,
+ le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);
-asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src, le128 *iv);
+asmlinkage void serpent_xts_dec_8way_avx(void *ctx, u8 *dst, const u8 *src,
+ le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);
void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
diff --git a/arch/x86/include/asm/crypto/serpent-avx.h b/arch/x86/include/asm/crypto/serpent-avx.h
--- a/arch/x86/include/asm/crypto/serpent-avx.h
+++ b/arch/x86/include/asm/crypto/serpent-avx.h
@@ -17,20 +17,17 @@ struct serpent_xts_ctx {
struct serpent_ctx crypt_ctx;
};
-asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src);
-asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src);
-
-asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src);
-asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src, le128 *iv);
-
-asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src, le128 *iv);
-asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src, le128 *iv);
+asmlinkage void serpent_ecb_enc_8way_avx(void *ctx, u8 *dst, const u8 *src);
+asmlinkage void serpent_ecb_dec_8way_avx(void *ctx, u8 *dst, const u8 *src);
+
+asmlinkage void serpent_cbc_dec_8way_avx(void *ctx, u8 *dst, const u8 *src);
+asmlinkage void serpent_ctr_8way_avx(void *ctx, u8 *dst, const u8 *src,
+ le128 *iv);
+
+asmlinkage void serpent_xts_enc_8way_avx(void *ctx, u8 *dst, const u8 *src,
+ le128 *iv);
+asmlinkage void serpent_xts_dec_8way_avx(void *ctx, u8 *dst, const u8 *src,
+ le128 *iv);
extern void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src,
le128 *iv);
diff --git a/arch/x86/include/asm/crypto/serpent-sse2.h b/arch/x86/include/asm/crypto/serpent-sse2.h
--- a/arch/x86/include/asm/crypto/serpent-sse2.h
+++ b/arch/x86/include/asm/crypto/serpent-sse2.h
@@ -14,22 +14,19 @@ asmlinkage void __serpent_enc_blk_4way(struct serpent_ctx *ctx, u8 *dst,
asmlinkage void serpent_dec_blk_4way(struct serpent_ctx *ctx, u8 *dst,
const u8 *src);
-static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src)
+static inline void serpent_enc_blk_xway(void *ctx, u8 *dst, const u8 *src)
{
- __serpent_enc_blk_4way(ctx, dst, src, false);
+ __serpent_enc_blk_4way((struct serpent_ctx *) ctx, dst, src, false);
}
-static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src)
+static inline void serpent_enc_blk_xway_xor(void *ctx, u8 *dst, const u8 *src)
{
- __serpent_enc_blk_4way(ctx, dst, src, true);
+ __serpent_enc_blk_4way((struct serpent_ctx *) ctx, dst, src, true);
}
-static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src)
+static inline void serpent_dec_blk_xway(void *ctx, u8 *dst, const u8 *src)
{
- serpent_dec_blk_4way(ctx, dst, src);
+ serpent_dec_blk_4way((struct serpent_ctx *) ctx, dst, src);
}
#else
@@ -41,22 +38,19 @@ asmlinkage void __serpent_enc_blk_8way(struct serpent_ctx *ctx, u8 *dst,
asmlinkage void serpent_dec_blk_8way(struct serpent_ctx *ctx, u8 *dst,
const u8 *src);
-static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src)
+static inline void serpent_enc_blk_xway(void *ctx, u8 *dst, const u8 *src)
{
- __serpent_enc_blk_8way(ctx, dst, src, false);
+ __serpent_enc_blk_8way((struct serpent_ctx *) ctx, dst, src, false);
}
-static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src)
+static inline void serpent_enc_blk_xway_xor(void *ctx, u8 *dst, const u8 *src)
{
- __serpent_enc_blk_8way(ctx, dst, src, true);
+ __serpent_enc_blk_8way((struct serpent_ctx *) ctx, dst, src, true);
}
-static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst,
- const u8 *src)
+static inline void serpent_dec_blk_xway(void *ctx, u8 *dst, const u8 *src)
{
- serpent_dec_blk_8way(ctx, dst, src);
+ serpent_dec_blk_8way((struct serpent_ctx *) ctx, dst, src);
}
#endif
Convert the use of 'struct serpent_ctx *' to 'void *' in the prototypes of
functions which are referenced through 'struct common_glue_func_entry', so
that their prototypes match the struct's function pointer types and,
consequently, become compatible with CFI requirements. Whenever needed, cast
'void *' back to 'struct serpent_ctx *'.

Signed-off-by: João Moreira <jmoreira@suse.de>
---
 arch/x86/crypto/serpent_avx2_glue.c        | 14 ++++++--------
 arch/x86/crypto/serpent_avx_glue.c         | 19 ++++++++-----------
 arch/x86/include/asm/crypto/serpent-avx.h  | 25 +++++++++++--------------
 arch/x86/include/asm/crypto/serpent-sse2.h | 30 ++++++++++++------------------
 4 files changed, 37 insertions(+), 51 deletions(-)
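
For context, below is a minimal user-space sketch of the pattern this change
targets. It is not kernel code: 'glue_func_t', 'struct glue_func_entry',
'enc_old' and 'enc_new' are simplified, hypothetical stand-ins for
'common_glue_func_t' / 'struct common_glue_func_entry' from
<asm/crypto/glue_helper.h> and the serpent asm routines. It only illustrates
why the indirect-call type and the callee prototype must be identical under
Clang's forward-edge CFI (-fsanitize=cfi-icall), and why the cast moves from
the function-pointer assignment into the callee body.

/*
 * Hypothetical stand-alone sketch, NOT kernel code: names are simplified
 * stand-ins for common_glue_func_t / struct common_glue_func_entry and the
 * serpent asm routines.
 */
#include <stdio.h>

/* Placeholder context; the real struct serpent_ctx holds the expanded key. */
struct serpent_ctx {
	unsigned int expkey[132];
};

/* Pointer type used by the dispatch table: first argument is 'void *'. */
typedef void (*glue_func_t)(void *ctx, unsigned char *dst,
			    const unsigned char *src);

struct glue_func_entry {
	unsigned int num_blocks;
	glue_func_t fn;
};

/*
 * Old style: the callee takes 'struct serpent_ctx *', so storing it in the
 * table requires a function-pointer cast.  The indirect call's type then no
 * longer matches the callee's prototype, which -fsanitize=cfi-icall rejects
 * (and which is undefined behaviour in strict C anyway).
 */
static void enc_old(struct serpent_ctx *ctx, unsigned char *dst,
		    const unsigned char *src)
{
	dst[0] = (unsigned char)(ctx->expkey[0] + src[0] + 1);
}

/*
 * New style (what this patch switches to): the callee takes 'void *', so its
 * prototype matches glue_func_t exactly and the cast moves into the body.
 */
static void enc_new(void *ctx, unsigned char *dst, const unsigned char *src)
{
	struct serpent_ctx *sctx = ctx;

	dst[0] = (unsigned char)(sctx->expkey[0] + src[0] + 2);
}

int main(void)
{
	struct serpent_ctx ctx = { { 0 } };
	unsigned char in[16] = { 0 }, out[16] = { 0 };

	struct glue_func_entry bad  = { 8, (glue_func_t)enc_old }; /* mismatch */
	struct glue_func_entry good = { 8, enc_new };              /* matches  */

	bad.fn(&ctx, out, in);	/* flagged under CFI: call type != callee type */
	good.fn(&ctx, out, in);	/* fine: prototypes are identical */

	printf("%d %u %u\n", out[0], bad.num_blocks, good.num_blocks);
	return 0;
}

Built as plain C this runs either way; built with Clang CFI, the call through
'bad.fn' is reported because the call-site type (with 'void *ctx') does not
match enc_old's actual prototype. That is the mismatch the prototype changes
above remove, which is why the casts now appear inside the callees (e.g.
'(struct serpent_ctx *) ctx' in the serpent-sse2.h inline wrappers) instead of
at the function-pointer assignments.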