Wire the existing x86 SIMD ChaCha code into the new ChaCha library
interface.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
---
 arch/x86/crypto/chacha_glue.c | 14 ++++++++++++++
 crypto/Kconfig                |  1 +
 include/crypto/chacha.h       |  9 +++++++++
 3 files changed, 24 insertions(+)

--- a/arch/x86/crypto/chacha_glue.c
+++ b/arch/x86/crypto/chacha_glue.c
@@ -123,6 +123,20 @@ static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
}
}

+void chacha_crypt(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
+ int nrounds)
+{
+ state = PTR_ALIGN(state, CHACHA_STATE_ALIGN);
+
+ if (bytes <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
+ return chacha_crypt_generic(state, dst, src, bytes, nrounds);
+
+ kernel_fpu_begin();
+ chacha_dosimd(state, dst, src, bytes, nrounds);
+ kernel_fpu_end();
+}
+EXPORT_SYMBOL(chacha_crypt);
+
static int chacha_simd_stream_xor(struct skcipher_walk *walk,
const struct chacha_ctx *ctx, const u8 *iv)
{
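
For illustration (this snippet is not part of the diff): a caller of the
new library interface sizes the state with CHACHA_STATE_WORDS, initialises
it, then streams through the buffer. All names are from <crypto/chacha.h>
as modified later in this patch; key and iv are assumed to be caller
supplied.

	u32 state[CHACHA_STATE_WORDS];	/* padded on x86-64, see below */

	chacha_init(state, key, iv);		/* key: 8 x u32, iv: 16 bytes */
	chacha_crypt(state, dst, src, len, 20);	/* 20 rounds -> ChaCha20 */

For a single block or less, or whenever the FPU is not usable (e.g. from
hard IRQ context), chacha_crypt() above falls back to
chacha_crypt_generic() instead of paying the kernel_fpu_begin()/
kernel_fpu_end() round trip.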
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -1408,6 +1408,7 @@ config CRYPTO_CHACHA20_X86_64
depends on X86 && 64BIT
select CRYPTO_BLKCIPHER
select CRYPTO_CHACHA20
+ select CRYPTO_ARCH_HAVE_LIB_CHACHA
help
SSSE3, AVX2, and AVX-512VL optimized implementations of the ChaCha20,
XChaCha20, and XChaCha12 stream ciphers.
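
For context, the symbol selected above is assumed to be a hidden Kconfig
option on the library side that architectures use to advertise a native
chacha_crypt(); a minimal sketch (not part of this patch) could look like:

	config CRYPTO_ARCH_HAVE_LIB_CHACHA
		tristate
		help
		  Declares whether the architecture provides its own
		  accelerated implementation of the ChaCha library
		  interface.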
--- a/include/crypto/chacha.h
+++ b/include/crypto/chacha.h
@@ -25,6 +25,12 @@
#define CHACHA_KEY_SIZE 32
#define CHACHA_BLOCK_SIZE 64
#define CHACHAPOLY_IV_SIZE 12

+#ifdef CONFIG_X86_64
+#define CHACHA_STATE_WORDS ((CHACHA_BLOCK_SIZE + 12) / sizeof(u32))
+#else
+#define CHACHA_STATE_WORDS (CHACHA_BLOCK_SIZE / sizeof(u32))
+#endif
+
/* 192-bit nonce, then 64-bit stream position */
#define XCHACHA_IV_SIZE 32
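
The 12 bytes of extra room on x86-64 exist purely so the state can be
realigned: a u32 array is only guaranteed 4-byte alignment, and rounding a
4-byte-aligned pointer up to a 16-byte boundary skips at most 12 bytes, so
the 19-word buffer always still holds a full 16-word (64-byte) state. A
minimal sketch (PTR_ALIGN() is the helper from <linux/kernel.h>):

	u32 state[CHACHA_STATE_WORDS];		/* 19 words on x86-64 */
	u32 *aligned = PTR_ALIGN(state, 16);	/* skips 0, 4, 8 or 12 bytes */

	/* aligned[0..15] is guaranteed to lie inside the buffer */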
@@ -57,6 +63,9 @@ static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
}

static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
{
+ if (IS_ENABLED(CONFIG_X86_64))
+ state = PTR_ALIGN(state, 16);
+
chacha_init_generic(state, key, iv);
}
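
Note the pairing: chacha_init() bumps the state pointer to a 16-byte
boundary on x86-64, and chacha_crypt() in the glue code above performs the
same PTR_ALIGN() on entry, so both ends of the interface agree on where
the 16-word state actually lives inside the padded buffer. Callers never
deal with the alignment themselves; they only size the array with
CHACHA_STATE_WORDS.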