[v3,11/31] crypto: arm/chacha - Add support for chaining

Message ID: E1k0Jt8-0006Mx-Tq@fornost.hmeau.com
State: Changes Requested
Delegated to: Herbert Xu
Series: crypto: skcipher - Add support for no chaining and partial chaining

Commit Message

Herbert Xu July 28, 2020, 7:19 a.m. UTC
As it stands, chacha cannot do chaining: each request has to be
handled as a whole.  This patch adds support for chaining when the
CRYPTO_TFM_REQ_MORE flag is set.
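
For example, a caller could encrypt a long stream in two parts by
reusing the same request object (the carried state lives in the
request context) and setting CRYPTO_TFM_REQ_MORE on every part except
the last.  This is only a sketch against the flag added earlier in
this series; the names sg_src1/sg_dst1 etc. are placeholders, and all
but the final part would presumably have to be a multiple of the
chunksize (CHACHA_BLOCK_SIZE):

	/* First part: more data follows, so keep the stream state. */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MORE, NULL, NULL);
	skcipher_request_set_crypt(req, sg_src1, sg_dst1, len1, iv);
	err = crypto_skcipher_encrypt(req);

	/* Final part: flag cleared, this request finishes the stream. */
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, sg_src2, sg_dst2, len2, iv);
	err = crypto_skcipher_encrypt(req);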
    
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
---

 arch/arm/crypto/chacha-glue.c |   48 ++++++++++++++++++++++++++++--------------
 1 file changed, 32 insertions(+), 16 deletions(-)

Patch

diff --git a/arch/arm/crypto/chacha-glue.c b/arch/arm/crypto/chacha-glue.c
index 59da6c0b63b62..7f753fc54137a 100644
--- a/arch/arm/crypto/chacha-glue.c
+++ b/arch/arm/crypto/chacha-glue.c
@@ -7,10 +7,8 @@ 
  * Copyright (C) 2015 Martin Willi
  */
 
-#include <crypto/algapi.h>
 #include <crypto/internal/chacha.h>
 #include <crypto/internal/simd.h>
-#include <crypto/internal/skcipher.h>
 #include <linux/jump_label.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
@@ -106,16 +104,16 @@  void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
 EXPORT_SYMBOL(chacha_crypt_arch);
 
 static int chacha_stream_xor(struct skcipher_request *req,
-			     const struct chacha_ctx *ctx, const u8 *iv,
-			     bool neon)
+			     int nrounds, bool neon)
 {
+	struct chacha_reqctx *rctx = skcipher_request_ctx(req);
 	struct skcipher_walk walk;
-	u32 state[16];
+	u32 *state = rctx->state;
 	int err;
 
-	err = skcipher_walk_virt(&walk, req, false);
+	rctx->init = req->base.flags & CRYPTO_TFM_REQ_MORE;
 
-	chacha_init_generic(state, ctx->key, iv);
+	err = skcipher_walk_virt(&walk, req, false);
 
 	while (walk.nbytes > 0) {
 		unsigned int nbytes = walk.nbytes;
@@ -125,12 +123,12 @@  static int chacha_stream_xor(struct skcipher_request *req,
 
 		if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
 			chacha_doarm(walk.dst.virt.addr, walk.src.virt.addr,
-				     nbytes, state, ctx->nrounds);
+				     nbytes, state, nrounds);
 			state[12] += DIV_ROUND_UP(nbytes, CHACHA_BLOCK_SIZE);
 		} else {
 			kernel_neon_begin();
 			chacha_doneon(state, walk.dst.virt.addr,
-				      walk.src.virt.addr, nbytes, ctx->nrounds);
+				      walk.src.virt.addr, nbytes, nrounds);
 			kernel_neon_end();
 		}
 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
@@ -142,9 +140,13 @@  static int chacha_stream_xor(struct skcipher_request *req,
 static int do_chacha(struct skcipher_request *req, bool neon)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct chacha_reqctx *rctx = skcipher_request_ctx(req);
 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-	return chacha_stream_xor(req, ctx, req->iv, neon);
+	if (!rctx->init)
+		chacha_init_generic(rctx->state, ctx->key, req->iv);
+
+	return chacha_stream_xor(req, ctx->nrounds, neon);
 }
 
 static int chacha_arm(struct skcipher_request *req)
@@ -160,25 +162,33 @@  static int chacha_neon(struct skcipher_request *req)
 static int do_xchacha(struct skcipher_request *req, bool neon)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct chacha_reqctx *rctx = skcipher_request_ctx(req);
 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
-	struct chacha_ctx subctx;
-	u32 state[16];
+	int nrounds = ctx->nrounds;
+	u32 *state = rctx->state;
 	u8 real_iv[16];
+	u32 key[8];
+
+	if (rctx->init)
+		goto skip_init;
 
 	chacha_init_generic(state, ctx->key, req->iv);
 
 	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
-		hchacha_block_arm(state, subctx.key, ctx->nrounds);
+		hchacha_block_arm(state, key, nrounds);
 	} else {
 		kernel_neon_begin();
-		hchacha_block_neon(state, subctx.key, ctx->nrounds);
+		hchacha_block_neon(state, key, nrounds);
 		kernel_neon_end();
 	}
-	subctx.nrounds = ctx->nrounds;
 
 	memcpy(&real_iv[0], req->iv + 24, 8);
 	memcpy(&real_iv[8], req->iv + 16, 8);
-	return chacha_stream_xor(req, &subctx, real_iv, neon);
+
+	chacha_init_generic(state, key, real_iv);
+
+skip_init:
+	return chacha_stream_xor(req, nrounds, neon);
 }
 
 static int xchacha_arm(struct skcipher_request *req)
@@ -204,6 +214,7 @@  static struct skcipher_alg arm_algs[] = {
 		.max_keysize		= CHACHA_KEY_SIZE,
 		.ivsize			= CHACHA_IV_SIZE,
 		.chunksize		= CHACHA_BLOCK_SIZE,
+		.reqsize		= sizeof(struct chacha_reqctx),
 		.setkey			= chacha20_setkey,
 		.encrypt		= chacha_arm,
 		.decrypt		= chacha_arm,
@@ -219,6 +230,7 @@  static struct skcipher_alg arm_algs[] = {
 		.max_keysize		= CHACHA_KEY_SIZE,
 		.ivsize			= XCHACHA_IV_SIZE,
 		.chunksize		= CHACHA_BLOCK_SIZE,
+		.reqsize		= sizeof(struct chacha_reqctx),
 		.setkey			= chacha20_setkey,
 		.encrypt		= xchacha_arm,
 		.decrypt		= xchacha_arm,
@@ -234,6 +246,7 @@  static struct skcipher_alg arm_algs[] = {
 		.max_keysize		= CHACHA_KEY_SIZE,
 		.ivsize			= XCHACHA_IV_SIZE,
 		.chunksize		= CHACHA_BLOCK_SIZE,
+		.reqsize		= sizeof(struct chacha_reqctx),
 		.setkey			= chacha12_setkey,
 		.encrypt		= xchacha_arm,
 		.decrypt		= xchacha_arm,
@@ -254,6 +267,7 @@  static struct skcipher_alg neon_algs[] = {
 		.ivsize			= CHACHA_IV_SIZE,
 		.chunksize		= CHACHA_BLOCK_SIZE,
 		.walksize		= 4 * CHACHA_BLOCK_SIZE,
+		.reqsize		= sizeof(struct chacha_reqctx),
 		.setkey			= chacha20_setkey,
 		.encrypt		= chacha_neon,
 		.decrypt		= chacha_neon,
@@ -270,6 +284,7 @@  static struct skcipher_alg neon_algs[] = {
 		.ivsize			= XCHACHA_IV_SIZE,
 		.chunksize		= CHACHA_BLOCK_SIZE,
 		.walksize		= 4 * CHACHA_BLOCK_SIZE,
+		.reqsize		= sizeof(struct chacha_reqctx),
 		.setkey			= chacha20_setkey,
 		.encrypt		= xchacha_neon,
 		.decrypt		= xchacha_neon,
@@ -286,6 +301,7 @@  static struct skcipher_alg neon_algs[] = {
 		.ivsize			= XCHACHA_IV_SIZE,
 		.chunksize		= CHACHA_BLOCK_SIZE,
 		.walksize		= 4 * CHACHA_BLOCK_SIZE,
+		.reqsize		= sizeof(struct chacha_reqctx),
 		.setkey			= chacha12_setkey,
 		.encrypt		= xchacha_neon,
 		.decrypt		= xchacha_neon,
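
A note for reviewers: this patch depends on struct chacha_reqctx being
added to crypto/internal/chacha.h earlier in the series.  Going by the
usage above (rctx->state replaces the old on-stack u32 state[16], and
rctx->init records whether a previous chained request left a live
state behind), its layout is presumably along these lines:

	struct chacha_reqctx {
		/* ChaCha state carried across chained requests. */
		u32 state[16];

		/*
		 * Set when state is already live, i.e. the previous
		 * request on this object had CRYPTO_TFM_REQ_MORE set.
		 */
		bool init;
	};

The new .reqsize entries tell the skcipher core how much per-request
context to allocate, which is what lets the block counter (and, for
xchacha, the HChaCha-derived subkey state) survive from one chained
request to the next.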