@@ -283,11 +283,15 @@ static int cts_cbc_encrypt(struct skcipher_request *req)
skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
NULL, NULL);
- if (req->cryptlen <= AES_BLOCK_SIZE) {
- if (req->cryptlen < AES_BLOCK_SIZE)
+ if (req->cryptlen < AES_BLOCK_SIZE)
+ return -EINVAL;
+
+ if (req->base.flags & CRYPTO_TFM_REQ_MORE) {
+ if (req->cryptlen & (AES_BLOCK_SIZE - 1))
return -EINVAL;
+ cbc_blocks += 2;
+ } else if (req->cryptlen == AES_BLOCK_SIZE)
cbc_blocks = 1;
- }
if (cbc_blocks > 0) {
skcipher_request_set_crypt(&subreq, req->src, req->dst,
@@ -299,7 +303,8 @@ static int cts_cbc_encrypt(struct skcipher_request *req)
if (err)
return err;
- if (req->cryptlen == AES_BLOCK_SIZE)
+ if (req->cryptlen == AES_BLOCK_SIZE ||
+ req->base.flags & CRYPTO_TFM_REQ_MORE)
return 0;
dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
@@ -738,13 +743,15 @@ static struct skcipher_alg aes_algs[] = { {
.cra_driver_name = "__cts-cbc-aes-" MODE,
.cra_priority = PRIO,
.cra_flags = CRYPTO_ALG_INTERNAL,
- .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_blocksize = 1,
.cra_ctxsize = sizeof(struct crypto_aes_ctx),
.cra_module = THIS_MODULE,
},
.min_keysize = AES_MIN_KEY_SIZE,
.max_keysize = AES_MAX_KEY_SIZE,
.ivsize = AES_BLOCK_SIZE,
+ .chunksize = AES_BLOCK_SIZE,
+ .final_chunksize = 2 * AES_BLOCK_SIZE,
.walksize = 2 * AES_BLOCK_SIZE,
.setkey = skcipher_aes_setkey,
.encrypt = cts_cbc_encrypt,
As it stands cts cannot do chaining. That is, it always performs the cipher-text stealing at the end of a request. This patch adds support for chaining when the CRYPTO_TFM_REQ_MORE flag is set. It also sets the final_chunksize so that data can be withheld by the caller to enable correct processing at the true end of a request. Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au> --- arch/arm64/crypto/aes-glue.c | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-)