@@ -82,11 +82,6 @@ struct iv_essiv_private {
u8 *salt;
};
-/* Duplicated per CPU state for cipher */
-struct iv_essiv_private_cpu {
- struct crypto_cipher *tfm;
-};
-
struct iv_benbi_private {
int shift;
};
@@ -101,7 +96,9 @@ enum flags { DM_CRYPT_SUSPENDED, DM_CRYPT_KEY_VALID };
struct crypt_cpu {
struct ablkcipher_request *req;
struct crypto_ablkcipher *tfm;
- struct iv_essiv_private_cpu ie;
+
+ /* ESSIV: struct crypto_cipher *essiv_tfm */
+ void *iv_private;
};
/*
@@ -235,6 +232,8 @@ static int crypt_iv_essiv_init(struct crypt_config *cc)
struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
struct hash_desc desc;
struct scatterlist sg;
+ struct crypt_cpu *cs;
+ struct crypto_cipher *essiv_tfm;
int err, n, cpu;
sg_init_one(&sg, cc->key, cc->key_size);
@@ -246,9 +245,10 @@ static int crypt_iv_essiv_init(struct crypt_config *cc)
return err;
for_each_possible_cpu(cpu) {
- struct crypt_cpu *cs = per_cpu_ptr(cc->cpu, cpu);
+ cs = per_cpu_ptr(cc->cpu, cpu);
+ essiv_tfm = cs->iv_private;
- n = crypto_cipher_setkey(cs->ie.tfm, essiv->salt,
+ n = crypto_cipher_setkey(essiv_tfm, essiv->salt,
crypto_hash_digestsize(essiv->hash_tfm));
if (n) {
err = n;
@@ -264,14 +264,17 @@ static int crypt_iv_essiv_wipe(struct crypt_config *cc)
{
struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
unsigned salt_size = crypto_hash_digestsize(essiv->hash_tfm);
+ struct crypt_cpu *cs;
+ struct crypto_cipher *essiv_tfm;
int cpu, err, n;
memset(essiv->salt, 0, salt_size);
err = 0;
for_each_possible_cpu(cpu) {
- struct crypt_cpu *cs = per_cpu_ptr(cc->cpu, cpu);
- n = crypto_cipher_setkey(cs->ie.tfm, essiv->salt, salt_size);
+ cs = per_cpu_ptr(cc->cpu, cpu);
+ essiv_tfm = cs->iv_private;
+ n = crypto_cipher_setkey(essiv_tfm, essiv->salt, salt_size);
if (n)
err = n;
}
@@ -314,6 +317,8 @@ static struct crypto_cipher *setup_essiv_cpu(struct crypt_config *cc,
static void crypt_iv_essiv_dtr(struct crypt_config *cc)
{
int cpu;
+ struct crypt_cpu *cs;
+ struct crypto_cipher *essiv_tfm;
struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
crypto_free_hash(essiv->hash_tfm);
@@ -323,11 +328,11 @@ static void crypt_iv_essiv_dtr(struct crypt_config *cc)
essiv->salt = NULL;
for_each_possible_cpu(cpu) {
- struct crypt_cpu *cs = per_cpu_ptr(cc->cpu, cpu);
- if (cs->ie.tfm) {
- crypto_free_cipher(cs->ie.tfm);
- cs->ie.tfm = NULL;
- }
+ cs = per_cpu_ptr(cc->cpu, cpu);
+ essiv_tfm = cs->iv_private;
+ if (essiv_tfm)
+ crypto_free_cipher(essiv_tfm);
+ cs->iv_private = NULL;
}
}
@@ -371,7 +376,7 @@ static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti,
crypt_iv_essiv_dtr(cc);
return PTR_ERR(essiv_tfm);
}
- per_cpu_ptr(cc->cpu, cpu)->ie.tfm = essiv_tfm;
+ per_cpu_ptr(cc->cpu, cpu)->iv_private = essiv_tfm;
}
return 0;
@@ -384,9 +389,11 @@ bad:
static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
{
+ struct crypto_cipher *essiv_tfm = crypt_me(cc)->iv_private;
+
memset(iv, 0, cc->iv_size);
*(u64 *)iv = cpu_to_le64(sector);
- crypto_cipher_encrypt_one(crypt_me(cc)->ie.tfm, iv, iv);
+ crypto_cipher_encrypt_one(essiv_tfm, iv, iv);
return 0;
}