@@ -82,11 +82,6 @@ struct iv_essiv_private {
u8 *salt;
};
-/* Duplicated per CPU state for cipher */
-struct iv_essiv_private_cpu {
- struct crypto_cipher *tfm;
-};
-
struct iv_benbi_private {
int shift;
};
@@ -101,7 +96,9 @@ enum flags { DM_CRYPT_SUSPENDED, DM_CRYPT_KEY_VALID };
struct crypt_cpu {
struct ablkcipher_request *req;
struct crypto_ablkcipher *tfm;
- struct iv_essiv_private_cpu ie;
+
+ /* ESSIV: struct crypto_cipher *essiv_tfm */
+ void *iv_private;
};
/*
@@ -234,6 +231,8 @@ static int crypt_iv_essiv_init(struct crypt_config *cc)
struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
struct hash_desc desc;
struct scatterlist sg;
+ struct crypt_cpu *cs;
+ struct crypto_cipher *essiv_tfm;
int err, n, cpu;
sg_init_one(&sg, cc->key, cc->key_size);
@@ -245,9 +244,10 @@ static int crypt_iv_essiv_init(struct crypt_config *cc)
return err;
for_each_possible_cpu (cpu) {
- struct crypt_cpu *cs = per_cpu_ptr(cc->cpu, cpu);
+ cs = per_cpu_ptr(cc->cpu, cpu);
+ essiv_tfm = cs->iv_private;
- n = crypto_cipher_setkey(cs->ie.tfm, essiv->salt,
+ n = crypto_cipher_setkey(essiv_tfm, essiv->salt,
crypto_hash_digestsize(essiv->hash_tfm));
if (n) {
err = n;
@@ -263,14 +263,17 @@ static int crypt_iv_essiv_wipe(struct crypt_config *cc)
{
struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
unsigned salt_size = crypto_hash_digestsize(essiv->hash_tfm);
+ struct crypt_cpu *cs;
+ struct crypto_cipher *essiv_tfm;
int cpu, err, n;
memset(essiv->salt, 0, salt_size);
err = 0;
for_each_possible_cpu (cpu) {
- struct crypt_cpu *cs = per_cpu_ptr(cc->cpu, cpu);
- n = crypto_cipher_setkey(cs->ie.tfm, essiv->salt, salt_size);
+ cs = per_cpu_ptr(cc->cpu, cpu);
+ essiv_tfm = cs->iv_private;
+ n = crypto_cipher_setkey(essiv_tfm, essiv->salt, salt_size);
if (n)
err = n;
}
@@ -312,6 +315,8 @@ static struct crypto_cipher *setup_essiv_cpu(struct crypt_config *cc,
static void crypt_iv_essiv_dtr(struct crypt_config *cc)
{
int cpu;
+ struct crypt_cpu *cs;
+ struct crypto_cipher *essiv_tfm;
struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
crypto_free_hash(essiv->hash_tfm);
@@ -321,11 +326,11 @@ static void crypt_iv_essiv_dtr(struct crypt_config *cc)
essiv->salt = NULL;
for_each_possible_cpu (cpu) {
- struct crypt_cpu *cs = per_cpu_ptr(cc->cpu, cpu);
- if (cs->ie.tfm) {
- crypto_free_cipher(cs->ie.tfm);
- cs->ie.tfm = NULL;
- }
+ cs = per_cpu_ptr(cc->cpu, cpu);
+ essiv_tfm = cs->iv_private;
+ if (essiv_tfm)
+ crypto_free_cipher(essiv_tfm);
+ cs->iv_private = NULL;
}
}
@@ -365,11 +370,10 @@ static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti,
essiv_tfm = setup_essiv_cpu(cc, ti, salt,
crypto_hash_digestsize(hash_tfm));
if (IS_ERR(essiv_tfm)) {
- kfree(salt);
crypt_iv_essiv_dtr(cc);
return PTR_ERR(essiv_tfm);
}
- per_cpu_ptr(cc->cpu, cpu)->ie.tfm = essiv_tfm;
+ per_cpu_ptr(cc->cpu, cpu)->iv_private = essiv_tfm;
}
return 0;
@@ -382,9 +386,11 @@ bad:
static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
{
+ struct crypto_cipher *essiv_tfm = crypt_me(cc)->iv_private;
+
memset(iv, 0, cc->iv_size);
*(u64 *)iv = cpu_to_le64(sector);
- crypto_cipher_encrypt_one(crypt_me(cc)->ie.tfm, iv, iv);
+ crypto_cipher_encrypt_one(essiv_tfm, iv, iv);
return 0;
}