| author | Herbert Xu <herbert@gondor.apana.org.au> | 2005-07-06 13:53:29 -0700 |
| --- | --- | --- |
| committer | David S. Miller <davem@davemloft.net> | 2005-07-06 13:53:29 -0700 |
| commit | fbdae9f3e7fb57c07cb0d973f113eb25da2e8ff2 (patch) | |
| tree | b20909c92c2d48ab449343967b1c365732d7e4ff /crypto | |
| parent | 176c3652c544b6f8d4bb1984c58c10080f45dbf0 (diff) | |
[CRYPTO] Ensure cit_iv is aligned correctly
This patch ensures that cit_iv is aligned according to cra_alignmask
by allocating it as part of the tfm structure. As a side effect the
crypto layer will also guarantee that the tfm ctx area has enough space
to be aligned by cra_alignmask. This allows us to remove the extra
space reservation from the Padlock driver.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: David S. Miller <davem@davemloft.net>
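
Not part of the commit itself, just an illustration: the sketch below reproduces the allocation-size and IV-placement arithmetic this patch introduces, as a standalone userspace program. `struct fake_alg`, `struct fake_tfm` and their fields are simplified stand-ins for the kernel's `crypto_alg`/`crypto_tfm`, not the real definitions.

```c
/*
 * Minimal userspace sketch of the alignment arithmetic added by this
 * patch.  The structs below are simplified stand-ins, not kernel types.
 */
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>

/* Same rounding the kernel's ALIGN() macro performs (a is a power of two). */
#define ALIGN(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

struct fake_alg {                 /* stand-in for struct crypto_alg  */
	unsigned int ctxsize;     /* cra_ctxsize                     */
	unsigned int alignmask;   /* cra_alignmask (15 => 16-byte)   */
	unsigned int blocksize;   /* cra_blocksize, IV size for CBC  */
};

struct fake_tfm {                 /* stand-in for struct crypto_tfm  */
	struct fake_alg *alg;
	void *cit_iv;             /* where crt_cipher.cit_iv points  */
	char ctx[];               /* ctx area + alignment slack + IV */
};

int main(void)
{
	struct fake_alg alg = { .ctxsize = 24, .alignmask = 15, .blocksize = 16 };

	/*
	 * Allocation size, mirroring crypto_ctxsize(): align the ctx length,
	 * append room for the IV, then add alignmask slack so the ctx start
	 * can be rounded up no matter where the allocator placed it.
	 */
	unsigned int len = ALIGN(alg.ctxsize, alg.alignmask + 1) + alg.blocksize;
	struct fake_tfm *tfm = calloc(1, sizeof(*tfm) + len + alg.alignmask);
	if (!tfm)
		return 1;
	tfm->alg = &alg;

	/* IV placement, mirroring the new crypto_init_cipher_ops() code. */
	uintptr_t addr = (uintptr_t)tfm->ctx;
	addr = ALIGN(addr, alg.alignmask + 1);           /* aligned ctx start */
	addr += ALIGN(alg.ctxsize, alg.alignmask + 1);   /* skip the ctx area */
	tfm->cit_iv = (void *)addr;

	printf("ctx at %p\niv  at %p (aligned: %s)\n",
	       (void *)tfm->ctx, tfm->cit_iv,
	       ((uintptr_t)tfm->cit_iv & alg.alignmask) ? "no" : "yes");

	free(tfm);
	return 0;
}
```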
Diffstat (limited to 'crypto')
| -rw-r--r-- | crypto/api.c | 32 |
| --- | --- | --- |
| -rw-r--r-- | crypto/cipher.c | 15 |
| -rw-r--r-- | crypto/internal.h | 28 |

3 files changed, 66 insertions, 9 deletions
```diff
diff --git a/crypto/api.c b/crypto/api.c
index 0b583d24f7fa..2d8d828c0ca2 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -125,20 +125,46 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
 	}
 }
 
+static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
+{
+	unsigned int len;
+
+	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
+	default:
+		BUG();
+
+	case CRYPTO_ALG_TYPE_CIPHER:
+		len = crypto_cipher_ctxsize(alg, flags);
+		break;
+
+	case CRYPTO_ALG_TYPE_DIGEST:
+		len = crypto_digest_ctxsize(alg, flags);
+		break;
+
+	case CRYPTO_ALG_TYPE_COMPRESS:
+		len = crypto_compress_ctxsize(alg, flags);
+		break;
+	}
+
+	return len + alg->cra_alignmask;
+}
+
 struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
 {
 	struct crypto_tfm *tfm = NULL;
 	struct crypto_alg *alg;
+	unsigned int tfm_size;
 
 	alg = crypto_alg_mod_lookup(name);
 	if (alg == NULL)
 		goto out;
-	
-	tfm = kmalloc(sizeof(*tfm) + alg->cra_ctxsize, GFP_KERNEL);
+
+	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
+	tfm = kmalloc(tfm_size, GFP_KERNEL);
 	if (tfm == NULL)
 		goto out_put;
 
-	memset(tfm, 0, sizeof(*tfm) + alg->cra_ctxsize);
+	memset(tfm, 0, tfm_size);
 
 	tfm->__crt_alg = alg;
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 85eb12f8e564..d3295ce14a57 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -41,7 +41,7 @@ static unsigned int crypt_slow(const struct cipher_desc *desc,
 			       struct scatter_walk *in,
 			       struct scatter_walk *out, unsigned int bsize)
 {
-	unsigned int alignmask = desc->tfm->__crt_alg->cra_alignmask;
+	unsigned int alignmask = crypto_tfm_alg_alignmask(desc->tfm);
 	u8 buffer[bsize * 2 + alignmask];
 	u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 	u8 *dst = src + bsize;
@@ -98,7 +98,7 @@ static int crypt(const struct cipher_desc *desc,
 	struct scatter_walk walk_in, walk_out;
 	struct crypto_tfm *tfm = desc->tfm;
 	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
-	unsigned int alignmask = tfm->__crt_alg->cra_alignmask;
+	unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
 	unsigned long buffer = 0;
 
 	if (!nbytes)
@@ -399,6 +399,8 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
 	}
 
 	if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
+		unsigned int align;
+		unsigned long addr;
 
 		switch (crypto_tfm_alg_blocksize(tfm)) {
 		case 8:
@@ -418,9 +420,11 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
 		}
 
 		ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
-		ops->cit_iv = kmalloc(ops->cit_ivsize, GFP_KERNEL);
-		if (ops->cit_iv == NULL)
-			ret = -ENOMEM;
+		align = crypto_tfm_alg_alignmask(tfm) + 1;
+		addr = (unsigned long)crypto_tfm_ctx(tfm);
+		addr = ALIGN(addr, align);
+		addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
+		ops->cit_iv = (void *)addr;
 	}
 
 out:
@@ -429,5 +433,4 @@ out:
 
 void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
 {
-	kfree(tfm->crt_cipher.cit_iv);
 }
diff --git a/crypto/internal.h b/crypto/internal.h
index 83b1b6d6d92b..68612874b5fd 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -16,6 +16,7 @@
 #include <linux/highmem.h>
 #include <linux/interrupt.h>
 #include <linux/init.h>
+#include <linux/kernel.h>
 #include <asm/kmap_types.h>
 
 extern enum km_type crypto_km_types[];
@@ -61,6 +62,33 @@ static inline void crypto_init_proc(void)
 { }
 #endif
 
+static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg,
+						 int flags)
+{
+	return alg->cra_ctxsize;
+}
+
+static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg,
+						 int flags)
+{
+	unsigned int len = alg->cra_ctxsize;
+
+	switch (flags & CRYPTO_TFM_MODE_MASK) {
+	case CRYPTO_TFM_MODE_CBC:
+		len = ALIGN(len, alg->cra_alignmask + 1);
+		len += alg->cra_blocksize;
+		break;
+	}
+
+	return len;
+}
+
+static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg,
+						   int flags)
+{
+	return alg->cra_ctxsize;
+}
+
 int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags);
 int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags);
 int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags);
```
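
The Padlock cleanup the commit message mentions is not part of this diff. As an illustration only (not the actual drivers/crypto/padlock-aes.c change), a driver can now rely on the cra_alignmask slack the core reserves after the tfm and simply round its ctx pointer up in place, instead of padding cra_ctxsize itself:

```c
/*
 * Illustrative sketch, not the real padlock driver code: round a per-tfm
 * context pointer up to the alignment the hardware wants.  Safe because
 * the crypto core now allocates cra_alignmask extra bytes after the tfm.
 */
#include <stdint.h>

#define PADLOCK_ALIGNMENT 16	/* assumed alignment requirement for this sketch */

static inline void *padlock_aligned_ctx(void *ctx)
{
	uintptr_t addr = (uintptr_t)ctx;

	/* Same rounding as the kernel's ALIGN() macro. */
	return (void *)((addr + PADLOCK_ALIGNMENT - 1) &
			~((uintptr_t)PADLOCK_ALIGNMENT - 1));
}
```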