Commit 28874f26 authored by Kees Cook, committed by Herbert Xu

crypto: chelsio - Remove VLA usage of skcipher

In the quest to remove all stack VLA usage from the kernel[1], this
replaces struct crypto_skcipher and SKCIPHER_REQUEST_ON_STACK() usage
with struct crypto_sync_skcipher and SYNC_SKCIPHER_REQUEST_ON_STACK(),
which uses a fixed stack size.

[1] https://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com
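
As a rough sketch of the conversion pattern (illustrative only, not the
literal patch; "name" stands in for the algorithm string):

	/* Before: the request size depends on the tfm chosen at runtime,
	 * so the on-stack request is a variable-length array.
	 */
	struct crypto_skcipher *tfm;

	tfm = crypto_alloc_skcipher(name, 0,
				    CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	SKCIPHER_REQUEST_ON_STACK(req, tfm);
	skcipher_request_set_tfm(req, tfm);

	/* After: a sync tfm guarantees a bounded request size, so the
	 * on-stack request has a fixed size at compile time. Note that
	 * CRYPTO_ALG_ASYNC is dropped from the flags: the sync allocator
	 * already requests only synchronous implementations.
	 */
	struct crypto_sync_skcipher *stfm;

	stfm = crypto_alloc_sync_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	SYNC_SKCIPHER_REQUEST_ON_STACK(sreq, stfm);
	skcipher_request_set_sync_tfm(sreq, stfm);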

Cc: Harsh Jain <harsh@chelsio.com>
Signed-off-by: Kees Cook <keescook@chromium.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 888a649c
drivers/crypto/chelsio/chcr_algo.c

@@ -671,7 +671,7 @@ static int chcr_sg_ent_in_wr(struct scatterlist *src,
 	return min(srclen, dstlen);
 }
 
-static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
+static int chcr_cipher_fallback(struct crypto_sync_skcipher *cipher,
 				u32 flags,
 				struct scatterlist *src,
 				struct scatterlist *dst,
@@ -681,9 +681,9 @@ static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
 {
 	int err;
 
-	SKCIPHER_REQUEST_ON_STACK(subreq, cipher);
+	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, cipher);
 
-	skcipher_request_set_tfm(subreq, cipher);
+	skcipher_request_set_sync_tfm(subreq, cipher);
 	skcipher_request_set_callback(subreq, flags, NULL, NULL);
 	skcipher_request_set_crypt(subreq, src, dst,
 				   nbytes, iv);
@@ -854,13 +854,14 @@ static int chcr_cipher_fallback_setkey(struct crypto_ablkcipher *cipher,
 	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
 	int err = 0;
 
-	crypto_skcipher_clear_flags(ablkctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
-	crypto_skcipher_set_flags(ablkctx->sw_cipher, cipher->base.crt_flags &
-				  CRYPTO_TFM_REQ_MASK);
-	err = crypto_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
+	crypto_sync_skcipher_clear_flags(ablkctx->sw_cipher,
+					 CRYPTO_TFM_REQ_MASK);
+	crypto_sync_skcipher_set_flags(ablkctx->sw_cipher,
+				cipher->base.crt_flags & CRYPTO_TFM_REQ_MASK);
+	err = crypto_sync_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
 	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
 	tfm->crt_flags |=
-		crypto_skcipher_get_flags(ablkctx->sw_cipher) &
+		crypto_sync_skcipher_get_flags(ablkctx->sw_cipher) &
 		CRYPTO_TFM_RES_MASK;
 	return err;
 }
@@ -1360,8 +1361,8 @@ static int chcr_cra_init(struct crypto_tfm *tfm)
 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
 
-	ablkctx->sw_cipher = crypto_alloc_skcipher(alg->cra_name, 0,
-				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	ablkctx->sw_cipher = crypto_alloc_sync_skcipher(alg->cra_name, 0,
+				CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(ablkctx->sw_cipher)) {
 		pr_err("failed to allocate fallback for %s\n", alg->cra_name);
 		return PTR_ERR(ablkctx->sw_cipher);
@@ -1390,8 +1391,8 @@ static int chcr_rfc3686_init(struct crypto_tfm *tfm)
 	/*RFC3686 initialises IV counter value to 1, rfc3686(ctr(aes))
 	 * cannot be used as fallback in chcr_handle_cipher_response
 	 */
-	ablkctx->sw_cipher = crypto_alloc_skcipher("ctr(aes)", 0,
-				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	ablkctx->sw_cipher = crypto_alloc_sync_skcipher("ctr(aes)", 0,
+				CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(ablkctx->sw_cipher)) {
 		pr_err("failed to allocate fallback for %s\n", alg->cra_name);
 		return PTR_ERR(ablkctx->sw_cipher);
@@ -1406,7 +1407,7 @@ static void chcr_cra_exit(struct crypto_tfm *tfm)
 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
 
-	crypto_free_skcipher(ablkctx->sw_cipher);
+	crypto_free_sync_skcipher(ablkctx->sw_cipher);
 	if (ablkctx->aes_generic)
 		crypto_free_cipher(ablkctx->aes_generic);
 }
drivers/crypto/chelsio/chcr_crypto.h

@@ -170,7 +170,7 @@ static inline struct chcr_context *h_ctx(struct crypto_ahash *tfm)
 }
 
 struct ablk_ctx {
-	struct crypto_skcipher *sw_cipher;
+	struct crypto_sync_skcipher *sw_cipher;
 	struct crypto_cipher *aes_generic;
 	__be32 key_ctx_hdr;
 	unsigned int enckey_len;
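
For reference, a minimal sketch of driving such a sync fallback end to end
(a hypothetical helper in the spirit of chcr_cipher_fallback(), not code
from this commit):

	#include <crypto/skcipher.h>

	static int example_fallback_crypt(struct crypto_sync_skcipher *tfm,
					  struct scatterlist *src,
					  struct scatterlist *dst,
					  unsigned int nbytes, u8 *iv)
	{
		/* Fixed-size on-stack request; no VLA involved. */
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, tfm);
		int err;

		skcipher_request_set_sync_tfm(subreq, tfm);
		/* Purely synchronous use, so no completion callback. */
		skcipher_request_set_callback(subreq, 0, NULL, NULL);
		skcipher_request_set_crypt(subreq, src, dst, nbytes, iv);
		err = crypto_skcipher_encrypt(subreq);
		skcipher_request_zero(subreq);
		return err;
	}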