Commit 6adfbd62 authored by Kees Cook, committed by Herbert Xu

crypto: picoxcell - Remove VLA usage of skcipher

In the quest to remove all stack VLA usage from the kernel[1], this
replaces struct crypto_skcipher and SKCIPHER_REQUEST_ON_STACK() usage
with struct crypto_sync_skcipher and SYNC_SKCIPHER_REQUEST_ON_STACK(),
which uses a fixed stack size.

[1] https://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com

Cc: Jamie Iles <jamie@jamieiles.com>
Cc: linux-arm-kernel@lists.infradead.org
Signed-off-by: Kees Cook <keescook@chromium.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent e87f203c
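The macros explain the fix: SKCIPHER_REQUEST_ON_STACK() sizes its buffer from the runtime value crypto_skcipher_reqsize(tfm), which is what generates the VLA, while the sync variant substitutes a compile-time bound. A simplified sketch of the two macros from <crypto/skcipher.h> (alignment attribute and type checking omitted; not the verbatim kernel source):

/* Old: the request size is a runtime property of the tfm, so the
 * buffer below is a variable-length array. */
#define SKCIPHER_REQUEST_ON_STACK(name, tfm) \
	char __##name##_desc[sizeof(struct skcipher_request) + \
			     crypto_skcipher_reqsize(tfm)]; \
	struct skcipher_request *name = (void *)__##name##_desc

/* New: every synchronous tfm fits within a fixed bound, so the
 * buffer has a compile-time constant size. */
#define MAX_SYNC_SKCIPHER_REQSIZE	384
#define SYNC_SKCIPHER_REQUEST_ON_STACK(name, tfm) \
	char __##name##_desc[sizeof(struct skcipher_request) + \
			     MAX_SYNC_SKCIPHER_REQSIZE]; \
	struct skcipher_request *name = (void *)__##name##_desc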
drivers/crypto/picoxcell_crypto.c

@@ -171,7 +171,7 @@ struct spacc_ablk_ctx {
 	 * The fallback cipher. If the operation can't be done in hardware,
 	 * fallback to a software version.
 	 */
-	struct crypto_skcipher *sw_cipher;
+	struct crypto_sync_skcipher *sw_cipher;
 };
 
 /* AEAD cipher context. */
@@ -799,17 +799,17 @@ static int spacc_aes_setkey(struct crypto_ablkcipher *cipher, const u8 *key,
 		 * Set the fallback transform to use the same request flags as
 		 * the hardware transform.
 		 */
-		crypto_skcipher_clear_flags(ctx->sw_cipher,
-					    CRYPTO_TFM_REQ_MASK);
-		crypto_skcipher_set_flags(ctx->sw_cipher,
-					  cipher->base.crt_flags &
-					  CRYPTO_TFM_REQ_MASK);
+		crypto_sync_skcipher_clear_flags(ctx->sw_cipher,
+						 CRYPTO_TFM_REQ_MASK);
+		crypto_sync_skcipher_set_flags(ctx->sw_cipher,
+					       cipher->base.crt_flags &
+					       CRYPTO_TFM_REQ_MASK);
 
-		err = crypto_skcipher_setkey(ctx->sw_cipher, key, len);
+		err = crypto_sync_skcipher_setkey(ctx->sw_cipher, key, len);
 
 		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
 		tfm->crt_flags |=
-			crypto_skcipher_get_flags(ctx->sw_cipher) &
+			crypto_sync_skcipher_get_flags(ctx->sw_cipher) &
 			CRYPTO_TFM_RES_MASK;
 
 		if (err)
@@ -914,7 +914,7 @@ static int spacc_ablk_do_fallback(struct ablkcipher_request *req,
 	struct crypto_tfm *old_tfm =
 		crypto_ablkcipher_tfm(crypto_ablkcipher_reqtfm(req));
 	struct spacc_ablk_ctx *ctx = crypto_tfm_ctx(old_tfm);
-	SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher);
+	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher);
 	int err;
 
 	/*
@@ -922,7 +922,7 @@ static int spacc_ablk_do_fallback(struct ablkcipher_request *req,
 	 * the ciphering has completed, put the old transform back into the
 	 * request.
 	 */
-	skcipher_request_set_tfm(subreq, ctx->sw_cipher);
+	skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher);
 	skcipher_request_set_callback(subreq, req->base.flags, NULL, NULL);
 	skcipher_request_set_crypt(subreq, req->src, req->dst,
 				   req->nbytes, req->info);
@@ -1020,9 +1020,8 @@ static int spacc_ablk_cra_init(struct crypto_tfm *tfm)
 	ctx->generic.flags = spacc_alg->type;
 	ctx->generic.engine = engine;
 	if (alg->cra_flags & CRYPTO_ALG_NEED_FALLBACK) {
-		ctx->sw_cipher = crypto_alloc_skcipher(
-			alg->cra_name, 0, CRYPTO_ALG_ASYNC |
-					  CRYPTO_ALG_NEED_FALLBACK);
+		ctx->sw_cipher = crypto_alloc_sync_skcipher(
+			alg->cra_name, 0, CRYPTO_ALG_NEED_FALLBACK);
 		if (IS_ERR(ctx->sw_cipher)) {
 			dev_warn(engine->dev, "failed to allocate fallback for %s\n",
 				 alg->cra_name);
@@ -1041,7 +1040,7 @@ static void spacc_ablk_cra_exit(struct crypto_tfm *tfm)
 {
 	struct spacc_ablk_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	crypto_free_skcipher(ctx->sw_cipher);
+	crypto_free_sync_skcipher(ctx->sw_cipher);
 }
 
 static int spacc_ablk_encrypt(struct ablkcipher_request *req)
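For illustration, a minimal sketch of the allocate/fallback pattern after this conversion; the names my_ctx, my_init_fallback, and my_encrypt_fallback are hypothetical, not part of this driver:

#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

struct my_ctx {
	struct crypto_sync_skcipher *fallback;
};

static int my_init_fallback(struct my_ctx *ctx, const char *name)
{
	/* crypto_alloc_sync_skcipher() only returns synchronous tfms,
	 * so the old CRYPTO_ALG_ASYNC mask is no longer needed. */
	ctx->fallback = crypto_alloc_sync_skcipher(name, 0,
						   CRYPTO_ALG_NEED_FALLBACK);
	return PTR_ERR_OR_ZERO(ctx->fallback);
}

static int my_encrypt_fallback(struct my_ctx *ctx, struct scatterlist *src,
			       struct scatterlist *dst, unsigned int nbytes,
			       u8 *iv)
{
	/* Fixed-size stack buffer: no VLA is generated here. */
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
	int err;

	skcipher_request_set_sync_tfm(subreq, ctx->fallback);
	skcipher_request_set_callback(subreq, 0, NULL, NULL);
	skcipher_request_set_crypt(subreq, src, dst, nbytes, iv);
	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);
	return err;
}

skcipher_request_zero() wipes the on-stack request before returning, and crypto_skcipher_encrypt() accepts the sub-request directly because the macro still declares a plain struct skcipher_request pointer.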