Commit ec2088b6 authored by Tudor Ambarus, committed by Herbert Xu

crypto: atmel-aes - Allocate aes dev at tfm init time

Allocate the atmel_aes_dev data at tfm init time, rather than for
each crypt request.
There is a single AES IP per SoC; make that explicit in the code.
Signed-off-by: Tudor Ambarus <tudor.ambarus@microchip.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent bf2db8e7
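
To illustrate the pattern the patch introduces, here is a minimal, self-contained sketch of a once-per-tfm device lookup. All names here (example_dev, example_ctx, example_dev_alloc, example_init_tfm) are hypothetical stand-ins, not the driver's real symbols; only the kernel APIs used (list_first_entry_or_null(), spin_lock_bh()/spin_unlock_bh(), crypto_skcipher_ctx()) are the ones the patch itself relies on.

```c
#include <linux/errno.h>
#include <linux/list.h>
#include <linux/spinlock.h>
#include <crypto/internal/skcipher.h>

/* Hypothetical driver state: a list of probed devices plus a lock. */
struct example_dev {
	struct list_head list;
};

struct example_ctx {
	struct example_dev *dd;
};

static struct {
	struct list_head dev_list;
	spinlock_t lock;
} example = {
	.dev_list = LIST_HEAD_INIT(example.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(example.lock),
};

/*
 * One crypto IP per SoC, so the first (and only) list entry is the
 * device.  Returns NULL if no device has probed yet.
 */
static struct example_dev *example_dev_alloc(void)
{
	struct example_dev *dd;

	spin_lock_bh(&example.lock);
	dd = list_first_entry_or_null(&example.dev_list,
				      struct example_dev, list);
	spin_unlock_bh(&example.lock);

	return dd;
}

/* Look the device up once, at tfm init time, and cache it in the context. */
static int example_init_tfm(struct crypto_skcipher *tfm)
{
	struct example_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->dd = example_dev_alloc();
	if (!ctx->dd)
		return -ENODEV;

	return 0;
}
```

Caching the device pointer in the tfm context this way moves the list walk out of the per-request hot path; the crypt handlers can then use the cached pointer directly, which is exactly what the hunks below do with ctx->dd.
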
@@ -420,24 +420,15 @@ static inline size_t atmel_aes_padlen(size_t len, size_t block_size)
 	return len ? block_size - len : 0;
 }
 
-static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_base_ctx *ctx)
+static struct atmel_aes_dev *atmel_aes_dev_alloc(struct atmel_aes_base_ctx *ctx)
 {
-	struct atmel_aes_dev *aes_dd = NULL;
-	struct atmel_aes_dev *tmp;
+	struct atmel_aes_dev *aes_dd;
 
 	spin_lock_bh(&atmel_aes.lock);
-	if (!ctx->dd) {
-		list_for_each_entry(tmp, &atmel_aes.dev_list, list) {
-			aes_dd = tmp;
-			break;
-		}
-		ctx->dd = aes_dd;
-	} else {
-		aes_dd = ctx->dd;
-	}
-
+	/* One AES IP per SoC. */
+	aes_dd = list_first_entry_or_null(&atmel_aes.dev_list,
+					  struct atmel_aes_dev, list);
 	spin_unlock_bh(&atmel_aes.lock);
 
 	return aes_dd;
 }
@@ -969,7 +960,6 @@ static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
 	ctx = crypto_tfm_ctx(areq->tfm);
 
 	dd->areq = areq;
-	dd->ctx = ctx;
 	start_async = (areq != new_areq);
 	dd->is_async = start_async;
 
@@ -1106,7 +1096,6 @@ static int atmel_aes_crypt(struct skcipher_request *req, unsigned long mode)
 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
 	struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(skcipher);
 	struct atmel_aes_reqctx *rctx;
-	struct atmel_aes_dev *dd;
 	u32 opmode = mode & AES_FLAGS_OPMODE_MASK;
 
 	if (opmode == AES_FLAGS_XTS) {
@@ -1152,10 +1141,6 @@ static int atmel_aes_crypt(struct skcipher_request *req, unsigned long mode)
 	}
 
 	ctx->is_aead = false;
 
-	dd = atmel_aes_find_dev(ctx);
-	if (!dd)
-		return -ENODEV;
-
 	rctx = skcipher_request_ctx(req);
 	rctx->mode = mode;
@@ -1169,7 +1154,7 @@ static int atmel_aes_crypt(struct skcipher_request *req, unsigned long mode)
 			   ivsize, 0);
 	}
 
-	return atmel_aes_handle_queue(dd, &req->base);
+	return atmel_aes_handle_queue(ctx->dd, &req->base);
 }
 
 static int atmel_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
@@ -1281,8 +1266,15 @@ static int atmel_aes_ctr_decrypt(struct skcipher_request *req)
 static int atmel_aes_init_tfm(struct crypto_skcipher *tfm)
 {
 	struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct atmel_aes_dev *dd;
+
+	dd = atmel_aes_dev_alloc(&ctx->base);
+	if (!dd)
+		return -ENODEV;
 
 	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
+	ctx->base.dd = dd;
+	ctx->base.dd->ctx = &ctx->base;
 	ctx->base.start = atmel_aes_start;
 
 	return 0;
@@ -1291,8 +1283,15 @@ static int atmel_aes_init_tfm(struct crypto_skcipher *tfm)
 static int atmel_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
 {
 	struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct atmel_aes_dev *dd;
+
+	dd = atmel_aes_dev_alloc(&ctx->base);
+	if (!dd)
+		return -ENODEV;
 
 	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
+	ctx->base.dd = dd;
+	ctx->base.dd->ctx = &ctx->base;
 	ctx->base.start = atmel_aes_ctr_start;
 
 	return 0;
@@ -1730,20 +1729,15 @@ static int atmel_aes_gcm_crypt(struct aead_request *req,
 {
 	struct atmel_aes_base_ctx *ctx;
 	struct atmel_aes_reqctx *rctx;
-	struct atmel_aes_dev *dd;
 
 	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
 	ctx->block_size = AES_BLOCK_SIZE;
 	ctx->is_aead = true;
 
-	dd = atmel_aes_find_dev(ctx);
-	if (!dd)
-		return -ENODEV;
-
 	rctx = aead_request_ctx(req);
 	rctx->mode = AES_FLAGS_GCM | mode;
 
-	return atmel_aes_handle_queue(dd, &req->base);
+	return atmel_aes_handle_queue(ctx->dd, &req->base);
 }
 
 static int atmel_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
@@ -1781,8 +1775,15 @@ static int atmel_aes_gcm_decrypt(struct aead_request *req)
 static int atmel_aes_gcm_init(struct crypto_aead *tfm)
 {
 	struct atmel_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
+	struct atmel_aes_dev *dd;
+
+	dd = atmel_aes_dev_alloc(&ctx->base);
+	if (!dd)
+		return -ENODEV;
 
 	crypto_aead_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
+	ctx->base.dd = dd;
+	ctx->base.dd->ctx = &ctx->base;
 	ctx->base.start = atmel_aes_gcm_start;
 
 	return 0;
@@ -1915,8 +1916,13 @@ static int atmel_aes_xts_decrypt(struct skcipher_request *req)
 static int atmel_aes_xts_init_tfm(struct crypto_skcipher *tfm)
 {
 	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct atmel_aes_dev *dd;
 	const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
 
+	dd = atmel_aes_dev_alloc(&ctx->base);
+	if (!dd)
+		return -ENODEV;
+
 	ctx->fallback_tfm = crypto_alloc_skcipher(tfm_name, 0,
 						  CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(ctx->fallback_tfm))
@@ -1924,6 +1930,8 @@ static int atmel_aes_xts_init_tfm(struct crypto_skcipher *tfm)
 
 	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx) +
 				    crypto_skcipher_reqsize(ctx->fallback_tfm));
+	ctx->base.dd = dd;
+	ctx->base.dd->ctx = &ctx->base;
 	ctx->base.start = atmel_aes_xts_start;
 
 	return 0;
@@ -2137,6 +2145,11 @@ static int atmel_aes_authenc_init_tfm(struct crypto_aead *tfm,
 {
 	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
 	unsigned int auth_reqsize = atmel_sha_authenc_get_reqsize();
+	struct atmel_aes_dev *dd;
+
+	dd = atmel_aes_dev_alloc(&ctx->base);
+	if (!dd)
+		return -ENODEV;
 
 	ctx->auth = atmel_sha_authenc_spawn(auth_mode);
 	if (IS_ERR(ctx->auth))
@@ -2144,6 +2157,8 @@ static int atmel_aes_authenc_init_tfm(struct crypto_aead *tfm,
 
 	crypto_aead_set_reqsize(tfm, (sizeof(struct atmel_aes_authenc_reqctx) +
 				      auth_reqsize));
+	ctx->base.dd = dd;
+	ctx->base.dd->ctx = &ctx->base;
 	ctx->base.start = atmel_aes_authenc_start;
 
 	return 0;
@@ -2189,7 +2204,6 @@ static int atmel_aes_authenc_crypt(struct aead_request *req,
 	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
 	u32 authsize = crypto_aead_authsize(tfm);
 	bool enc = (mode & AES_FLAGS_ENCRYPT);
-	struct atmel_aes_dev *dd;
 
 	/* Compute text length. */
 	if (!enc && req->cryptlen < authsize)
@@ -2208,11 +2222,7 @@ static int atmel_aes_authenc_crypt(struct aead_request *req,
 	ctx->block_size = AES_BLOCK_SIZE;
 	ctx->is_aead = true;
 
-	dd = atmel_aes_find_dev(ctx);
-	if (!dd)
-		return -ENODEV;
-
-	return atmel_aes_handle_queue(dd, &req->base);
+	return atmel_aes_handle_queue(ctx->dd, &req->base);
 }
 
 static int atmel_aes_authenc_cbc_aes_encrypt(struct aead_request *req)