Commit cc815653 authored by Herbert Xu

crypto: nx - Convert ccm to new AEAD interface

This patch converts the nx ccm and rfc4309 implementations to the
new AEAD interface.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 2642d6ab
...@@ -94,8 +94,6 @@ static int ccm_aes_nx_setauthsize(struct crypto_aead *tfm, ...@@ -94,8 +94,6 @@ static int ccm_aes_nx_setauthsize(struct crypto_aead *tfm,
return -EINVAL; return -EINVAL;
} }
crypto_aead_crt(tfm)->authsize = authsize;
return 0; return 0;
} }
...@@ -111,8 +109,6 @@ static int ccm4309_aes_nx_setauthsize(struct crypto_aead *tfm, ...@@ -111,8 +109,6 @@ static int ccm4309_aes_nx_setauthsize(struct crypto_aead *tfm,
return -EINVAL; return -EINVAL;
} }
crypto_aead_crt(tfm)->authsize = authsize;
return 0; return 0;
} }
...@@ -174,6 +170,7 @@ static int generate_pat(u8 *iv, ...@@ -174,6 +170,7 @@ static int generate_pat(u8 *iv,
struct nx_crypto_ctx *nx_ctx, struct nx_crypto_ctx *nx_ctx,
unsigned int authsize, unsigned int authsize,
unsigned int nbytes, unsigned int nbytes,
unsigned int assoclen,
u8 *out) u8 *out)
{ {
struct nx_sg *nx_insg = nx_ctx->in_sg; struct nx_sg *nx_insg = nx_ctx->in_sg;
...@@ -200,16 +197,16 @@ static int generate_pat(u8 *iv, ...@@ -200,16 +197,16 @@ static int generate_pat(u8 *iv,
* greater than 2^32. * greater than 2^32.
*/ */
if (!req->assoclen) { if (!assoclen) {
b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0; b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0;
} else if (req->assoclen <= 14) { } else if (assoclen <= 14) {
/* if associated data is 14 bytes or less, we do 1 GCM /* if associated data is 14 bytes or less, we do 1 GCM
* operation on 2 AES blocks, B0 (stored in the csbcpb) and B1, * operation on 2 AES blocks, B0 (stored in the csbcpb) and B1,
* which is fed in through the source buffers here */ * which is fed in through the source buffers here */
b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0; b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0;
b1 = nx_ctx->priv.ccm.iauth_tag; b1 = nx_ctx->priv.ccm.iauth_tag;
iauth_len = req->assoclen; iauth_len = assoclen;
} else if (req->assoclen <= 65280) { } else if (assoclen <= 65280) {
/* if associated data is less than (2^16 - 2^8), we construct /* if associated data is less than (2^16 - 2^8), we construct
* B1 differently and feed in the associated data to a CCA * B1 differently and feed in the associated data to a CCA
* operation */ * operation */
...@@ -223,7 +220,7 @@ static int generate_pat(u8 *iv, ...@@ -223,7 +220,7 @@ static int generate_pat(u8 *iv,
} }
/* generate B0 */ /* generate B0 */
rc = generate_b0(iv, req->assoclen, authsize, nbytes, b0); rc = generate_b0(iv, assoclen, authsize, nbytes, b0);
if (rc) if (rc)
return rc; return rc;
...@@ -233,22 +230,22 @@ static int generate_pat(u8 *iv, ...@@ -233,22 +230,22 @@ static int generate_pat(u8 *iv,
*/ */
if (b1) { if (b1) {
memset(b1, 0, 16); memset(b1, 0, 16);
if (req->assoclen <= 65280) { if (assoclen <= 65280) {
*(u16 *)b1 = (u16)req->assoclen; *(u16 *)b1 = assoclen;
scatterwalk_map_and_copy(b1 + 2, req->assoc, 0, scatterwalk_map_and_copy(b1 + 2, req->src, 0,
iauth_len, SCATTERWALK_FROM_SG); iauth_len, SCATTERWALK_FROM_SG);
} else { } else {
*(u16 *)b1 = (u16)(0xfffe); *(u16 *)b1 = (u16)(0xfffe);
*(u32 *)&b1[2] = (u32)req->assoclen; *(u32 *)&b1[2] = assoclen;
scatterwalk_map_and_copy(b1 + 6, req->assoc, 0, scatterwalk_map_and_copy(b1 + 6, req->src, 0,
iauth_len, SCATTERWALK_FROM_SG); iauth_len, SCATTERWALK_FROM_SG);
} }
} }
/* now copy any remaining AAD to scatterlist and call nx... */ /* now copy any remaining AAD to scatterlist and call nx... */
if (!req->assoclen) { if (!assoclen) {
return rc; return rc;
} else if (req->assoclen <= 14) { } else if (assoclen <= 14) {
unsigned int len = 16; unsigned int len = 16;
nx_insg = nx_build_sg_list(nx_insg, b1, &len, nx_ctx->ap->sglen); nx_insg = nx_build_sg_list(nx_insg, b1, &len, nx_ctx->ap->sglen);
...@@ -280,7 +277,7 @@ static int generate_pat(u8 *iv, ...@@ -280,7 +277,7 @@ static int generate_pat(u8 *iv,
return rc; return rc;
atomic_inc(&(nx_ctx->stats->aes_ops)); atomic_inc(&(nx_ctx->stats->aes_ops));
atomic64_add(req->assoclen, &(nx_ctx->stats->aes_bytes)); atomic64_add(assoclen, &nx_ctx->stats->aes_bytes);
} else { } else {
unsigned int processed = 0, to_process; unsigned int processed = 0, to_process;
...@@ -294,15 +291,15 @@ static int generate_pat(u8 *iv, ...@@ -294,15 +291,15 @@ static int generate_pat(u8 *iv,
nx_ctx->ap->databytelen/NX_PAGE_SIZE); nx_ctx->ap->databytelen/NX_PAGE_SIZE);
do { do {
to_process = min_t(u32, req->assoclen - processed, to_process = min_t(u32, assoclen - processed,
nx_ctx->ap->databytelen); nx_ctx->ap->databytelen);
nx_insg = nx_walk_and_build(nx_ctx->in_sg, nx_insg = nx_walk_and_build(nx_ctx->in_sg,
nx_ctx->ap->sglen, nx_ctx->ap->sglen,
req->assoc, processed, req->src, processed,
&to_process); &to_process);
if ((to_process + processed) < req->assoclen) { if ((to_process + processed) < assoclen) {
NX_CPB_FDM(nx_ctx->csbcpb_aead) |= NX_CPB_FDM(nx_ctx->csbcpb_aead) |=
NX_FDM_INTERMEDIATE; NX_FDM_INTERMEDIATE;
} else { } else {
...@@ -328,11 +325,10 @@ static int generate_pat(u8 *iv, ...@@ -328,11 +325,10 @@ static int generate_pat(u8 *iv,
NX_CPB_FDM(nx_ctx->csbcpb_aead) |= NX_FDM_CONTINUATION; NX_CPB_FDM(nx_ctx->csbcpb_aead) |= NX_FDM_CONTINUATION;
atomic_inc(&(nx_ctx->stats->aes_ops)); atomic_inc(&(nx_ctx->stats->aes_ops));
atomic64_add(req->assoclen, atomic64_add(assoclen, &nx_ctx->stats->aes_bytes);
&(nx_ctx->stats->aes_bytes));
processed += to_process; processed += to_process;
} while (processed < req->assoclen); } while (processed < assoclen);
result = nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0; result = nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0;
} }
...@@ -343,7 +339,8 @@ static int generate_pat(u8 *iv, ...@@ -343,7 +339,8 @@ static int generate_pat(u8 *iv,
} }
static int ccm_nx_decrypt(struct aead_request *req, static int ccm_nx_decrypt(struct aead_request *req,
struct blkcipher_desc *desc) struct blkcipher_desc *desc,
unsigned int assoclen)
{ {
struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
...@@ -360,10 +357,10 @@ static int ccm_nx_decrypt(struct aead_request *req, ...@@ -360,10 +357,10 @@ static int ccm_nx_decrypt(struct aead_request *req,
/* copy out the auth tag to compare with later */ /* copy out the auth tag to compare with later */
scatterwalk_map_and_copy(priv->oauth_tag, scatterwalk_map_and_copy(priv->oauth_tag,
req->src, nbytes, authsize, req->src, nbytes + req->assoclen, authsize,
SCATTERWALK_FROM_SG); SCATTERWALK_FROM_SG);
rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, assoclen,
csbcpb->cpb.aes_ccm.in_pat_or_b0); csbcpb->cpb.aes_ccm.in_pat_or_b0);
if (rc) if (rc)
goto out; goto out;
...@@ -383,8 +380,8 @@ static int ccm_nx_decrypt(struct aead_request *req, ...@@ -383,8 +380,8 @@ static int ccm_nx_decrypt(struct aead_request *req,
NX_CPB_FDM(nx_ctx->csbcpb) &= ~NX_FDM_ENDE_ENCRYPT; NX_CPB_FDM(nx_ctx->csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;
rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src,
&to_process, processed, &to_process, processed + req->assoclen,
csbcpb->cpb.aes_ccm.iv_or_ctr); csbcpb->cpb.aes_ccm.iv_or_ctr);
if (rc) if (rc)
goto out; goto out;
...@@ -420,7 +417,8 @@ static int ccm_nx_decrypt(struct aead_request *req, ...@@ -420,7 +417,8 @@ static int ccm_nx_decrypt(struct aead_request *req,
} }
static int ccm_nx_encrypt(struct aead_request *req, static int ccm_nx_encrypt(struct aead_request *req,
struct blkcipher_desc *desc) struct blkcipher_desc *desc,
unsigned int assoclen)
{ {
struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
...@@ -432,7 +430,7 @@ static int ccm_nx_encrypt(struct aead_request *req, ...@@ -432,7 +430,7 @@ static int ccm_nx_encrypt(struct aead_request *req,
spin_lock_irqsave(&nx_ctx->lock, irq_flags); spin_lock_irqsave(&nx_ctx->lock, irq_flags);
rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, assoclen,
csbcpb->cpb.aes_ccm.in_pat_or_b0); csbcpb->cpb.aes_ccm.in_pat_or_b0);
if (rc) if (rc)
goto out; goto out;
...@@ -451,7 +449,7 @@ static int ccm_nx_encrypt(struct aead_request *req, ...@@ -451,7 +449,7 @@ static int ccm_nx_encrypt(struct aead_request *req,
NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT; NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src,
&to_process, processed, &to_process, processed + req->assoclen,
csbcpb->cpb.aes_ccm.iv_or_ctr); csbcpb->cpb.aes_ccm.iv_or_ctr);
if (rc) if (rc)
goto out; goto out;
...@@ -483,7 +481,7 @@ static int ccm_nx_encrypt(struct aead_request *req, ...@@ -483,7 +481,7 @@ static int ccm_nx_encrypt(struct aead_request *req,
/* copy out the auth tag */ /* copy out the auth tag */
scatterwalk_map_and_copy(csbcpb->cpb.aes_ccm.out_pat_or_mac, scatterwalk_map_and_copy(csbcpb->cpb.aes_ccm.out_pat_or_mac,
req->dst, nbytes, authsize, req->dst, nbytes + req->assoclen, authsize,
SCATTERWALK_TO_SG); SCATTERWALK_TO_SG);
out: out:
...@@ -503,9 +501,8 @@ static int ccm4309_aes_nx_encrypt(struct aead_request *req) ...@@ -503,9 +501,8 @@ static int ccm4309_aes_nx_encrypt(struct aead_request *req)
memcpy(iv + 4, req->iv, 8); memcpy(iv + 4, req->iv, 8);
desc.info = iv; desc.info = iv;
desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
return ccm_nx_encrypt(req, &desc); return ccm_nx_encrypt(req, &desc, req->assoclen - 8);
} }
static int ccm_aes_nx_encrypt(struct aead_request *req) static int ccm_aes_nx_encrypt(struct aead_request *req)
...@@ -514,13 +511,12 @@ static int ccm_aes_nx_encrypt(struct aead_request *req) ...@@ -514,13 +511,12 @@ static int ccm_aes_nx_encrypt(struct aead_request *req)
int rc; int rc;
desc.info = req->iv; desc.info = req->iv;
desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
rc = crypto_ccm_check_iv(desc.info); rc = crypto_ccm_check_iv(desc.info);
if (rc) if (rc)
return rc; return rc;
return ccm_nx_encrypt(req, &desc); return ccm_nx_encrypt(req, &desc, req->assoclen);
} }
static int ccm4309_aes_nx_decrypt(struct aead_request *req) static int ccm4309_aes_nx_decrypt(struct aead_request *req)
...@@ -535,9 +531,8 @@ static int ccm4309_aes_nx_decrypt(struct aead_request *req) ...@@ -535,9 +531,8 @@ static int ccm4309_aes_nx_decrypt(struct aead_request *req)
memcpy(iv + 4, req->iv, 8); memcpy(iv + 4, req->iv, 8);
desc.info = iv; desc.info = iv;
desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
return ccm_nx_decrypt(req, &desc); return ccm_nx_decrypt(req, &desc, req->assoclen - 8);
} }
static int ccm_aes_nx_decrypt(struct aead_request *req) static int ccm_aes_nx_decrypt(struct aead_request *req)
...@@ -546,13 +541,12 @@ static int ccm_aes_nx_decrypt(struct aead_request *req) ...@@ -546,13 +541,12 @@ static int ccm_aes_nx_decrypt(struct aead_request *req)
int rc; int rc;
desc.info = req->iv; desc.info = req->iv;
desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
rc = crypto_ccm_check_iv(desc.info); rc = crypto_ccm_check_iv(desc.info);
if (rc) if (rc)
return rc; return rc;
return ccm_nx_decrypt(req, &desc); return ccm_nx_decrypt(req, &desc, req->assoclen);
} }
/* tell the block cipher walk routines that this is a stream cipher by /* tell the block cipher walk routines that this is a stream cipher by
...@@ -560,47 +554,44 @@ static int ccm_aes_nx_decrypt(struct aead_request *req) ...@@ -560,47 +554,44 @@ static int ccm_aes_nx_decrypt(struct aead_request *req)
* during encrypt/decrypt doesn't solve this problem, because it calls * during encrypt/decrypt doesn't solve this problem, because it calls
* blkcipher_walk_done under the covers, which doesn't use walk->blocksize, * blkcipher_walk_done under the covers, which doesn't use walk->blocksize,
* but instead uses this tfm->blocksize. */ * but instead uses this tfm->blocksize. */
struct crypto_alg nx_ccm_aes_alg = { struct aead_alg nx_ccm_aes_alg = {
.cra_name = "ccm(aes)", .base = {
.cra_driver_name = "ccm-aes-nx", .cra_name = "ccm(aes)",
.cra_priority = 300, .cra_driver_name = "ccm-aes-nx",
.cra_flags = CRYPTO_ALG_TYPE_AEAD | .cra_priority = 300,
CRYPTO_ALG_NEED_FALLBACK, .cra_flags = CRYPTO_ALG_NEED_FALLBACK |
.cra_blocksize = 1, CRYPTO_ALG_AEAD_NEW,
.cra_ctxsize = sizeof(struct nx_crypto_ctx), .cra_blocksize = 1,
.cra_type = &crypto_aead_type, .cra_ctxsize = sizeof(struct nx_crypto_ctx),
.cra_module = THIS_MODULE, .cra_module = THIS_MODULE,
.cra_init = nx_crypto_ctx_aes_ccm_init, },
.cra_exit = nx_crypto_ctx_exit, .init = nx_crypto_ctx_aes_ccm_init,
.cra_aead = { .exit = nx_crypto_ctx_aead_exit,
.ivsize = AES_BLOCK_SIZE, .ivsize = AES_BLOCK_SIZE,
.maxauthsize = AES_BLOCK_SIZE, .maxauthsize = AES_BLOCK_SIZE,
.setkey = ccm_aes_nx_set_key, .setkey = ccm_aes_nx_set_key,
.setauthsize = ccm_aes_nx_setauthsize, .setauthsize = ccm_aes_nx_setauthsize,
.encrypt = ccm_aes_nx_encrypt, .encrypt = ccm_aes_nx_encrypt,
.decrypt = ccm_aes_nx_decrypt, .decrypt = ccm_aes_nx_decrypt,
}
}; };
struct crypto_alg nx_ccm4309_aes_alg = { struct aead_alg nx_ccm4309_aes_alg = {
.cra_name = "rfc4309(ccm(aes))", .base = {
.cra_driver_name = "rfc4309-ccm-aes-nx", .cra_name = "rfc4309(ccm(aes))",
.cra_priority = 300, .cra_driver_name = "rfc4309-ccm-aes-nx",
.cra_flags = CRYPTO_ALG_TYPE_AEAD | .cra_priority = 300,
CRYPTO_ALG_NEED_FALLBACK, .cra_flags = CRYPTO_ALG_NEED_FALLBACK |
.cra_blocksize = 1, CRYPTO_ALG_AEAD_NEW,
.cra_ctxsize = sizeof(struct nx_crypto_ctx), .cra_blocksize = 1,
.cra_type = &crypto_nivaead_type, .cra_ctxsize = sizeof(struct nx_crypto_ctx),
.cra_module = THIS_MODULE, .cra_module = THIS_MODULE,
.cra_init = nx_crypto_ctx_aes_ccm_init, },
.cra_exit = nx_crypto_ctx_exit, .init = nx_crypto_ctx_aes_ccm_init,
.cra_aead = { .exit = nx_crypto_ctx_aead_exit,
.ivsize = 8, .ivsize = 8,
.maxauthsize = AES_BLOCK_SIZE, .maxauthsize = AES_BLOCK_SIZE,
.setkey = ccm4309_aes_nx_set_key, .setkey = ccm4309_aes_nx_set_key,
.setauthsize = ccm4309_aes_nx_setauthsize, .setauthsize = ccm4309_aes_nx_setauthsize,
.encrypt = ccm4309_aes_nx_encrypt, .encrypt = ccm4309_aes_nx_encrypt,
.decrypt = ccm4309_aes_nx_decrypt, .decrypt = ccm4309_aes_nx_decrypt,
.geniv = "seqiv",
}
}; };
...@@ -612,11 +612,11 @@ static int nx_register_algs(void) ...@@ -612,11 +612,11 @@ static int nx_register_algs(void)
if (rc) if (rc)
goto out_unreg_gcm; goto out_unreg_gcm;
rc = nx_register_alg(&nx_ccm_aes_alg, NX_FC_AES, NX_MODE_AES_CCM); rc = nx_register_aead(&nx_ccm_aes_alg, NX_FC_AES, NX_MODE_AES_CCM);
if (rc) if (rc)
goto out_unreg_gcm4106; goto out_unreg_gcm4106;
rc = nx_register_alg(&nx_ccm4309_aes_alg, NX_FC_AES, NX_MODE_AES_CCM); rc = nx_register_aead(&nx_ccm4309_aes_alg, NX_FC_AES, NX_MODE_AES_CCM);
if (rc) if (rc)
goto out_unreg_ccm; goto out_unreg_ccm;
...@@ -644,9 +644,9 @@ static int nx_register_algs(void) ...@@ -644,9 +644,9 @@ static int nx_register_algs(void)
nx_unregister_shash(&nx_shash_sha256_alg, NX_FC_SHA, NX_MODE_SHA, nx_unregister_shash(&nx_shash_sha256_alg, NX_FC_SHA, NX_MODE_SHA,
NX_PROPS_SHA256); NX_PROPS_SHA256);
out_unreg_ccm4309: out_unreg_ccm4309:
nx_unregister_alg(&nx_ccm4309_aes_alg, NX_FC_AES, NX_MODE_AES_CCM); nx_unregister_aead(&nx_ccm4309_aes_alg, NX_FC_AES, NX_MODE_AES_CCM);
out_unreg_ccm: out_unreg_ccm:
nx_unregister_alg(&nx_ccm_aes_alg, NX_FC_AES, NX_MODE_AES_CCM); nx_unregister_aead(&nx_ccm_aes_alg, NX_FC_AES, NX_MODE_AES_CCM);
out_unreg_gcm4106: out_unreg_gcm4106:
nx_unregister_aead(&nx_gcm4106_aes_alg, NX_FC_AES, NX_MODE_AES_GCM); nx_unregister_aead(&nx_gcm4106_aes_alg, NX_FC_AES, NX_MODE_AES_GCM);
out_unreg_gcm: out_unreg_gcm:
...@@ -711,11 +711,10 @@ static int nx_crypto_ctx_init(struct nx_crypto_ctx *nx_ctx, u32 fc, u32 mode) ...@@ -711,11 +711,10 @@ static int nx_crypto_ctx_init(struct nx_crypto_ctx *nx_ctx, u32 fc, u32 mode)
} }
/* entry points from the crypto tfm initializers */ /* entry points from the crypto tfm initializers */
int nx_crypto_ctx_aes_ccm_init(struct crypto_tfm *tfm) int nx_crypto_ctx_aes_ccm_init(struct crypto_aead *tfm)
{ {
crypto_aead_set_reqsize(__crypto_aead_cast(tfm), crypto_aead_set_reqsize(tfm, sizeof(struct nx_ccm_rctx));
sizeof(struct nx_ccm_rctx)); return nx_crypto_ctx_init(crypto_aead_ctx(tfm), NX_FC_AES,
return nx_crypto_ctx_init(crypto_tfm_ctx(tfm), NX_FC_AES,
NX_MODE_AES_CCM); NX_MODE_AES_CCM);
} }
...@@ -813,9 +812,9 @@ static int nx_remove(struct vio_dev *viodev) ...@@ -813,9 +812,9 @@ static int nx_remove(struct vio_dev *viodev)
NX_FC_SHA, NX_MODE_SHA, NX_PROPS_SHA256); NX_FC_SHA, NX_MODE_SHA, NX_PROPS_SHA256);
nx_unregister_shash(&nx_shash_sha256_alg, nx_unregister_shash(&nx_shash_sha256_alg,
NX_FC_SHA, NX_MODE_SHA, NX_PROPS_SHA512); NX_FC_SHA, NX_MODE_SHA, NX_PROPS_SHA512);
nx_unregister_alg(&nx_ccm4309_aes_alg, nx_unregister_aead(&nx_ccm4309_aes_alg,
NX_FC_AES, NX_MODE_AES_CCM); NX_FC_AES, NX_MODE_AES_CCM);
nx_unregister_alg(&nx_ccm_aes_alg, NX_FC_AES, NX_MODE_AES_CCM); nx_unregister_aead(&nx_ccm_aes_alg, NX_FC_AES, NX_MODE_AES_CCM);
nx_unregister_aead(&nx_gcm4106_aes_alg, nx_unregister_aead(&nx_gcm4106_aes_alg,
NX_FC_AES, NX_MODE_AES_GCM); NX_FC_AES, NX_MODE_AES_GCM);
nx_unregister_aead(&nx_gcm_aes_alg, nx_unregister_aead(&nx_gcm_aes_alg,
......
...@@ -150,7 +150,7 @@ struct nx_crypto_ctx { ...@@ -150,7 +150,7 @@ struct nx_crypto_ctx {
}; };
/* prototypes */ /* prototypes */
int nx_crypto_ctx_aes_ccm_init(struct crypto_tfm *tfm); int nx_crypto_ctx_aes_ccm_init(struct crypto_aead *tfm);
int nx_crypto_ctx_aes_gcm_init(struct crypto_aead *tfm); int nx_crypto_ctx_aes_gcm_init(struct crypto_aead *tfm);
int nx_crypto_ctx_aes_xcbc_init(struct crypto_tfm *tfm); int nx_crypto_ctx_aes_xcbc_init(struct crypto_tfm *tfm);
int nx_crypto_ctx_aes_ctr_init(struct crypto_tfm *tfm); int nx_crypto_ctx_aes_ctr_init(struct crypto_tfm *tfm);
...@@ -189,8 +189,8 @@ extern struct aead_alg nx_gcm_aes_alg; ...@@ -189,8 +189,8 @@ extern struct aead_alg nx_gcm_aes_alg;
extern struct aead_alg nx_gcm4106_aes_alg; extern struct aead_alg nx_gcm4106_aes_alg;
extern struct crypto_alg nx_ctr_aes_alg; extern struct crypto_alg nx_ctr_aes_alg;
extern struct crypto_alg nx_ctr3686_aes_alg; extern struct crypto_alg nx_ctr3686_aes_alg;
extern struct crypto_alg nx_ccm_aes_alg; extern struct aead_alg nx_ccm_aes_alg;
extern struct crypto_alg nx_ccm4309_aes_alg; extern struct aead_alg nx_ccm4309_aes_alg;
extern struct shash_alg nx_shash_aes_xcbc_alg; extern struct shash_alg nx_shash_aes_xcbc_alg;
extern struct shash_alg nx_shash_sha512_alg; extern struct shash_alg nx_shash_sha512_alg;
extern struct shash_alg nx_shash_sha256_alg; extern struct shash_alg nx_shash_sha256_alg;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment