Commit 37b6aab6 authored by Ard Biesheuvel, committed by Herbert Xu

crypto: arm64/ghash - drop PMULL based shash

There are two ways to implement SIMD accelerated GCM on arm64:
- using the PMULL instructions for carryless 64x64->128 multiplication,
  in which case the architecture guarantees that the AES instructions are
  available as well, and so we can use the AEAD implementation that combines
  both,
- using the PMULL instructions for carryless 8x8->16 bit multiplication,
  which is implemented as a shash, and can be combined with any ctr(aes)
  implementation by the generic GCM AEAD template driver.

So let's drop the 64x64->128 shash driver, which is never needed for GCM
and is not suitable for use anywhere else.
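
As a rough illustration (not part of the driver; the function name clmul_64x64 is invented for this example), the carryless multiplication mentioned above is simply multiplication of polynomials over GF(2): partial products are XORed together instead of added, so no carries propagate. A single 64-bit PMULL instruction computes the whole 64x64->128 product in one step, whereas the 8-bit variant only produces 8x8->16 partial products that the NEON code has to combine itself.

    #include <stdint.h>

    /*
     * Schoolbook carryless multiply: 'a' and 'b' are bit vectors of
     * polynomial coefficients; partial products are XORed, never added.
     * res[0] holds the low 64 bits, res[1] the high 64 bits.
     */
    void clmul_64x64(uint64_t a, uint64_t b, uint64_t res[2])
    {
        uint64_t lo = 0, hi = 0;

        for (int i = 0; i < 64; i++) {
            if (b & (1ULL << i)) {
                lo ^= a << i;                   /* low half of a << i  */
                hi ^= i ? a >> (64 - i) : 0;    /* high half of a << i */
            }
        }
        res[0] = lo;
        res[1] = hi;
    }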
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 813ec3f1
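
For context only, and not as part of this patch, here is a hedged sketch of how a kernel caller might use the result: requesting "gcm(aes)" through the crypto API either resolves to the PMULL/AES AEAD registered by this driver (on CPUs with the Crypto Extensions) or causes the API to instantiate the generic gcm template around some ctr(aes) implementation and the ghash-neon shash that this patch keeps. The helper name demo_gcm_encrypt and its prototype are made up for the example.

    #include <crypto/aead.h>
    #include <linux/err.h>
    #include <linux/scatterlist.h>

    /*
     * Illustrative helper: encrypt 'len' bytes in place and append the
     * 16-byte tag, so 'sg' must describe at least len + 16 bytes.
     */
    static int demo_gcm_encrypt(const u8 *key, unsigned int keylen, u8 iv[12],
                                struct scatterlist *sg, unsigned int len)
    {
        DECLARE_CRYPTO_WAIT(wait);
        struct crypto_aead *tfm;
        struct aead_request *req;
        int err;

        /* picks the highest-priority "gcm(aes)" provider available */
        tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
        if (IS_ERR(tfm))
            return PTR_ERR(tfm);

        err = crypto_aead_setkey(tfm, key, keylen) ?:
              crypto_aead_setauthsize(tfm, 16);
        if (err)
            goto out_free_tfm;

        req = aead_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
            err = -ENOMEM;
            goto out_free_tfm;
        }

        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                  crypto_req_done, &wait);
        aead_request_set_ad(req, 0);                  /* no associated data */
        aead_request_set_crypt(req, sg, sg, len, iv); /* in-place */

        err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

        aead_request_free(req);
    out_free_tfm:
        crypto_free_aead(tfm);
        return err;
    }

Either way the caller never needs to know which backend was chosen; the cra_priority values decide, with the combined AEAD ranking above the generic template.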
@@ -113,12 +113,8 @@ static void ghash_do_update(int blocks, u64 dg[], const char *src,
 /* avoid hogging the CPU for too long */
 #define MAX_BLOCKS	(SZ_64K / GHASH_BLOCK_SIZE)
 
-static int __ghash_update(struct shash_desc *desc, const u8 *src,
-			  unsigned int len,
-			  void (*simd_update)(int blocks, u64 dg[],
-					      const char *src,
-					      struct ghash_key const *k,
-					      const char *head))
+static int ghash_update(struct shash_desc *desc, const u8 *src,
+			unsigned int len)
 {
 	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
@@ -145,7 +141,7 @@ static int __ghash_update(struct shash_desc *desc, const u8 *src,
 
 			ghash_do_update(chunk, ctx->digest, src, key,
 					partial ? ctx->buf : NULL,
-					simd_update);
+					pmull_ghash_update_p8);
 
 			blocks -= chunk;
 			src += chunk * GHASH_BLOCK_SIZE;
@@ -157,19 +153,7 @@ static int __ghash_update(struct shash_desc *desc, const u8 *src,
 	return 0;
 }
 
-static int ghash_update_p8(struct shash_desc *desc, const u8 *src,
-			   unsigned int len)
-{
-	return __ghash_update(desc, src, len, pmull_ghash_update_p8);
-}
-
-static int ghash_update_p64(struct shash_desc *desc, const u8 *src,
-			    unsigned int len)
-{
-	return __ghash_update(desc, src, len, pmull_ghash_update_p64);
-}
-
-static int ghash_final_p8(struct shash_desc *desc, u8 *dst)
+static int ghash_final(struct shash_desc *desc, u8 *dst)
 {
 	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
@@ -189,26 +173,6 @@ static int ghash_final_p8(struct shash_desc *desc, u8 *dst)
 	return 0;
 }
 
-static int ghash_final_p64(struct shash_desc *desc, u8 *dst)
-{
-	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
-	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
-
-	if (partial) {
-		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
-
-		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
-		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL,
-				pmull_ghash_update_p64);
-	}
-	put_unaligned_be64(ctx->digest[1], dst);
-	put_unaligned_be64(ctx->digest[0], dst + 8);
-
-	*ctx = (struct ghash_desc_ctx){};
-
-	return 0;
-}
-
 static void ghash_reflect(u64 h[], const be128 *k)
 {
 	u64 carry = be64_to_cpu(k->a) & BIT(63) ? 1 : 0;
@@ -254,7 +218,7 @@ static int ghash_setkey(struct crypto_shash *tfm,
 	return __ghash_setkey(key, inkey, keylen);
 }
 
-static struct shash_alg ghash_alg[] = {{
+static struct shash_alg ghash_alg = {
 	.base.cra_name		= "ghash",
 	.base.cra_driver_name	= "ghash-neon",
 	.base.cra_priority	= 150,
@@ -264,25 +228,11 @@ static struct shash_alg ghash_alg[] = {{
 
 	.digestsize		= GHASH_DIGEST_SIZE,
 	.init			= ghash_init,
-	.update			= ghash_update_p8,
-	.final			= ghash_final_p8,
-	.setkey			= ghash_setkey,
-	.descsize		= sizeof(struct ghash_desc_ctx),
-}, {
-	.base.cra_name		= "ghash",
-	.base.cra_driver_name	= "ghash-ce",
-	.base.cra_priority	= 200,
-	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
-	.base.cra_ctxsize	= sizeof(struct ghash_key),
-	.base.cra_module	= THIS_MODULE,
-
-	.digestsize		= GHASH_DIGEST_SIZE,
-	.init			= ghash_init,
-	.update			= ghash_update_p64,
-	.final			= ghash_final_p64,
+	.update			= ghash_update,
+	.final			= ghash_final,
 	.setkey			= ghash_setkey,
 	.descsize		= sizeof(struct ghash_desc_ctx),
-}};
+};
 
 static int num_rounds(struct crypto_aes_ctx *ctx)
 {
@@ -641,37 +591,21 @@ static struct aead_alg gcm_aes_alg = {
 
 static int __init ghash_ce_mod_init(void)
 {
-	int ret;
-
 	if (!cpu_have_named_feature(ASIMD))
 		return -ENODEV;
 
 	if (cpu_have_named_feature(PMULL))
-		ret = crypto_register_shashes(ghash_alg,
-					      ARRAY_SIZE(ghash_alg));
-	else
-		/* only register the first array element */
-		ret = crypto_register_shash(ghash_alg);
-
-	if (ret)
-		return ret;
-
-	if (cpu_have_named_feature(PMULL)) {
-		ret = crypto_register_aead(&gcm_aes_alg);
-		if (ret)
-			crypto_unregister_shashes(ghash_alg,
						  ARRAY_SIZE(ghash_alg));
-	}
-	return ret;
+		return crypto_register_aead(&gcm_aes_alg);
+
+	return crypto_register_shash(&ghash_alg);
 }
 
 static void __exit ghash_ce_mod_exit(void)
 {
 	if (cpu_have_named_feature(PMULL))
-		crypto_unregister_shashes(ghash_alg, ARRAY_SIZE(ghash_alg));
-	else
-		crypto_unregister_shash(ghash_alg);
-	crypto_unregister_aead(&gcm_aes_alg);
+		crypto_unregister_aead(&gcm_aes_alg);
+	else
+		crypto_unregister_shash(&ghash_alg);
 }
 
 static const struct cpu_feature ghash_cpu_feature[] = {