Commit 626ddb2f authored by Eric Biggers, committed by Herbert Xu

crypto: powerpc - convert to use crypto_simd_usable()

Replace all calls to in_interrupt() in the PowerPC crypto code with
!crypto_simd_usable().  This causes the crypto self-tests to test the
no-SIMD code paths when CONFIG_CRYPTO_MANAGER_EXTRA_TESTS=y.

The p8_ghash algorithm is currently failing and needs to be fixed, as it
produces the wrong digest when no-SIMD updates are mixed with SIMD ones.
Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 0edf8593
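
For context before the diff: unlike a bare in_interrupt() check, crypto_simd_usable() can also be forced off by the crypto self-tests, which is what makes the no-SIMD fallback paths reachable under CONFIG_CRYPTO_MANAGER_EXTRA_TESTS. Roughly, the helper in <crypto/internal/simd.h> looks like this (paraphrased from the kernel of this era, not a verbatim quote):

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
DECLARE_PER_CPU(bool, crypto_simd_disabled_for_test);

static __must_check inline bool crypto_simd_usable(void)
{
        /* testmgr flips this per-CPU flag to force drivers onto their fallbacks */
        return may_use_simd() && !this_cpu_read(crypto_simd_disabled_for_test);
}
#else
static __must_check inline bool crypto_simd_usable(void)
{
        return may_use_simd();
}
#endif

may_use_simd() is the architecture's usual SIMD-availability check; on powerpc it now comes from the asm-generic header added by the Kbuild hunk below.
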
arch/powerpc/crypto/crc32c-vpmsum_glue.c:

 #include <linux/crc32.h>
 #include <crypto/internal/hash.h>
+#include <crypto/internal/simd.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/string.h>
 #include <linux/kernel.h>
 #include <linux/cpufeature.h>
+#include <asm/simd.h>
 #include <asm/switch_to.h>

 #define CHKSUM_BLOCK_SIZE 1
@@ -22,7 +24,7 @@ static u32 crc32c_vpmsum(u32 crc, unsigned char const *p, size_t len)
 	unsigned int prealign;
 	unsigned int tail;

-	if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) || in_interrupt())
+	if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) || !crypto_simd_usable())
 		return __crc32c_le(crc, p, len);

 	if ((unsigned long)p & VMX_ALIGN_MASK) {

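The reason the usability check sits at the top of crc32c_vpmsum() is that the vector path must run in process context: it disables preemption and page faults and enables the Altivec unit around the assembly core. A reconstructed sketch of that shape follows; only the lines quoted in the hunk above are taken from the file, the rest (the __crc32c_vpmsum assembly entry point and the exact enable/disable sequence) is an approximation and may differ in detail:

static u32 crc32c_vpmsum(u32 crc, unsigned char const *p, size_t len)
{
        unsigned int prealign;
        unsigned int tail;

        /* short buffers, or SIMD not usable: fall back to the table-driven CRC */
        if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) || !crypto_simd_usable())
                return __crc32c_le(crc, p, len);

        /* bring the pointer up to VMX alignment with the generic code */
        if ((unsigned long)p & VMX_ALIGN_MASK) {
                prealign = VMX_ALIGN - ((unsigned long)p & VMX_ALIGN_MASK);
                crc = __crc32c_le(crc, p, prealign);
                len -= prealign;
                p += prealign;
        }

        /* bulk of the buffer: vector code, which needs process context */
        if (len & ~VMX_ALIGN_MASK) {
                preempt_disable();
                pagefault_disable();
                enable_kernel_altivec();
                crc = __crc32c_vpmsum(crc, p, len & ~VMX_ALIGN_MASK);
                disable_kernel_altivec();
                pagefault_enable();
                preempt_enable();
        }

        /* unaligned tail, again with the generic code */
        tail = len & VMX_ALIGN_MASK;
        if (tail) {
                p += len & ~VMX_ALIGN_MASK;
                crc = __crc32c_le(crc, p, tail);
        }

        return crc;
}
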
arch/powerpc/crypto/crct10dif-vpmsum_glue.c:

@@ -12,11 +12,13 @@
 #include <linux/crc-t10dif.h>
 #include <crypto/internal/hash.h>
+#include <crypto/internal/simd.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/string.h>
 #include <linux/kernel.h>
 #include <linux/cpufeature.h>
+#include <asm/simd.h>
 #include <asm/switch_to.h>

 #define VMX_ALIGN 16
@@ -32,7 +34,7 @@ static u16 crct10dif_vpmsum(u16 crci, unsigned char const *p, size_t len)
 	unsigned int tail;
 	u32 crc = crci;

-	if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) || in_interrupt())
+	if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) || !crypto_simd_usable())
 		return crc_t10dif_generic(crc, p, len);

 	if ((unsigned long)p & VMX_ALIGN_MASK) {

arch/powerpc/include/asm/Kbuild:

@@ -11,3 +11,4 @@ generic-y += preempt.h
 generic-y += rwsem.h
 generic-y += vtime.h
 generic-y += msi.h
+generic-y += simd.h

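The Kbuild line above is what gives powerpc an <asm/simd.h> at all: it pulls in the asm-generic version, which (again paraphrased, not a verbatim quote) simply wraps the old interrupt-context test:

/* asm-generic/simd.h: SIMD is usable anywhere except interrupt context */
static __must_check inline bool may_use_simd(void)
{
        return !in_interrupt();
}

So in normal operation the vector paths are taken exactly as before; the behavioural change is only the self-test override that crypto_simd_usable() layers on top.
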
drivers/crypto/vmx/aes.c:

@@ -23,9 +23,10 @@
 #include <linux/err.h>
 #include <linux/crypto.h>
 #include <linux/delay.h>
-#include <linux/hardirq.h>
+#include <asm/simd.h>
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
+#include <crypto/internal/simd.h>
 #include "aesp8-ppc.h"
@@ -92,7 +93,7 @@ static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

-	if (in_interrupt()) {
+	if (!crypto_simd_usable()) {
 		crypto_cipher_encrypt_one(ctx->fallback, dst, src);
 	} else {
 		preempt_disable();
@@ -109,7 +110,7 @@ static void p8_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

-	if (in_interrupt()) {
+	if (!crypto_simd_usable()) {
 		crypto_cipher_decrypt_one(ctx->fallback, dst, src);
 	} else {
 		preempt_disable();

drivers/crypto/vmx/aes_cbc.c:

@@ -23,9 +23,10 @@
 #include <linux/err.h>
 #include <linux/crypto.h>
 #include <linux/delay.h>
-#include <linux/hardirq.h>
+#include <asm/simd.h>
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
+#include <crypto/internal/simd.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/skcipher.h>
@@ -100,7 +101,7 @@ static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
 	struct p8_aes_cbc_ctx *ctx =
 		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

-	if (in_interrupt()) {
+	if (!crypto_simd_usable()) {
 		SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
 		skcipher_request_set_sync_tfm(req, ctx->fallback);
 		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
@@ -139,7 +140,7 @@ static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
 	struct p8_aes_cbc_ctx *ctx =
 		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

-	if (in_interrupt()) {
+	if (!crypto_simd_usable()) {
 		SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
 		skcipher_request_set_sync_tfm(req, ctx->fallback);
 		skcipher_request_set_callback(req, desc->flags, NULL, NULL);

drivers/crypto/vmx/aes_ctr.c:

@@ -23,9 +23,10 @@
 #include <linux/err.h>
 #include <linux/crypto.h>
 #include <linux/delay.h>
-#include <linux/hardirq.h>
+#include <asm/simd.h>
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
+#include <crypto/internal/simd.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/skcipher.h>
@@ -119,7 +120,7 @@ static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
 	struct p8_aes_ctr_ctx *ctx =
 		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

-	if (in_interrupt()) {
+	if (!crypto_simd_usable()) {
 		SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
 		skcipher_request_set_sync_tfm(req, ctx->fallback);
 		skcipher_request_set_callback(req, desc->flags, NULL, NULL);

drivers/crypto/vmx/aes_xts.c:

@@ -23,9 +23,10 @@
 #include <linux/err.h>
 #include <linux/crypto.h>
 #include <linux/delay.h>
-#include <linux/hardirq.h>
+#include <asm/simd.h>
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
+#include <crypto/internal/simd.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/xts.h>
 #include <crypto/skcipher.h>
@@ -109,7 +110,7 @@ static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
 	struct p8_aes_xts_ctx *ctx =
 		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

-	if (in_interrupt()) {
+	if (!crypto_simd_usable()) {
 		SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
 		skcipher_request_set_sync_tfm(req, ctx->fallback);
 		skcipher_request_set_callback(req, desc->flags, NULL, NULL);

drivers/crypto/vmx/ghash.c:

@@ -23,16 +23,15 @@
 #include <linux/err.h>
 #include <linux/crypto.h>
 #include <linux/delay.h>
-#include <linux/hardirq.h>
+#include <asm/simd.h>
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
 #include <crypto/ghash.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/internal/hash.h>
+#include <crypto/internal/simd.h>
 #include <crypto/b128ops.h>

-#define IN_INTERRUPT in_interrupt()
-
 void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
 void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
 void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
@@ -131,7 +130,7 @@ static int p8_ghash_update(struct shash_desc *desc,
 	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
 	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

-	if (IN_INTERRUPT) {
+	if (!crypto_simd_usable()) {
 		return crypto_shash_update(&dctx->fallback_desc, src,
 					   srclen);
 	} else {
@@ -182,7 +181,7 @@ static int p8_ghash_final(struct shash_desc *desc, u8 *out)
 	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
 	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

-	if (IN_INTERRUPT) {
+	if (!crypto_simd_usable()) {
 		return crypto_shash_final(&dctx->fallback_desc, out);
 	} else {
 		if (dctx->bytes) {