Commit b8716614 authored by Linus Torvalds's avatar Linus Torvalds

Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

Pull crypto update from Herbert Xu:
 "* sha512 bug fixes (already in your tree).
  * SHA224/SHA384 AEAD support in caam.
  * X86-64 optimised version of Camellia.
  * Tegra AES support.
  * Bulk algorithm registration interface to make driver registration easier.
  * padata race fixes.
  * Misc fixes."

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (31 commits)
  padata: Fix race on sequence number wrap
  padata: Fix race in the serialization path
  crypto: camellia - add assembler implementation for x86_64
  crypto: camellia - rename camellia.c to camellia_generic.c
  crypto: camellia - fix checkpatch warnings
  crypto: camellia - rename camellia module to camellia_generic
  crypto: tcrypt - add more camellia tests
  crypto: testmgr - add more camellia test vectors
  crypto: camellia - simplify key setup and CAMELLIA_ROUNDSM macro
  crypto: twofish-x86_64/i586 - set alignmask to zero
  crypto: blowfish-x86_64 - set alignmask to zero
  crypto: serpent-sse2 - combine ablk_*_init functions
  crypto: blowfish-x86_64 - use crypto_[un]register_algs
  crypto: twofish-x86_64-3way - use crypto_[un]register_algs
  crypto: serpent-sse2 - use crypto_[un]register_algs
  crypto: serpent-sse2 - remove dead code from serpent_sse2_glue.c::serpent_sse2_init()
  crypto: twofish-x86 - Remove dead code from twofish_glue_3way.c::init()
  crypto: In crypto_add_alg(), 'exact' wants to be initialized to 0
  crypto: caam - fix gcc 4.6 warning
  crypto: Add bulk algorithm registration interface
  ...
parents 31f67652 2dc9b5db
...@@ -19,6 +19,7 @@ ...@@ -19,6 +19,7 @@
#include <linux/kernel.h> #include <linux/kernel.h>
#include <linux/io.h> #include <linux/io.h>
#include <linux/module.h>
#include <mach/iomap.h> #include <mach/iomap.h>
...@@ -58,6 +59,7 @@ unsigned long long tegra_chip_uid(void) ...@@ -58,6 +59,7 @@ unsigned long long tegra_chip_uid(void)
hi = fuse_readl(FUSE_UID_HIGH); hi = fuse_readl(FUSE_UID_HIGH);
return (hi << 32ull) | lo; return (hi << 32ull) | lo;
} }
EXPORT_SYMBOL(tegra_chip_uid);
int tegra_sku_id(void) int tegra_sku_id(void)
{ {
......
...@@ -8,6 +8,7 @@ obj-$(CONFIG_CRYPTO_SALSA20_586) += salsa20-i586.o ...@@ -8,6 +8,7 @@ obj-$(CONFIG_CRYPTO_SALSA20_586) += salsa20-i586.o
obj-$(CONFIG_CRYPTO_SERPENT_SSE2_586) += serpent-sse2-i586.o obj-$(CONFIG_CRYPTO_SERPENT_SSE2_586) += serpent-sse2-i586.o
obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o
obj-$(CONFIG_CRYPTO_CAMELLIA_X86_64) += camellia-x86_64.o
obj-$(CONFIG_CRYPTO_BLOWFISH_X86_64) += blowfish-x86_64.o obj-$(CONFIG_CRYPTO_BLOWFISH_X86_64) += blowfish-x86_64.o
obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o
obj-$(CONFIG_CRYPTO_TWOFISH_X86_64_3WAY) += twofish-x86_64-3way.o obj-$(CONFIG_CRYPTO_TWOFISH_X86_64_3WAY) += twofish-x86_64-3way.o
...@@ -25,6 +26,7 @@ salsa20-i586-y := salsa20-i586-asm_32.o salsa20_glue.o ...@@ -25,6 +26,7 @@ salsa20-i586-y := salsa20-i586-asm_32.o salsa20_glue.o
serpent-sse2-i586-y := serpent-sse2-i586-asm_32.o serpent_sse2_glue.o serpent-sse2-i586-y := serpent-sse2-i586-asm_32.o serpent_sse2_glue.o
aes-x86_64-y := aes-x86_64-asm_64.o aes_glue.o aes-x86_64-y := aes-x86_64-asm_64.o aes_glue.o
camellia-x86_64-y := camellia-x86_64-asm_64.o camellia_glue.o
blowfish-x86_64-y := blowfish-x86_64-asm_64.o blowfish_glue.o blowfish-x86_64-y := blowfish-x86_64-asm_64.o blowfish_glue.o
twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o
twofish-x86_64-3way-y := twofish-x86_64-asm_64-3way.o twofish_glue_3way.o twofish-x86_64-3way-y := twofish-x86_64-asm_64-3way.o twofish_glue_3way.o
......
...@@ -25,6 +25,7 @@ ...@@ -25,6 +25,7 @@
* *
*/ */
#include <asm/processor.h>
#include <crypto/blowfish.h> #include <crypto/blowfish.h>
#include <linux/crypto.h> #include <linux/crypto.h>
#include <linux/init.h> #include <linux/init.h>
...@@ -76,27 +77,6 @@ static void blowfish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) ...@@ -76,27 +77,6 @@ static void blowfish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
blowfish_dec_blk(crypto_tfm_ctx(tfm), dst, src); blowfish_dec_blk(crypto_tfm_ctx(tfm), dst, src);
} }
static struct crypto_alg bf_alg = {
.cra_name = "blowfish",
.cra_driver_name = "blowfish-asm",
.cra_priority = 200,
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = BF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct bf_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(bf_alg.cra_list),
.cra_u = {
.cipher = {
.cia_min_keysize = BF_MIN_KEY_SIZE,
.cia_max_keysize = BF_MAX_KEY_SIZE,
.cia_setkey = blowfish_setkey,
.cia_encrypt = blowfish_encrypt,
.cia_decrypt = blowfish_decrypt,
}
}
};
static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk,
void (*fn)(struct bf_ctx *, u8 *, const u8 *), void (*fn)(struct bf_ctx *, u8 *, const u8 *),
void (*fn_4way)(struct bf_ctx *, u8 *, const u8 *)) void (*fn_4way)(struct bf_ctx *, u8 *, const u8 *))
...@@ -160,28 +140,6 @@ static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, ...@@ -160,28 +140,6 @@ static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
return ecb_crypt(desc, &walk, blowfish_dec_blk, blowfish_dec_blk_4way); return ecb_crypt(desc, &walk, blowfish_dec_blk, blowfish_dec_blk_4way);
} }
static struct crypto_alg blk_ecb_alg = {
.cra_name = "ecb(blowfish)",
.cra_driver_name = "ecb-blowfish-asm",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = BF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct bf_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
.cra_u = {
.blkcipher = {
.min_keysize = BF_MIN_KEY_SIZE,
.max_keysize = BF_MAX_KEY_SIZE,
.setkey = blowfish_setkey,
.encrypt = ecb_encrypt,
.decrypt = ecb_decrypt,
},
},
};
static unsigned int __cbc_encrypt(struct blkcipher_desc *desc, static unsigned int __cbc_encrypt(struct blkcipher_desc *desc,
struct blkcipher_walk *walk) struct blkcipher_walk *walk)
{ {
...@@ -307,29 +265,6 @@ static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, ...@@ -307,29 +265,6 @@ static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
return err; return err;
} }
static struct crypto_alg blk_cbc_alg = {
.cra_name = "cbc(blowfish)",
.cra_driver_name = "cbc-blowfish-asm",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = BF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct bf_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
.cra_u = {
.blkcipher = {
.min_keysize = BF_MIN_KEY_SIZE,
.max_keysize = BF_MAX_KEY_SIZE,
.ivsize = BF_BLOCK_SIZE,
.setkey = blowfish_setkey,
.encrypt = cbc_encrypt,
.decrypt = cbc_decrypt,
},
},
};
static void ctr_crypt_final(struct bf_ctx *ctx, struct blkcipher_walk *walk) static void ctr_crypt_final(struct bf_ctx *ctx, struct blkcipher_walk *walk)
{ {
u8 *ctrblk = walk->iv; u8 *ctrblk = walk->iv;
...@@ -423,7 +358,67 @@ static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, ...@@ -423,7 +358,67 @@ static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
return err; return err;
} }
static struct crypto_alg blk_ctr_alg = { static struct crypto_alg bf_algs[4] = { {
.cra_name = "blowfish",
.cra_driver_name = "blowfish-asm",
.cra_priority = 200,
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = BF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct bf_ctx),
.cra_alignmask = 0,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(bf_algs[0].cra_list),
.cra_u = {
.cipher = {
.cia_min_keysize = BF_MIN_KEY_SIZE,
.cia_max_keysize = BF_MAX_KEY_SIZE,
.cia_setkey = blowfish_setkey,
.cia_encrypt = blowfish_encrypt,
.cia_decrypt = blowfish_decrypt,
}
}
}, {
.cra_name = "ecb(blowfish)",
.cra_driver_name = "ecb-blowfish-asm",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = BF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct bf_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(bf_algs[1].cra_list),
.cra_u = {
.blkcipher = {
.min_keysize = BF_MIN_KEY_SIZE,
.max_keysize = BF_MAX_KEY_SIZE,
.setkey = blowfish_setkey,
.encrypt = ecb_encrypt,
.decrypt = ecb_decrypt,
},
},
}, {
.cra_name = "cbc(blowfish)",
.cra_driver_name = "cbc-blowfish-asm",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = BF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct bf_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(bf_algs[2].cra_list),
.cra_u = {
.blkcipher = {
.min_keysize = BF_MIN_KEY_SIZE,
.max_keysize = BF_MAX_KEY_SIZE,
.ivsize = BF_BLOCK_SIZE,
.setkey = blowfish_setkey,
.encrypt = cbc_encrypt,
.decrypt = cbc_decrypt,
},
},
}, {
.cra_name = "ctr(blowfish)", .cra_name = "ctr(blowfish)",
.cra_driver_name = "ctr-blowfish-asm", .cra_driver_name = "ctr-blowfish-asm",
.cra_priority = 300, .cra_priority = 300,
...@@ -433,7 +428,7 @@ static struct crypto_alg blk_ctr_alg = { ...@@ -433,7 +428,7 @@ static struct crypto_alg blk_ctr_alg = {
.cra_alignmask = 0, .cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type, .cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE, .cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(blk_ctr_alg.cra_list), .cra_list = LIST_HEAD_INIT(bf_algs[3].cra_list),
.cra_u = { .cra_u = {
.blkcipher = { .blkcipher = {
.min_keysize = BF_MIN_KEY_SIZE, .min_keysize = BF_MIN_KEY_SIZE,
...@@ -444,43 +439,45 @@ static struct crypto_alg blk_ctr_alg = { ...@@ -444,43 +439,45 @@ static struct crypto_alg blk_ctr_alg = {
.decrypt = ctr_crypt, .decrypt = ctr_crypt,
}, },
}, },
}; } };
static bool is_blacklisted_cpu(void)
{
if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
return false;
if (boot_cpu_data.x86 == 0x0f) {
/*
* On Pentium 4, blowfish-x86_64 is slower than generic C
* implementation because use of 64bit rotates (which are really
* slow on P4). Therefore blacklist P4s.
*/
return true;
}
return false;
}
static int force;
module_param(force, int, 0);
MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");
static int __init init(void) static int __init init(void)
{ {
int err; if (!force && is_blacklisted_cpu()) {
printk(KERN_INFO
"blowfish-x86_64: performance on this CPU "
"would be suboptimal: disabling "
"blowfish-x86_64.\n");
return -ENODEV;
}
err = crypto_register_alg(&bf_alg); return crypto_register_algs(bf_algs, ARRAY_SIZE(bf_algs));
if (err)
goto bf_err;
err = crypto_register_alg(&blk_ecb_alg);
if (err)
goto ecb_err;
err = crypto_register_alg(&blk_cbc_alg);
if (err)
goto cbc_err;
err = crypto_register_alg(&blk_ctr_alg);
if (err)
goto ctr_err;
return 0;
ctr_err:
crypto_unregister_alg(&blk_cbc_alg);
cbc_err:
crypto_unregister_alg(&blk_ecb_alg);
ecb_err:
crypto_unregister_alg(&bf_alg);
bf_err:
return err;
} }
static void __exit fini(void) static void __exit fini(void)
{ {
crypto_unregister_alg(&blk_ctr_alg); crypto_unregister_algs(bf_algs, ARRAY_SIZE(bf_algs));
crypto_unregister_alg(&blk_cbc_alg);
crypto_unregister_alg(&blk_ecb_alg);
crypto_unregister_alg(&bf_alg);
} }
module_init(init); module_init(init);
......
This diff is collapsed.
This diff is collapsed.
...@@ -463,23 +463,20 @@ ...@@ -463,23 +463,20 @@
pand x0, x4; \ pand x0, x4; \
pxor x2, x4; pxor x2, x4;
#define transpose_4x4(x0, x1, x2, x3, t1, t2, t3) \ #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \
movdqa x2, t3; \
movdqa x0, t1; \
unpcklps x3, t3; \
movdqa x0, t2; \ movdqa x0, t2; \
unpcklps x1, t1; \ punpckldq x1, x0; \
unpckhps x1, t2; \ punpckhdq x1, t2; \
movdqa t3, x1; \ movdqa x2, t1; \
unpckhps x3, x2; \ punpckhdq x3, x2; \
movdqa t1, x0; \ punpckldq x3, t1; \
movhlps t1, x1; \ movdqa x0, x1; \
movdqa t2, t1; \ punpcklqdq t1, x0; \
movlhps t3, x0; \ punpckhqdq t1, x1; \
movlhps x2, t1; \ movdqa t2, x3; \
movhlps t2, x2; \ punpcklqdq x2, t2; \
movdqa x2, x3; \ punpckhqdq x2, x3; \
movdqa t1, x2; movdqa t2, x2;
#define read_blocks(in, x0, x1, x2, x3, t0, t1, t2) \ #define read_blocks(in, x0, x1, x2, x3, t0, t1, t2) \
movdqu (0*4*4)(in), x0; \ movdqu (0*4*4)(in), x0; \
......
...@@ -585,23 +585,20 @@ ...@@ -585,23 +585,20 @@
get_key(i, 1, RK1); \ get_key(i, 1, RK1); \
SBOX ## _2(x0 ## 2, x1 ## 2, x2 ## 2, x3 ## 2, x4 ## 2); \ SBOX ## _2(x0 ## 2, x1 ## 2, x2 ## 2, x3 ## 2, x4 ## 2); \
#define transpose_4x4(x0, x1, x2, x3, t1, t2, t3) \ #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \
movdqa x2, t3; \
movdqa x0, t1; \
unpcklps x3, t3; \
movdqa x0, t2; \ movdqa x0, t2; \
unpcklps x1, t1; \ punpckldq x1, x0; \
unpckhps x1, t2; \ punpckhdq x1, t2; \
movdqa t3, x1; \ movdqa x2, t1; \
unpckhps x3, x2; \ punpckhdq x3, x2; \
movdqa t1, x0; \ punpckldq x3, t1; \
movhlps t1, x1; \ movdqa x0, x1; \
movdqa t2, t1; \ punpcklqdq t1, x0; \
movlhps t3, x0; \ punpckhqdq t1, x1; \
movlhps x2, t1; \ movdqa t2, x3; \
movhlps t2, x2; \ punpcklqdq x2, t2; \
movdqa x2, x3; \ punpckhqdq x2, x3; \
movdqa t1, x2; movdqa t2, x2;
#define read_blocks(in, x0, x1, x2, x3, t0, t1, t2) \ #define read_blocks(in, x0, x1, x2, x3, t0, t1, t2) \
movdqu (0*4*4)(in), x0; \ movdqu (0*4*4)(in), x0; \
......
This diff is collapsed.
...@@ -68,7 +68,7 @@ static struct crypto_alg alg = { ...@@ -68,7 +68,7 @@ static struct crypto_alg alg = {
.cra_flags = CRYPTO_ALG_TYPE_CIPHER, .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = TF_BLOCK_SIZE, .cra_blocksize = TF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct twofish_ctx), .cra_ctxsize = sizeof(struct twofish_ctx),
.cra_alignmask = 3, .cra_alignmask = 0,
.cra_module = THIS_MODULE, .cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_list = LIST_HEAD_INIT(alg.cra_list),
.cra_u = { .cra_u = {
......
...@@ -25,6 +25,7 @@ ...@@ -25,6 +25,7 @@
* *
*/ */
#include <asm/processor.h>
#include <linux/crypto.h> #include <linux/crypto.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
...@@ -122,28 +123,6 @@ static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, ...@@ -122,28 +123,6 @@ static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
return ecb_crypt(desc, &walk, twofish_dec_blk, twofish_dec_blk_3way); return ecb_crypt(desc, &walk, twofish_dec_blk, twofish_dec_blk_3way);
} }
static struct crypto_alg blk_ecb_alg = {
.cra_name = "ecb(twofish)",
.cra_driver_name = "ecb-twofish-3way",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = TF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct twofish_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
.cra_u = {
.blkcipher = {
.min_keysize = TF_MIN_KEY_SIZE,
.max_keysize = TF_MAX_KEY_SIZE,
.setkey = twofish_setkey,
.encrypt = ecb_encrypt,
.decrypt = ecb_decrypt,
},
},
};
static unsigned int __cbc_encrypt(struct blkcipher_desc *desc, static unsigned int __cbc_encrypt(struct blkcipher_desc *desc,
struct blkcipher_walk *walk) struct blkcipher_walk *walk)
{ {
...@@ -267,29 +246,6 @@ static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, ...@@ -267,29 +246,6 @@ static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
return err; return err;
} }
static struct crypto_alg blk_cbc_alg = {
.cra_name = "cbc(twofish)",
.cra_driver_name = "cbc-twofish-3way",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = TF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct twofish_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
.cra_u = {
.blkcipher = {
.min_keysize = TF_MIN_KEY_SIZE,
.max_keysize = TF_MAX_KEY_SIZE,
.ivsize = TF_BLOCK_SIZE,
.setkey = twofish_setkey,
.encrypt = cbc_encrypt,
.decrypt = cbc_decrypt,
},
},
};
static inline void u128_to_be128(be128 *dst, const u128 *src) static inline void u128_to_be128(be128 *dst, const u128 *src)
{ {
dst->a = cpu_to_be64(src->a); dst->a = cpu_to_be64(src->a);
...@@ -411,29 +367,6 @@ static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, ...@@ -411,29 +367,6 @@ static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
return err; return err;
} }
static struct crypto_alg blk_ctr_alg = {
.cra_name = "ctr(twofish)",
.cra_driver_name = "ctr-twofish-3way",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct twofish_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
.cra_u = {
.blkcipher = {
.min_keysize = TF_MIN_KEY_SIZE,
.max_keysize = TF_MAX_KEY_SIZE,
.ivsize = TF_BLOCK_SIZE,
.setkey = twofish_setkey,
.encrypt = ctr_crypt,
.decrypt = ctr_crypt,
},
},
};
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes) static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{ {
const unsigned int bsize = TF_BLOCK_SIZE; const unsigned int bsize = TF_BLOCK_SIZE;
...@@ -524,30 +457,6 @@ static void lrw_exit_tfm(struct crypto_tfm *tfm) ...@@ -524,30 +457,6 @@ static void lrw_exit_tfm(struct crypto_tfm *tfm)
lrw_free_table(&ctx->lrw_table); lrw_free_table(&ctx->lrw_table);
} }
static struct crypto_alg blk_lrw_alg = {
.cra_name = "lrw(twofish)",
.cra_driver_name = "lrw-twofish-3way",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = TF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct twofish_lrw_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(blk_lrw_alg.cra_list),
.cra_exit = lrw_exit_tfm,
.cra_u = {
.blkcipher = {
.min_keysize = TF_MIN_KEY_SIZE + TF_BLOCK_SIZE,
.max_keysize = TF_MAX_KEY_SIZE + TF_BLOCK_SIZE,
.ivsize = TF_BLOCK_SIZE,
.setkey = lrw_twofish_setkey,
.encrypt = lrw_encrypt,
.decrypt = lrw_decrypt,
},
},
};
struct twofish_xts_ctx { struct twofish_xts_ctx {
struct twofish_ctx tweak_ctx; struct twofish_ctx tweak_ctx;
struct twofish_ctx crypt_ctx; struct twofish_ctx crypt_ctx;
...@@ -614,7 +523,91 @@ static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, ...@@ -614,7 +523,91 @@ static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
return xts_crypt(desc, dst, src, nbytes, &req); return xts_crypt(desc, dst, src, nbytes, &req);
} }
static struct crypto_alg blk_xts_alg = { static struct crypto_alg tf_algs[5] = { {
.cra_name = "ecb(twofish)",
.cra_driver_name = "ecb-twofish-3way",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = TF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct twofish_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(tf_algs[0].cra_list),
.cra_u = {
.blkcipher = {
.min_keysize = TF_MIN_KEY_SIZE,
.max_keysize = TF_MAX_KEY_SIZE,
.setkey = twofish_setkey,
.encrypt = ecb_encrypt,
.decrypt = ecb_decrypt,
},
},
}, {
.cra_name = "cbc(twofish)",
.cra_driver_name = "cbc-twofish-3way",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = TF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct twofish_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(tf_algs[1].cra_list),
.cra_u = {
.blkcipher = {
.min_keysize = TF_MIN_KEY_SIZE,
.max_keysize = TF_MAX_KEY_SIZE,
.ivsize = TF_BLOCK_SIZE,
.setkey = twofish_setkey,
.encrypt = cbc_encrypt,
.decrypt = cbc_decrypt,
},
},
}, {
.cra_name = "ctr(twofish)",
.cra_driver_name = "ctr-twofish-3way",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct twofish_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(tf_algs[2].cra_list),
.cra_u = {
.blkcipher = {
.min_keysize = TF_MIN_KEY_SIZE,
.max_keysize = TF_MAX_KEY_SIZE,
.ivsize = TF_BLOCK_SIZE,
.setkey = twofish_setkey,
.encrypt = ctr_crypt,
.decrypt = ctr_crypt,
},
},
}, {
.cra_name = "lrw(twofish)",
.cra_driver_name = "lrw-twofish-3way",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
.cra_blocksize = TF_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct twofish_lrw_ctx),
.cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(tf_algs[3].cra_list),
.cra_exit = lrw_exit_tfm,
.cra_u = {
.blkcipher = {
.min_keysize = TF_MIN_KEY_SIZE + TF_BLOCK_SIZE,
.max_keysize = TF_MAX_KEY_SIZE + TF_BLOCK_SIZE,
.ivsize = TF_BLOCK_SIZE,
.setkey = lrw_twofish_setkey,
.encrypt = lrw_encrypt,
.decrypt = lrw_decrypt,
},
},
}, {
.cra_name = "xts(twofish)", .cra_name = "xts(twofish)",
.cra_driver_name = "xts-twofish-3way", .cra_driver_name = "xts-twofish-3way",
.cra_priority = 300, .cra_priority = 300,
...@@ -624,7 +617,7 @@ static struct crypto_alg blk_xts_alg = { ...@@ -624,7 +617,7 @@ static struct crypto_alg blk_xts_alg = {
.cra_alignmask = 0, .cra_alignmask = 0,
.cra_type = &crypto_blkcipher_type, .cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE, .cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(blk_xts_alg.cra_list), .cra_list = LIST_HEAD_INIT(tf_algs[4].cra_list),
.cra_u = { .cra_u = {
.blkcipher = { .blkcipher = {
.min_keysize = TF_MIN_KEY_SIZE * 2, .min_keysize = TF_MIN_KEY_SIZE * 2,
...@@ -635,50 +628,62 @@ static struct crypto_alg blk_xts_alg = { ...@@ -635,50 +628,62 @@ static struct crypto_alg blk_xts_alg = {
.decrypt = xts_decrypt, .decrypt = xts_decrypt,
}, },
}, },
}; } };
static bool is_blacklisted_cpu(void)
{
if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
return false;
if (boot_cpu_data.x86 == 0x06 &&
(boot_cpu_data.x86_model == 0x1c ||
boot_cpu_data.x86_model == 0x26 ||
boot_cpu_data.x86_model == 0x36)) {
/*
* On Atom, twofish-3way is slower than original assembler
* implementation. Twofish-3way trades off some performance in
* storing blocks in 64bit registers to allow three blocks to
* be processed parallel. Parallel operation then allows gaining
* more performance than was traded off, on out-of-order CPUs.
* However, Atom does not benefit from this parallelism and
* should be blacklisted.
*/
return true;
}
if (boot_cpu_data.x86 == 0x0f) {
/*
* On Pentium 4, twofish-3way is slower than original assembler
* implementation because of the excessive use of 64bit rotates and
* left-shifts (which are really slow on P4) needed to store and
* handle 128bit block in two 64bit registers.
*/
return true;
}
return false;
}
static int force;
module_param(force, int, 0);
MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");
int __init init(void) int __init init(void)
{ {
int err; if (!force && is_blacklisted_cpu()) {
printk(KERN_INFO
"twofish-x86_64-3way: performance on this CPU "
"would be suboptimal: disabling "
"twofish-x86_64-3way.\n");
return -ENODEV;
}
err = crypto_register_alg(&blk_ecb_alg); return crypto_register_algs(tf_algs, ARRAY_SIZE(tf_algs));
if (err)
goto ecb_err;
err = crypto_register_alg(&blk_cbc_alg);
if (err)
goto cbc_err;
err = crypto_register_alg(&blk_ctr_alg);
if (err)
goto ctr_err;
err = crypto_register_alg(&blk_lrw_alg);
if (err)
goto blk_lrw_err;
err = crypto_register_alg(&blk_xts_alg);
if (err)
goto blk_xts_err;
return 0;
crypto_unregister_alg(&blk_xts_alg);
blk_xts_err:
crypto_unregister_alg(&blk_lrw_alg);
blk_lrw_err:
crypto_unregister_alg(&blk_ctr_alg);
ctr_err:
crypto_unregister_alg(&blk_cbc_alg);
cbc_err:
crypto_unregister_alg(&blk_ecb_alg);
ecb_err:
return err;
} }
void __exit fini(void) void __exit fini(void)
{ {
crypto_unregister_alg(&blk_xts_alg); crypto_unregister_algs(tf_algs, ARRAY_SIZE(tf_algs));
crypto_unregister_alg(&blk_lrw_alg);
crypto_unregister_alg(&blk_ctr_alg);
crypto_unregister_alg(&blk_cbc_alg);
crypto_unregister_alg(&blk_ecb_alg);
} }
module_init(init); module_init(init);
......
...@@ -654,6 +654,24 @@ config CRYPTO_CAMELLIA ...@@ -654,6 +654,24 @@ config CRYPTO_CAMELLIA
See also: See also:
<https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html> <https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html>
config CRYPTO_CAMELLIA_X86_64
tristate "Camellia cipher algorithm (x86_64)"
depends on (X86 || UML_X86) && 64BIT
depends on CRYPTO
select CRYPTO_ALGAPI
select CRYPTO_LRW
select CRYPTO_XTS
help
Camellia cipher algorithm module (x86_64).
Camellia is a symmetric key block cipher developed jointly
at NTT and Mitsubishi Electric Corporation.
Camellia specifies three key sizes: 128, 192 and 256 bits.
See also:
<https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html>
config CRYPTO_CAST5 config CRYPTO_CAST5
tristate "CAST5 (CAST-128) cipher algorithm" tristate "CAST5 (CAST-128) cipher algorithm"
select CRYPTO_ALGAPI select CRYPTO_ALGAPI
......
...@@ -67,7 +67,7 @@ obj-$(CONFIG_CRYPTO_TWOFISH) += twofish_generic.o ...@@ -67,7 +67,7 @@ obj-$(CONFIG_CRYPTO_TWOFISH) += twofish_generic.o
obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o
obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o
obj-$(CONFIG_CRYPTO_AES) += aes_generic.o obj-$(CONFIG_CRYPTO_AES) += aes_generic.o
obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia.o obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia_generic.o
obj-$(CONFIG_CRYPTO_CAST5) += cast5.o obj-$(CONFIG_CRYPTO_CAST5) += cast5.o
obj-$(CONFIG_CRYPTO_CAST6) += cast6.o obj-$(CONFIG_CRYPTO_CAST6) += cast6.o
obj-$(CONFIG_CRYPTO_ARC4) += arc4.o obj-$(CONFIG_CRYPTO_ARC4) += arc4.o
......
...@@ -405,6 +405,41 @@ int crypto_unregister_alg(struct crypto_alg *alg) ...@@ -405,6 +405,41 @@ int crypto_unregister_alg(struct crypto_alg *alg)
} }
EXPORT_SYMBOL_GPL(crypto_unregister_alg); EXPORT_SYMBOL_GPL(crypto_unregister_alg);
int crypto_register_algs(struct crypto_alg *algs, int count)
{
int i, ret;
for (i = 0; i < count; i++) {
ret = crypto_register_alg(&algs[i]);
if (ret)
goto err;
}
return 0;
err:
for (--i; i >= 0; --i)
crypto_unregister_alg(&algs[i]);
return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);
int crypto_unregister_algs(struct crypto_alg *algs, int count)
{
int i, ret;
for (i = 0; i < count; i++) {
ret = crypto_unregister_alg(&algs[i]);
if (ret)
pr_err("Failed to unregister %s %s: %d\n",
algs[i].cra_driver_name, algs[i].cra_name, ret);
}
return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);
int crypto_register_template(struct crypto_template *tmpl) int crypto_register_template(struct crypto_template *tmpl)
{ {
struct crypto_template *q; struct crypto_template *q;
......
...@@ -337,43 +337,40 @@ static const u32 camellia_sp4404[256] = { ...@@ -337,43 +337,40 @@ static const u32 camellia_sp4404[256] = {
/* /*
* macros * macros
*/ */
#define ROLDQ(ll, lr, rl, rr, w0, w1, bits) \ #define ROLDQ(ll, lr, rl, rr, w0, w1, bits) ({ \
do { \
w0 = ll; \ w0 = ll; \
ll = (ll << bits) + (lr >> (32 - bits)); \ ll = (ll << bits) + (lr >> (32 - bits)); \
lr = (lr << bits) + (rl >> (32 - bits)); \ lr = (lr << bits) + (rl >> (32 - bits)); \
rl = (rl << bits) + (rr >> (32 - bits)); \ rl = (rl << bits) + (rr >> (32 - bits)); \
rr = (rr << bits) + (w0 >> (32 - bits)); \ rr = (rr << bits) + (w0 >> (32 - bits)); \
} while (0) })
#define ROLDQo32(ll, lr, rl, rr, w0, w1, bits) \ #define ROLDQo32(ll, lr, rl, rr, w0, w1, bits) ({ \
do { \
w0 = ll; \ w0 = ll; \
w1 = lr; \ w1 = lr; \
ll = (lr << (bits - 32)) + (rl >> (64 - bits)); \ ll = (lr << (bits - 32)) + (rl >> (64 - bits)); \
lr = (rl << (bits - 32)) + (rr >> (64 - bits)); \ lr = (rl << (bits - 32)) + (rr >> (64 - bits)); \
rl = (rr << (bits - 32)) + (w0 >> (64 - bits)); \ rl = (rr << (bits - 32)) + (w0 >> (64 - bits)); \
rr = (w0 << (bits - 32)) + (w1 >> (64 - bits)); \ rr = (w0 << (bits - 32)) + (w1 >> (64 - bits)); \
} while (0) })
#define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) \ #define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) ({ \
do { \
il = xl ^ kl; \ il = xl ^ kl; \
ir = xr ^ kr; \ ir = xr ^ kr; \
t0 = il >> 16; \ t0 = il >> 16; \
t1 = ir >> 16; \ t1 = ir >> 16; \
yl = camellia_sp1110[(u8)(ir )] \ yl = camellia_sp1110[(u8)(ir)] \
^ camellia_sp0222[ (t1 >> 8)] \ ^ camellia_sp0222[(u8)(t1 >> 8)] \
^ camellia_sp3033[(u8)(t1 )] \ ^ camellia_sp3033[(u8)(t1)] \
^ camellia_sp4404[(u8)(ir >> 8)]; \ ^ camellia_sp4404[(u8)(ir >> 8)]; \
yr = camellia_sp1110[ (t0 >> 8)] \ yr = camellia_sp1110[(u8)(t0 >> 8)] \
^ camellia_sp0222[(u8)(t0 )] \ ^ camellia_sp0222[(u8)(t0)] \
^ camellia_sp3033[(u8)(il >> 8)] \ ^ camellia_sp3033[(u8)(il >> 8)] \
^ camellia_sp4404[(u8)(il )]; \ ^ camellia_sp4404[(u8)(il)]; \
yl ^= yr; \ yl ^= yr; \
yr = ror32(yr, 8); \ yr = ror32(yr, 8); \
yr ^= yl; \ yr ^= yl; \
} while (0) })
#define SUBKEY_L(INDEX) (subkey[(INDEX)*2]) #define SUBKEY_L(INDEX) (subkey[(INDEX)*2])
#define SUBKEY_R(INDEX) (subkey[(INDEX)*2 + 1]) #define SUBKEY_R(INDEX) (subkey[(INDEX)*2 + 1])
...@@ -382,7 +379,6 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -382,7 +379,6 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
{ {
u32 dw, tl, tr; u32 dw, tl, tr;
u32 kw4l, kw4r; u32 kw4l, kw4r;
int i;
/* absorb kw2 to other subkeys */ /* absorb kw2 to other subkeys */
/* round 2 */ /* round 2 */
...@@ -557,24 +553,6 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -557,24 +553,6 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_L(32) = subL[32] ^ subL[31]; /* kw3 */ SUBKEY_L(32) = subL[32] ^ subL[31]; /* kw3 */
SUBKEY_R(32) = subR[32] ^ subR[31]; SUBKEY_R(32) = subR[32] ^ subR[31];
} }
/* apply the inverse of the last half of P-function */
i = 2;
do {
dw = SUBKEY_L(i + 0) ^ SUBKEY_R(i + 0); dw = rol32(dw, 8);/* round 1 */
SUBKEY_R(i + 0) = SUBKEY_L(i + 0) ^ dw; SUBKEY_L(i + 0) = dw;
dw = SUBKEY_L(i + 1) ^ SUBKEY_R(i + 1); dw = rol32(dw, 8);/* round 2 */
SUBKEY_R(i + 1) = SUBKEY_L(i + 1) ^ dw; SUBKEY_L(i + 1) = dw;
dw = SUBKEY_L(i + 2) ^ SUBKEY_R(i + 2); dw = rol32(dw, 8);/* round 3 */
SUBKEY_R(i + 2) = SUBKEY_L(i + 2) ^ dw; SUBKEY_L(i + 2) = dw;
dw = SUBKEY_L(i + 3) ^ SUBKEY_R(i + 3); dw = rol32(dw, 8);/* round 4 */
SUBKEY_R(i + 3) = SUBKEY_L(i + 3) ^ dw; SUBKEY_L(i + 3) = dw;
dw = SUBKEY_L(i + 4) ^ SUBKEY_R(i + 4); dw = rol32(dw, 8);/* round 5 */
SUBKEY_R(i + 4) = SUBKEY_L(i + 4) ^ dw; SUBKEY_L(i + 4) = dw;
dw = SUBKEY_L(i + 5) ^ SUBKEY_R(i + 5); dw = rol32(dw, 8);/* round 6 */
SUBKEY_R(i + 5) = SUBKEY_L(i + 5) ^ dw; SUBKEY_L(i + 5) = dw;
i += 8;
} while (i < max);
} }
static void camellia_setup128(const unsigned char *key, u32 *subkey) static void camellia_setup128(const unsigned char *key, u32 *subkey)
...@@ -851,8 +829,7 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey) ...@@ -851,8 +829,7 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
/* /*
* Encrypt/decrypt * Encrypt/decrypt
*/ */
#define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) \ #define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) ({ \
do { \
t0 = kll; \ t0 = kll; \
t2 = krr; \ t2 = krr; \
t0 &= ll; \ t0 &= ll; \
...@@ -865,23 +842,23 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey) ...@@ -865,23 +842,23 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
t1 |= lr; \ t1 |= lr; \
ll ^= t1; \ ll ^= t1; \
rr ^= rol32(t3, 1); \ rr ^= rol32(t3, 1); \
} while (0) })
#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) \ #define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) ({ \
do { \ yl ^= kl; \
yr ^= kr; \
ir = camellia_sp1110[(u8)xr]; \ ir = camellia_sp1110[(u8)xr]; \
il = camellia_sp1110[ (xl >> 24)]; \ il = camellia_sp1110[(u8)(xl >> 24)]; \
ir ^= camellia_sp0222[ (xr >> 24)]; \ ir ^= camellia_sp0222[(u8)(xr >> 24)]; \
il ^= camellia_sp0222[(u8)(xl >> 16)]; \ il ^= camellia_sp0222[(u8)(xl >> 16)]; \
ir ^= camellia_sp3033[(u8)(xr >> 16)]; \ ir ^= camellia_sp3033[(u8)(xr >> 16)]; \
il ^= camellia_sp3033[(u8)(xl >> 8)]; \ il ^= camellia_sp3033[(u8)(xl >> 8)]; \
ir ^= camellia_sp4404[(u8)(xr >> 8)]; \ ir ^= camellia_sp4404[(u8)(xr >> 8)]; \
il ^= camellia_sp4404[(u8)xl]; \ il ^= camellia_sp4404[(u8)xl]; \
il ^= kl; \ ir ^= il; \
ir ^= il ^ kr; \
yl ^= ir; \ yl ^= ir; \
yr ^= ror32(il, 8) ^ ir; \ yr ^= ror32(il, 8) ^ ir; \
} while (0) })
/* max = 24: 128bit encrypt, max = 32: 256bit encrypt */ /* max = 24: 128bit encrypt, max = 32: 256bit encrypt */
static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max) static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max)
...@@ -893,7 +870,7 @@ static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max) ...@@ -893,7 +870,7 @@ static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max)
io[1] ^= SUBKEY_R(0); io[1] ^= SUBKEY_R(0);
/* main iteration */ /* main iteration */
#define ROUNDS(i) do { \ #define ROUNDS(i) ({ \
CAMELLIA_ROUNDSM(io[0], io[1], \ CAMELLIA_ROUNDSM(io[0], io[1], \
SUBKEY_L(i + 2), SUBKEY_R(i + 2), \ SUBKEY_L(i + 2), SUBKEY_R(i + 2), \
io[2], io[3], il, ir); \ io[2], io[3], il, ir); \
...@@ -912,13 +889,13 @@ static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max) ...@@ -912,13 +889,13 @@ static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max)
CAMELLIA_ROUNDSM(io[2], io[3], \ CAMELLIA_ROUNDSM(io[2], io[3], \
SUBKEY_L(i + 7), SUBKEY_R(i + 7), \ SUBKEY_L(i + 7), SUBKEY_R(i + 7), \
io[0], io[1], il, ir); \ io[0], io[1], il, ir); \
} while (0) })
#define FLS(i) do { \ #define FLS(i) ({ \
CAMELLIA_FLS(io[0], io[1], io[2], io[3], \ CAMELLIA_FLS(io[0], io[1], io[2], io[3], \
SUBKEY_L(i + 0), SUBKEY_R(i + 0), \ SUBKEY_L(i + 0), SUBKEY_R(i + 0), \
SUBKEY_L(i + 1), SUBKEY_R(i + 1), \ SUBKEY_L(i + 1), SUBKEY_R(i + 1), \
t0, t1, il, ir); \ t0, t1, il, ir); \
} while (0) })
ROUNDS(0); ROUNDS(0);
FLS(8); FLS(8);
...@@ -948,7 +925,7 @@ static void camellia_do_decrypt(const u32 *subkey, u32 *io, unsigned i) ...@@ -948,7 +925,7 @@ static void camellia_do_decrypt(const u32 *subkey, u32 *io, unsigned i)
io[1] ^= SUBKEY_R(i); io[1] ^= SUBKEY_R(i);
/* main iteration */ /* main iteration */
#define ROUNDS(i) do { \ #define ROUNDS(i) ({ \
CAMELLIA_ROUNDSM(io[0], io[1], \ CAMELLIA_ROUNDSM(io[0], io[1], \
SUBKEY_L(i + 7), SUBKEY_R(i + 7), \ SUBKEY_L(i + 7), SUBKEY_R(i + 7), \
io[2], io[3], il, ir); \ io[2], io[3], il, ir); \
...@@ -967,13 +944,13 @@ static void camellia_do_decrypt(const u32 *subkey, u32 *io, unsigned i) ...@@ -967,13 +944,13 @@ static void camellia_do_decrypt(const u32 *subkey, u32 *io, unsigned i)
CAMELLIA_ROUNDSM(io[2], io[3], \ CAMELLIA_ROUNDSM(io[2], io[3], \
SUBKEY_L(i + 2), SUBKEY_R(i + 2), \ SUBKEY_L(i + 2), SUBKEY_R(i + 2), \
io[0], io[1], il, ir); \ io[0], io[1], il, ir); \
} while (0) })
#define FLS(i) do { \ #define FLS(i) ({ \
CAMELLIA_FLS(io[0], io[1], io[2], io[3], \ CAMELLIA_FLS(io[0], io[1], io[2], io[3], \
SUBKEY_L(i + 1), SUBKEY_R(i + 1), \ SUBKEY_L(i + 1), SUBKEY_R(i + 1), \
SUBKEY_L(i + 0), SUBKEY_R(i + 0), \ SUBKEY_L(i + 0), SUBKEY_R(i + 0), \
t0, t1, il, ir); \ t0, t1, il, ir); \
} while (0) })
if (i == 32) { if (i == 32) {
ROUNDS(24); ROUNDS(24);
...@@ -1035,6 +1012,7 @@ static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) ...@@ -1035,6 +1012,7 @@ static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm); const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);
const __be32 *src = (const __be32 *)in; const __be32 *src = (const __be32 *)in;
__be32 *dst = (__be32 *)out; __be32 *dst = (__be32 *)out;
unsigned int max;
u32 tmp[4]; u32 tmp[4];
...@@ -1043,9 +1021,12 @@ static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) ...@@ -1043,9 +1021,12 @@ static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
tmp[2] = be32_to_cpu(src[2]); tmp[2] = be32_to_cpu(src[2]);
tmp[3] = be32_to_cpu(src[3]); tmp[3] = be32_to_cpu(src[3]);
camellia_do_encrypt(cctx->key_table, tmp, if (cctx->key_length == 16)
cctx->key_length == 16 ? 24 : 32 /* for key lengths of 24 and 32 */ max = 24;
); else
max = 32; /* for key lengths of 24 and 32 */
camellia_do_encrypt(cctx->key_table, tmp, max);
/* do_encrypt returns 0,1 swapped with 2,3 */ /* do_encrypt returns 0,1 swapped with 2,3 */
dst[0] = cpu_to_be32(tmp[2]); dst[0] = cpu_to_be32(tmp[2]);
...@@ -1059,6 +1040,7 @@ static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) ...@@ -1059,6 +1040,7 @@ static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm); const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);
const __be32 *src = (const __be32 *)in; const __be32 *src = (const __be32 *)in;
__be32 *dst = (__be32 *)out; __be32 *dst = (__be32 *)out;
unsigned int max;
u32 tmp[4]; u32 tmp[4];
...@@ -1067,9 +1049,12 @@ static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) ...@@ -1067,9 +1049,12 @@ static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
tmp[2] = be32_to_cpu(src[2]); tmp[2] = be32_to_cpu(src[2]);
tmp[3] = be32_to_cpu(src[3]); tmp[3] = be32_to_cpu(src[3]);
camellia_do_decrypt(cctx->key_table, tmp, if (cctx->key_length == 16)
cctx->key_length == 16 ? 24 : 32 /* for key lengths of 24 and 32 */ max = 24;
); else
max = 32; /* for key lengths of 24 and 32 */
camellia_do_decrypt(cctx->key_table, tmp, max);
/* do_decrypt returns 0,1 swapped with 2,3 */ /* do_decrypt returns 0,1 swapped with 2,3 */
dst[0] = cpu_to_be32(tmp[2]); dst[0] = cpu_to_be32(tmp[2]);
...@@ -1114,3 +1099,4 @@ module_exit(camellia_fini); ...@@ -1114,3 +1099,4 @@ module_exit(camellia_fini);
MODULE_DESCRIPTION("Camellia Cipher Algorithm"); MODULE_DESCRIPTION("Camellia Cipher Algorithm");
MODULE_LICENSE("GPL"); MODULE_LICENSE("GPL");
MODULE_ALIAS("camellia");
...@@ -304,7 +304,7 @@ static int crypto_del_alg(struct sk_buff *skb, struct nlmsghdr *nlh, ...@@ -304,7 +304,7 @@ static int crypto_del_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
static int crypto_add_alg(struct sk_buff *skb, struct nlmsghdr *nlh, static int crypto_add_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
struct nlattr **attrs) struct nlattr **attrs)
{ {
int exact; int exact = 0;
const char *name; const char *name;
struct crypto_alg *alg; struct crypto_alg *alg;
struct crypto_user_alg *p = nlmsg_data(nlh); struct crypto_user_alg *p = nlmsg_data(nlh);
......
...@@ -1297,6 +1297,18 @@ static int do_test(int m) ...@@ -1297,6 +1297,18 @@ static int do_test(int m)
speed_template_16_24_32); speed_template_16_24_32);
test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0, test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
speed_template_16_24_32); speed_template_16_24_32);
test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
speed_template_16_24_32);
test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
speed_template_16_24_32);
test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
speed_template_32_40_48);
test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
speed_template_32_40_48);
test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
speed_template_32_48_64);
test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
speed_template_32_48_64);
break; break;
case 206: case 206:
......
...@@ -1845,6 +1845,21 @@ static const struct alg_test_desc alg_test_descs[] = { ...@@ -1845,6 +1845,21 @@ static const struct alg_test_desc alg_test_descs[] = {
} }
} }
} }
}, {
.alg = "ctr(camellia)",
.test = alg_test_skcipher,
.suite = {
.cipher = {
.enc = {
.vecs = camellia_ctr_enc_tv_template,
.count = CAMELLIA_CTR_ENC_TEST_VECTORS
},
.dec = {
.vecs = camellia_ctr_dec_tv_template,
.count = CAMELLIA_CTR_DEC_TEST_VECTORS
}
}
}
}, { }, {
.alg = "ctr(serpent)", .alg = "ctr(serpent)",
.test = alg_test_skcipher, .test = alg_test_skcipher,
...@@ -2296,6 +2311,21 @@ static const struct alg_test_desc alg_test_descs[] = { ...@@ -2296,6 +2311,21 @@ static const struct alg_test_desc alg_test_descs[] = {
} }
} }
} }
}, {
.alg = "lrw(camellia)",
.test = alg_test_skcipher,
.suite = {
.cipher = {
.enc = {
.vecs = camellia_lrw_enc_tv_template,
.count = CAMELLIA_LRW_ENC_TEST_VECTORS
},
.dec = {
.vecs = camellia_lrw_dec_tv_template,
.count = CAMELLIA_LRW_DEC_TEST_VECTORS
}
}
}
}, { }, {
.alg = "lrw(serpent)", .alg = "lrw(serpent)",
.test = alg_test_skcipher, .test = alg_test_skcipher,
...@@ -2633,6 +2663,21 @@ static const struct alg_test_desc alg_test_descs[] = { ...@@ -2633,6 +2663,21 @@ static const struct alg_test_desc alg_test_descs[] = {
} }
} }
} }
}, {
.alg = "xts(camellia)",
.test = alg_test_skcipher,
.suite = {
.cipher = {
.enc = {
.vecs = camellia_xts_enc_tv_template,
.count = CAMELLIA_XTS_ENC_TEST_VECTORS
},
.dec = {
.vecs = camellia_xts_dec_tv_template,
.count = CAMELLIA_XTS_DEC_TEST_VECTORS
}
}
}
}, { }, {
.alg = "xts(serpent)", .alg = "xts(serpent)",
.test = alg_test_skcipher, .test = alg_test_skcipher,
......
This diff is collapsed.
...@@ -115,10 +115,7 @@ static int __init tx4939_rng_probe(struct platform_device *dev) ...@@ -115,10 +115,7 @@ static int __init tx4939_rng_probe(struct platform_device *dev)
rngdev = devm_kzalloc(&dev->dev, sizeof(*rngdev), GFP_KERNEL); rngdev = devm_kzalloc(&dev->dev, sizeof(*rngdev), GFP_KERNEL);
if (!rngdev) if (!rngdev)
return -ENOMEM; return -ENOMEM;
if (!devm_request_mem_region(&dev->dev, r->start, resource_size(r), rngdev->base = devm_request_and_ioremap(&dev->dev, r);
dev_name(&dev->dev)))
return -EBUSY;
rngdev->base = devm_ioremap(&dev->dev, r->start, resource_size(r));
if (!rngdev->base) if (!rngdev->base)
return -EBUSY; return -EBUSY;
......
...@@ -293,4 +293,15 @@ config CRYPTO_DEV_S5P ...@@ -293,4 +293,15 @@ config CRYPTO_DEV_S5P
Select this to offload Samsung S5PV210 or S5PC110 from AES Select this to offload Samsung S5PV210 or S5PC110 from AES
algorithms execution. algorithms execution.
config CRYPTO_DEV_TEGRA_AES
tristate "Support for TEGRA AES hw engine"
depends on ARCH_TEGRA
select CRYPTO_AES
help
TEGRA processors have AES module accelerator. Select this if you
want to use the TEGRA module for AES algorithms.
To compile this driver as a module, choose M here: the module
will be called tegra-aes.
endif # CRYPTO_HW endif # CRYPTO_HW
...@@ -13,3 +13,4 @@ obj-$(CONFIG_CRYPTO_DEV_OMAP_SHAM) += omap-sham.o ...@@ -13,3 +13,4 @@ obj-$(CONFIG_CRYPTO_DEV_OMAP_SHAM) += omap-sham.o
obj-$(CONFIG_CRYPTO_DEV_OMAP_AES) += omap-aes.o obj-$(CONFIG_CRYPTO_DEV_OMAP_AES) += omap-aes.o
obj-$(CONFIG_CRYPTO_DEV_PICOXCELL) += picoxcell_crypto.o obj-$(CONFIG_CRYPTO_DEV_PICOXCELL) += picoxcell_crypto.o
obj-$(CONFIG_CRYPTO_DEV_S5P) += s5p-sss.o obj-$(CONFIG_CRYPTO_DEV_S5P) += s5p-sss.o
obj-$(CONFIG_CRYPTO_DEV_TEGRA_AES) += tegra-aes.o
...@@ -1843,6 +1843,25 @@ static struct caam_alg_template driver_algs[] = { ...@@ -1843,6 +1843,25 @@ static struct caam_alg_template driver_algs[] = {
.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP, .class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC, .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
}, },
{
.name = "authenc(hmac(sha224),cbc(aes))",
.driver_name = "authenc-hmac-sha224-cbc-aes-caam",
.blocksize = AES_BLOCK_SIZE,
.template_aead = {
.setkey = aead_setkey,
.setauthsize = aead_setauthsize,
.encrypt = aead_encrypt,
.decrypt = aead_decrypt,
.givencrypt = aead_givencrypt,
.geniv = "<built-in>",
.ivsize = AES_BLOCK_SIZE,
.maxauthsize = SHA224_DIGEST_SIZE,
},
.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
},
{ {
.name = "authenc(hmac(sha256),cbc(aes))", .name = "authenc(hmac(sha256),cbc(aes))",
.driver_name = "authenc-hmac-sha256-cbc-aes-caam", .driver_name = "authenc-hmac-sha256-cbc-aes-caam",
...@@ -1863,6 +1882,26 @@ static struct caam_alg_template driver_algs[] = { ...@@ -1863,6 +1882,26 @@ static struct caam_alg_template driver_algs[] = {
OP_ALG_AAI_HMAC_PRECOMP, OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC, .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
}, },
{
.name = "authenc(hmac(sha384),cbc(aes))",
.driver_name = "authenc-hmac-sha384-cbc-aes-caam",
.blocksize = AES_BLOCK_SIZE,
.template_aead = {
.setkey = aead_setkey,
.setauthsize = aead_setauthsize,
.encrypt = aead_encrypt,
.decrypt = aead_decrypt,
.givencrypt = aead_givencrypt,
.geniv = "<built-in>",
.ivsize = AES_BLOCK_SIZE,
.maxauthsize = SHA384_DIGEST_SIZE,
},
.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
},
{ {
.name = "authenc(hmac(sha512),cbc(aes))", .name = "authenc(hmac(sha512),cbc(aes))",
.driver_name = "authenc-hmac-sha512-cbc-aes-caam", .driver_name = "authenc-hmac-sha512-cbc-aes-caam",
...@@ -1921,6 +1960,25 @@ static struct caam_alg_template driver_algs[] = { ...@@ -1921,6 +1960,25 @@ static struct caam_alg_template driver_algs[] = {
.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP, .class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC, .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
}, },
{
.name = "authenc(hmac(sha224),cbc(des3_ede))",
.driver_name = "authenc-hmac-sha224-cbc-des3_ede-caam",
.blocksize = DES3_EDE_BLOCK_SIZE,
.template_aead = {
.setkey = aead_setkey,
.setauthsize = aead_setauthsize,
.encrypt = aead_encrypt,
.decrypt = aead_decrypt,
.givencrypt = aead_givencrypt,
.geniv = "<built-in>",
.ivsize = DES3_EDE_BLOCK_SIZE,
.maxauthsize = SHA224_DIGEST_SIZE,
},
.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
},
{ {
.name = "authenc(hmac(sha256),cbc(des3_ede))", .name = "authenc(hmac(sha256),cbc(des3_ede))",
.driver_name = "authenc-hmac-sha256-cbc-des3_ede-caam", .driver_name = "authenc-hmac-sha256-cbc-des3_ede-caam",
...@@ -1941,6 +1999,25 @@ static struct caam_alg_template driver_algs[] = { ...@@ -1941,6 +1999,25 @@ static struct caam_alg_template driver_algs[] = {
OP_ALG_AAI_HMAC_PRECOMP, OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC, .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
}, },
{
.name = "authenc(hmac(sha384),cbc(des3_ede))",
.driver_name = "authenc-hmac-sha384-cbc-des3_ede-caam",
.blocksize = DES3_EDE_BLOCK_SIZE,
.template_aead = {
.setkey = aead_setkey,
.setauthsize = aead_setauthsize,
.encrypt = aead_encrypt,
.decrypt = aead_decrypt,
.givencrypt = aead_givencrypt,
.geniv = "<built-in>",
.ivsize = DES3_EDE_BLOCK_SIZE,
.maxauthsize = SHA384_DIGEST_SIZE,
},
.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
},
{ {
.name = "authenc(hmac(sha512),cbc(des3_ede))", .name = "authenc(hmac(sha512),cbc(des3_ede))",
.driver_name = "authenc-hmac-sha512-cbc-des3_ede-caam", .driver_name = "authenc-hmac-sha512-cbc-des3_ede-caam",
...@@ -1999,6 +2076,25 @@ static struct caam_alg_template driver_algs[] = { ...@@ -1999,6 +2076,25 @@ static struct caam_alg_template driver_algs[] = {
.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP, .class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC, .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
}, },
{
.name = "authenc(hmac(sha224),cbc(des))",
.driver_name = "authenc-hmac-sha224-cbc-des-caam",
.blocksize = DES_BLOCK_SIZE,
.template_aead = {
.setkey = aead_setkey,
.setauthsize = aead_setauthsize,
.encrypt = aead_encrypt,
.decrypt = aead_decrypt,
.givencrypt = aead_givencrypt,
.geniv = "<built-in>",
.ivsize = DES_BLOCK_SIZE,
.maxauthsize = SHA224_DIGEST_SIZE,
},
.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
},
{ {
.name = "authenc(hmac(sha256),cbc(des))", .name = "authenc(hmac(sha256),cbc(des))",
.driver_name = "authenc-hmac-sha256-cbc-des-caam", .driver_name = "authenc-hmac-sha256-cbc-des-caam",
...@@ -2019,6 +2115,25 @@ static struct caam_alg_template driver_algs[] = { ...@@ -2019,6 +2115,25 @@ static struct caam_alg_template driver_algs[] = {
OP_ALG_AAI_HMAC_PRECOMP, OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC, .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
}, },
{
.name = "authenc(hmac(sha384),cbc(des))",
.driver_name = "authenc-hmac-sha384-cbc-des-caam",
.blocksize = DES_BLOCK_SIZE,
.template_aead = {
.setkey = aead_setkey,
.setauthsize = aead_setauthsize,
.encrypt = aead_encrypt,
.decrypt = aead_decrypt,
.givencrypt = aead_givencrypt,
.geniv = "<built-in>",
.ivsize = DES_BLOCK_SIZE,
.maxauthsize = SHA384_DIGEST_SIZE,
},
.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
OP_ALG_AAI_HMAC_PRECOMP,
.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
},
{ {
.name = "authenc(hmac(sha512),cbc(des))", .name = "authenc(hmac(sha512),cbc(des))",
.driver_name = "authenc-hmac-sha512-cbc-des-caam", .driver_name = "authenc-hmac-sha512-cbc-des-caam",
...@@ -2205,7 +2320,8 @@ static struct caam_crypto_alg *caam_alg_alloc(struct device *ctrldev, ...@@ -2205,7 +2320,8 @@ static struct caam_crypto_alg *caam_alg_alloc(struct device *ctrldev,
alg->cra_blocksize = template->blocksize; alg->cra_blocksize = template->blocksize;
alg->cra_alignmask = 0; alg->cra_alignmask = 0;
alg->cra_ctxsize = sizeof(struct caam_ctx); alg->cra_ctxsize = sizeof(struct caam_ctx);
alg->cra_flags = CRYPTO_ALG_ASYNC | template->type; alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
template->type;
switch (template->type) { switch (template->type) {
case CRYPTO_ALG_TYPE_ABLKCIPHER: case CRYPTO_ALG_TYPE_ABLKCIPHER:
alg->cra_type = &crypto_ablkcipher_type; alg->cra_type = &crypto_ablkcipher_type;
...@@ -2285,12 +2401,12 @@ static int __init caam_algapi_init(void) ...@@ -2285,12 +2401,12 @@ static int __init caam_algapi_init(void)
dev_warn(ctrldev, "%s alg registration failed\n", dev_warn(ctrldev, "%s alg registration failed\n",
t_alg->crypto_alg.cra_driver_name); t_alg->crypto_alg.cra_driver_name);
kfree(t_alg); kfree(t_alg);
} else { } else
list_add_tail(&t_alg->entry, &priv->alg_list); list_add_tail(&t_alg->entry, &priv->alg_list);
dev_info(ctrldev, "%s\n",
t_alg->crypto_alg.cra_driver_name);
}
} }
if (!list_empty(&priv->alg_list))
dev_info(ctrldev, "%s algorithms registered in /proc/crypto\n",
(char *)of_get_property(dev_node, "compatible", NULL));
return err; return err;
} }
......
...@@ -46,7 +46,7 @@ static int caam_remove(struct platform_device *pdev) ...@@ -46,7 +46,7 @@ static int caam_remove(struct platform_device *pdev)
/* Probe routine for CAAM top (controller) level */ /* Probe routine for CAAM top (controller) level */
static int caam_probe(struct platform_device *pdev) static int caam_probe(struct platform_device *pdev)
{ {
int d, ring, rspec; int ring, rspec;
struct device *dev; struct device *dev;
struct device_node *nprop, *np; struct device_node *nprop, *np;
struct caam_ctrl __iomem *ctrl; struct caam_ctrl __iomem *ctrl;
......
...@@ -393,6 +393,7 @@ static struct crypto_alg geode_cbc_alg = { ...@@ -393,6 +393,7 @@ static struct crypto_alg geode_cbc_alg = {
.cra_driver_name = "cbc-aes-geode", .cra_driver_name = "cbc-aes-geode",
.cra_priority = 400, .cra_priority = 400,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_NEED_FALLBACK, CRYPTO_ALG_NEED_FALLBACK,
.cra_init = fallback_init_blk, .cra_init = fallback_init_blk,
.cra_exit = fallback_exit_blk, .cra_exit = fallback_exit_blk,
...@@ -479,6 +480,7 @@ static struct crypto_alg geode_ecb_alg = { ...@@ -479,6 +480,7 @@ static struct crypto_alg geode_ecb_alg = {
.cra_driver_name = "ecb-aes-geode", .cra_driver_name = "ecb-aes-geode",
.cra_priority = 400, .cra_priority = 400,
.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_NEED_FALLBACK, CRYPTO_ALG_NEED_FALLBACK,
.cra_init = fallback_init_blk, .cra_init = fallback_init_blk,
.cra_exit = fallback_exit_blk, .cra_exit = fallback_exit_blk,
......
...@@ -2494,7 +2494,8 @@ static int hifn_alg_alloc(struct hifn_device *dev, struct hifn_alg_template *t) ...@@ -2494,7 +2494,8 @@ static int hifn_alg_alloc(struct hifn_device *dev, struct hifn_alg_template *t)
t->drv_name, dev->name); t->drv_name, dev->name);
alg->alg.cra_priority = 300; alg->alg.cra_priority = 300;
alg->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC; alg->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC;
alg->alg.cra_blocksize = t->bsize; alg->alg.cra_blocksize = t->bsize;
alg->alg.cra_ctxsize = sizeof(struct hifn_context); alg->alg.cra_ctxsize = sizeof(struct hifn_context);
alg->alg.cra_alignmask = 0; alg->alg.cra_alignmask = 0;
......
...@@ -265,7 +265,7 @@ static int setup_crypt_desc(void) ...@@ -265,7 +265,7 @@ static int setup_crypt_desc(void)
BUILD_BUG_ON(sizeof(struct crypt_ctl) != 64); BUILD_BUG_ON(sizeof(struct crypt_ctl) != 64);
crypt_virt = dma_alloc_coherent(dev, crypt_virt = dma_alloc_coherent(dev,
NPE_QLEN * sizeof(struct crypt_ctl), NPE_QLEN * sizeof(struct crypt_ctl),
&crypt_phys, GFP_KERNEL); &crypt_phys, GFP_ATOMIC);
if (!crypt_virt) if (!crypt_virt)
return -ENOMEM; return -ENOMEM;
memset(crypt_virt, 0, NPE_QLEN * sizeof(struct crypt_ctl)); memset(crypt_virt, 0, NPE_QLEN * sizeof(struct crypt_ctl));
...@@ -1449,6 +1449,7 @@ static int __init ixp_module_init(void) ...@@ -1449,6 +1449,7 @@ static int __init ixp_module_init(void)
/* block ciphers */ /* block ciphers */
cra->cra_type = &crypto_ablkcipher_type; cra->cra_type = &crypto_ablkcipher_type;
cra->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | cra->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_ASYNC; CRYPTO_ALG_ASYNC;
if (!cra->cra_ablkcipher.setkey) if (!cra->cra_ablkcipher.setkey)
cra->cra_ablkcipher.setkey = ablk_setkey; cra->cra_ablkcipher.setkey = ablk_setkey;
...@@ -1461,6 +1462,7 @@ static int __init ixp_module_init(void) ...@@ -1461,6 +1462,7 @@ static int __init ixp_module_init(void)
/* authenc */ /* authenc */
cra->cra_type = &crypto_aead_type; cra->cra_type = &crypto_aead_type;
cra->cra_flags = CRYPTO_ALG_TYPE_AEAD | cra->cra_flags = CRYPTO_ALG_TYPE_AEAD |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_ASYNC; CRYPTO_ALG_ASYNC;
cra->cra_aead.setkey = aead_setkey; cra->cra_aead.setkey = aead_setkey;
cra->cra_aead.setauthsize = aead_setauthsize; cra->cra_aead.setauthsize = aead_setauthsize;
......
...@@ -899,7 +899,8 @@ struct crypto_alg mv_aes_alg_ecb = { ...@@ -899,7 +899,8 @@ struct crypto_alg mv_aes_alg_ecb = {
.cra_name = "ecb(aes)", .cra_name = "ecb(aes)",
.cra_driver_name = "mv-ecb-aes", .cra_driver_name = "mv-ecb-aes",
.cra_priority = 300, .cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC,
.cra_blocksize = 16, .cra_blocksize = 16,
.cra_ctxsize = sizeof(struct mv_ctx), .cra_ctxsize = sizeof(struct mv_ctx),
.cra_alignmask = 0, .cra_alignmask = 0,
...@@ -921,7 +922,8 @@ struct crypto_alg mv_aes_alg_cbc = { ...@@ -921,7 +922,8 @@ struct crypto_alg mv_aes_alg_cbc = {
.cra_name = "cbc(aes)", .cra_name = "cbc(aes)",
.cra_driver_name = "mv-cbc-aes", .cra_driver_name = "mv-cbc-aes",
.cra_priority = 300, .cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct mv_ctx), .cra_ctxsize = sizeof(struct mv_ctx),
.cra_alignmask = 0, .cra_alignmask = 0,
...@@ -953,7 +955,8 @@ struct ahash_alg mv_sha1_alg = { ...@@ -953,7 +955,8 @@ struct ahash_alg mv_sha1_alg = {
.cra_driver_name = "mv-sha1", .cra_driver_name = "mv-sha1",
.cra_priority = 300, .cra_priority = 300,
.cra_flags = .cra_flags =
CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK, CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = SHA1_BLOCK_SIZE, .cra_blocksize = SHA1_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct mv_tfm_hash_ctx), .cra_ctxsize = sizeof(struct mv_tfm_hash_ctx),
.cra_init = mv_cra_hash_sha1_init, .cra_init = mv_cra_hash_sha1_init,
...@@ -977,7 +980,8 @@ struct ahash_alg mv_hmac_sha1_alg = { ...@@ -977,7 +980,8 @@ struct ahash_alg mv_hmac_sha1_alg = {
.cra_driver_name = "mv-hmac-sha1", .cra_driver_name = "mv-hmac-sha1",
.cra_priority = 300, .cra_priority = 300,
.cra_flags = .cra_flags =
CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK, CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = SHA1_BLOCK_SIZE, .cra_blocksize = SHA1_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct mv_tfm_hash_ctx), .cra_ctxsize = sizeof(struct mv_tfm_hash_ctx),
.cra_init = mv_cra_hash_hmac_sha1_init, .cra_init = mv_cra_hash_hmac_sha1_init,
......
...@@ -1402,7 +1402,8 @@ static int __devinit __n2_register_one_cipher(const struct n2_cipher_tmpl *tmpl) ...@@ -1402,7 +1402,8 @@ static int __devinit __n2_register_one_cipher(const struct n2_cipher_tmpl *tmpl)
snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name); snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s-n2", tmpl->drv_name); snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s-n2", tmpl->drv_name);
alg->cra_priority = N2_CRA_PRIORITY; alg->cra_priority = N2_CRA_PRIORITY;
alg->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC; alg->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC;
alg->cra_blocksize = tmpl->block_size; alg->cra_blocksize = tmpl->block_size;
p->enc_type = tmpl->enc_type; p->enc_type = tmpl->enc_type;
alg->cra_ctxsize = sizeof(struct n2_cipher_context); alg->cra_ctxsize = sizeof(struct n2_cipher_context);
...@@ -1493,7 +1494,9 @@ static int __devinit __n2_register_one_ahash(const struct n2_hash_tmpl *tmpl) ...@@ -1493,7 +1494,9 @@ static int __devinit __n2_register_one_ahash(const struct n2_hash_tmpl *tmpl)
snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name); snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s-n2", tmpl->name); snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s-n2", tmpl->name);
base->cra_priority = N2_CRA_PRIORITY; base->cra_priority = N2_CRA_PRIORITY;
base->cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK; base->cra_flags = CRYPTO_ALG_TYPE_AHASH |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_NEED_FALLBACK;
base->cra_blocksize = tmpl->block_size; base->cra_blocksize = tmpl->block_size;
base->cra_ctxsize = sizeof(struct n2_hash_ctx); base->cra_ctxsize = sizeof(struct n2_hash_ctx);
base->cra_module = THIS_MODULE; base->cra_module = THIS_MODULE;
......
...@@ -756,7 +756,9 @@ static struct crypto_alg algs[] = { ...@@ -756,7 +756,9 @@ static struct crypto_alg algs[] = {
.cra_name = "ecb(aes)", .cra_name = "ecb(aes)",
.cra_driver_name = "ecb-aes-omap", .cra_driver_name = "ecb-aes-omap",
.cra_priority = 100, .cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_ASYNC,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct omap_aes_ctx), .cra_ctxsize = sizeof(struct omap_aes_ctx),
.cra_alignmask = 0, .cra_alignmask = 0,
...@@ -776,7 +778,9 @@ static struct crypto_alg algs[] = { ...@@ -776,7 +778,9 @@ static struct crypto_alg algs[] = {
.cra_name = "cbc(aes)", .cra_name = "cbc(aes)",
.cra_driver_name = "cbc-aes-omap", .cra_driver_name = "cbc-aes-omap",
.cra_priority = 100, .cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_ASYNC,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct omap_aes_ctx), .cra_ctxsize = sizeof(struct omap_aes_ctx),
.cra_alignmask = 0, .cra_alignmask = 0,
......
...@@ -953,6 +953,7 @@ static struct ahash_alg algs[] = { ...@@ -953,6 +953,7 @@ static struct ahash_alg algs[] = {
.cra_driver_name = "omap-sha1", .cra_driver_name = "omap-sha1",
.cra_priority = 100, .cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_AHASH | .cra_flags = CRYPTO_ALG_TYPE_AHASH |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_ASYNC | CRYPTO_ALG_ASYNC |
CRYPTO_ALG_NEED_FALLBACK, CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = SHA1_BLOCK_SIZE, .cra_blocksize = SHA1_BLOCK_SIZE,
...@@ -975,6 +976,7 @@ static struct ahash_alg algs[] = { ...@@ -975,6 +976,7 @@ static struct ahash_alg algs[] = {
.cra_driver_name = "omap-md5", .cra_driver_name = "omap-md5",
.cra_priority = 100, .cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_AHASH | .cra_flags = CRYPTO_ALG_TYPE_AHASH |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_ASYNC | CRYPTO_ALG_ASYNC |
CRYPTO_ALG_NEED_FALLBACK, CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = SHA1_BLOCK_SIZE, .cra_blocksize = SHA1_BLOCK_SIZE,
...@@ -998,6 +1000,7 @@ static struct ahash_alg algs[] = { ...@@ -998,6 +1000,7 @@ static struct ahash_alg algs[] = {
.cra_driver_name = "omap-hmac-sha1", .cra_driver_name = "omap-hmac-sha1",
.cra_priority = 100, .cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_AHASH | .cra_flags = CRYPTO_ALG_TYPE_AHASH |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_ASYNC | CRYPTO_ALG_ASYNC |
CRYPTO_ALG_NEED_FALLBACK, CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = SHA1_BLOCK_SIZE, .cra_blocksize = SHA1_BLOCK_SIZE,
...@@ -1022,6 +1025,7 @@ static struct ahash_alg algs[] = { ...@@ -1022,6 +1025,7 @@ static struct ahash_alg algs[] = {
.cra_driver_name = "omap-hmac-md5", .cra_driver_name = "omap-hmac-md5",
.cra_priority = 100, .cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_AHASH | .cra_flags = CRYPTO_ALG_TYPE_AHASH |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_ASYNC | CRYPTO_ALG_ASYNC |
CRYPTO_ALG_NEED_FALLBACK, CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = SHA1_BLOCK_SIZE, .cra_blocksize = SHA1_BLOCK_SIZE,
......
...@@ -1322,6 +1322,7 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1322,6 +1322,7 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_driver_name = "cbc-aes-picoxcell", .cra_driver_name = "cbc-aes-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_ASYNC | CRYPTO_ALG_ASYNC |
CRYPTO_ALG_NEED_FALLBACK, CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
...@@ -1349,6 +1350,7 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1349,6 +1350,7 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_driver_name = "ecb-aes-picoxcell", .cra_driver_name = "ecb-aes-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK, CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_ablk_ctx), .cra_ctxsize = sizeof(struct spacc_ablk_ctx),
...@@ -1373,7 +1375,9 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1373,7 +1375,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_name = "cbc(des)", .cra_name = "cbc(des)",
.cra_driver_name = "cbc-des-picoxcell", .cra_driver_name = "cbc-des-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES_BLOCK_SIZE, .cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_ablk_ctx), .cra_ctxsize = sizeof(struct spacc_ablk_ctx),
.cra_type = &crypto_ablkcipher_type, .cra_type = &crypto_ablkcipher_type,
...@@ -1398,7 +1402,9 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1398,7 +1402,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_name = "ecb(des)", .cra_name = "ecb(des)",
.cra_driver_name = "ecb-des-picoxcell", .cra_driver_name = "ecb-des-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES_BLOCK_SIZE, .cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_ablk_ctx), .cra_ctxsize = sizeof(struct spacc_ablk_ctx),
.cra_type = &crypto_ablkcipher_type, .cra_type = &crypto_ablkcipher_type,
...@@ -1422,7 +1428,9 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1422,7 +1428,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_name = "cbc(des3_ede)", .cra_name = "cbc(des3_ede)",
.cra_driver_name = "cbc-des3-ede-picoxcell", .cra_driver_name = "cbc-des3-ede-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE, .cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_ablk_ctx), .cra_ctxsize = sizeof(struct spacc_ablk_ctx),
.cra_type = &crypto_ablkcipher_type, .cra_type = &crypto_ablkcipher_type,
...@@ -1447,7 +1455,9 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1447,7 +1455,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_name = "ecb(des3_ede)", .cra_name = "ecb(des3_ede)",
.cra_driver_name = "ecb-des3-ede-picoxcell", .cra_driver_name = "ecb-des3-ede-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE, .cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_ablk_ctx), .cra_ctxsize = sizeof(struct spacc_ablk_ctx),
.cra_type = &crypto_ablkcipher_type, .cra_type = &crypto_ablkcipher_type,
...@@ -1472,7 +1482,9 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1472,7 +1482,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_name = "authenc(hmac(sha1),cbc(aes))", .cra_name = "authenc(hmac(sha1),cbc(aes))",
.cra_driver_name = "authenc-hmac-sha1-cbc-aes-picoxcell", .cra_driver_name = "authenc-hmac-sha1-cbc-aes-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_AEAD |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_aead_ctx), .cra_ctxsize = sizeof(struct spacc_aead_ctx),
.cra_type = &crypto_aead_type, .cra_type = &crypto_aead_type,
...@@ -1500,7 +1512,9 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1500,7 +1512,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_name = "authenc(hmac(sha256),cbc(aes))", .cra_name = "authenc(hmac(sha256),cbc(aes))",
.cra_driver_name = "authenc-hmac-sha256-cbc-aes-picoxcell", .cra_driver_name = "authenc-hmac-sha256-cbc-aes-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_AEAD |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_aead_ctx), .cra_ctxsize = sizeof(struct spacc_aead_ctx),
.cra_type = &crypto_aead_type, .cra_type = &crypto_aead_type,
...@@ -1527,7 +1541,9 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1527,7 +1541,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_name = "authenc(hmac(md5),cbc(aes))", .cra_name = "authenc(hmac(md5),cbc(aes))",
.cra_driver_name = "authenc-hmac-md5-cbc-aes-picoxcell", .cra_driver_name = "authenc-hmac-md5-cbc-aes-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_AEAD |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_aead_ctx), .cra_ctxsize = sizeof(struct spacc_aead_ctx),
.cra_type = &crypto_aead_type, .cra_type = &crypto_aead_type,
...@@ -1554,7 +1570,9 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1554,7 +1570,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_name = "authenc(hmac(sha1),cbc(des3_ede))", .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
.cra_driver_name = "authenc-hmac-sha1-cbc-3des-picoxcell", .cra_driver_name = "authenc-hmac-sha1-cbc-3des-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_AEAD |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE, .cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_aead_ctx), .cra_ctxsize = sizeof(struct spacc_aead_ctx),
.cra_type = &crypto_aead_type, .cra_type = &crypto_aead_type,
...@@ -1582,7 +1600,9 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1582,7 +1600,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_name = "authenc(hmac(sha256),cbc(des3_ede))", .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
.cra_driver_name = "authenc-hmac-sha256-cbc-3des-picoxcell", .cra_driver_name = "authenc-hmac-sha256-cbc-3des-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_AEAD |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE, .cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_aead_ctx), .cra_ctxsize = sizeof(struct spacc_aead_ctx),
.cra_type = &crypto_aead_type, .cra_type = &crypto_aead_type,
...@@ -1609,7 +1629,9 @@ static struct spacc_alg ipsec_engine_algs[] = { ...@@ -1609,7 +1629,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
.cra_name = "authenc(hmac(md5),cbc(des3_ede))", .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
.cra_driver_name = "authenc-hmac-md5-cbc-3des-picoxcell", .cra_driver_name = "authenc-hmac-md5-cbc-3des-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_AEAD |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE, .cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct spacc_aead_ctx), .cra_ctxsize = sizeof(struct spacc_aead_ctx),
.cra_type = &crypto_aead_type, .cra_type = &crypto_aead_type,
...@@ -1639,7 +1661,9 @@ static struct spacc_alg l2_engine_algs[] = { ...@@ -1639,7 +1661,9 @@ static struct spacc_alg l2_engine_algs[] = {
.cra_name = "f8(kasumi)", .cra_name = "f8(kasumi)",
.cra_driver_name = "f8-kasumi-picoxcell", .cra_driver_name = "f8-kasumi-picoxcell",
.cra_priority = SPACC_CRYPTO_ALG_PRIORITY, .cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
.cra_flags = CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_ASYNC, .cra_flags = CRYPTO_ALG_TYPE_GIVCIPHER |
CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 8, .cra_blocksize = 8,
.cra_ctxsize = sizeof(struct spacc_ablk_ctx), .cra_ctxsize = sizeof(struct spacc_ablk_ctx),
.cra_type = &crypto_ablkcipher_type, .cra_type = &crypto_ablkcipher_type,
......
...@@ -518,7 +518,8 @@ static struct crypto_alg algs[] = { ...@@ -518,7 +518,8 @@ static struct crypto_alg algs[] = {
.cra_driver_name = "ecb-aes-s5p", .cra_driver_name = "ecb-aes-s5p",
.cra_priority = 100, .cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_ASYNC, CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct s5p_aes_ctx), .cra_ctxsize = sizeof(struct s5p_aes_ctx),
.cra_alignmask = 0x0f, .cra_alignmask = 0x0f,
...@@ -538,7 +539,8 @@ static struct crypto_alg algs[] = { ...@@ -538,7 +539,8 @@ static struct crypto_alg algs[] = {
.cra_driver_name = "cbc-aes-s5p", .cra_driver_name = "cbc-aes-s5p",
.cra_priority = 100, .cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
CRYPTO_ALG_ASYNC, CRYPTO_ALG_ASYNC |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE, .cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct s5p_aes_ctx), .cra_ctxsize = sizeof(struct s5p_aes_ctx),
.cra_alignmask = 0x0f, .cra_alignmask = 0x0f,
......
...@@ -2648,6 +2648,7 @@ static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev, ...@@ -2648,6 +2648,7 @@ static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
alg->cra_priority = TALITOS_CRA_PRIORITY; alg->cra_priority = TALITOS_CRA_PRIORITY;
alg->cra_alignmask = 0; alg->cra_alignmask = 0;
alg->cra_ctxsize = sizeof(struct talitos_ctx); alg->cra_ctxsize = sizeof(struct talitos_ctx);
alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
t_alg->dev = dev; t_alg->dev = dev;
......
This diff is collapsed.
/*
 * Copyright (c) 2010, NVIDIA Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */
#ifndef __CRYPTODEV_TEGRA_AES_H
#define __CRYPTODEV_TEGRA_AES_H
/*
 * Register offsets and bitfield masks for the Tegra hardware AES engine.
 *
 * NOTE(review): the *_FIELD macros below use BIT(); this header assumes
 * BIT() is already in scope (e.g. via <linux/bitops.h>) in every including
 * translation unit -- confirm against the driver's include order.
 */
/* engine register offsets (relative to the device's MMIO base) */
#define TEGRA_AES_ICMDQUE_WR 0x1000
#define TEGRA_AES_CMDQUE_CONTROL 0x1008
#define TEGRA_AES_INTR_STATUS 0x1018
#define TEGRA_AES_INT_ENB 0x1040
#define TEGRA_AES_CONFIG 0x1044
#define TEGRA_AES_IRAM_ACCESS_CFG 0x10A0
#define TEGRA_AES_SECURE_DEST_ADDR 0x1100
#define TEGRA_AES_SECURE_INPUT_SELECT 0x1104
#define TEGRA_AES_SECURE_CONFIG 0x1108
#define TEGRA_AES_SECURE_CONFIG_EXT 0x110C
#define TEGRA_AES_SECURE_SECURITY 0x1110
/* four 32-bit words holding the hash/CMAC result */
#define TEGRA_AES_SECURE_HASH_RESULT0 0x1120
#define TEGRA_AES_SECURE_HASH_RESULT1 0x1124
#define TEGRA_AES_SECURE_HASH_RESULT2 0x1128
#define TEGRA_AES_SECURE_HASH_RESULT3 0x112C
/* per-key-slot security/select registers (8 slots, 4 bytes apart) */
#define TEGRA_AES_SECURE_SEC_SEL0 0x1140
#define TEGRA_AES_SECURE_SEC_SEL1 0x1144
#define TEGRA_AES_SECURE_SEC_SEL2 0x1148
#define TEGRA_AES_SECURE_SEC_SEL3 0x114C
#define TEGRA_AES_SECURE_SEC_SEL4 0x1150
#define TEGRA_AES_SECURE_SEC_SEL5 0x1154
#define TEGRA_AES_SECURE_SEC_SEL6 0x1158
#define TEGRA_AES_SECURE_SEC_SEL7 0x115C
/* interrupt status reg masks and shifts */
#define TEGRA_AES_ENGINE_BUSY_FIELD BIT(0)
#define TEGRA_AES_ICQ_EMPTY_FIELD BIT(3)
#define TEGRA_AES_DMA_BUSY_FIELD BIT(23)
/* secure select reg masks and shifts */
#define TEGRA_AES_SECURE_SEL0_KEYREAD_ENB0_FIELD BIT(0)
/* secure config ext masks and shifts */
#define TEGRA_AES_SECURE_KEY_SCH_DIS_FIELD BIT(15)
/* secure config masks and shifts */
#define TEGRA_AES_SECURE_KEY_INDEX_SHIFT 20
#define TEGRA_AES_SECURE_KEY_INDEX_FIELD (0x1F << TEGRA_AES_SECURE_KEY_INDEX_SHIFT)
#define TEGRA_AES_SECURE_BLOCK_CNT_SHIFT 0
#define TEGRA_AES_SECURE_BLOCK_CNT_FIELD (0xFFFFF << TEGRA_AES_SECURE_BLOCK_CNT_SHIFT)
/* stream interface select masks and shifts */
#define TEGRA_AES_CMDQ_CTRL_UCMDQEN_FIELD BIT(0)
#define TEGRA_AES_CMDQ_CTRL_ICMDQEN_FIELD BIT(1)
#define TEGRA_AES_CMDQ_CTRL_SRC_STM_SEL_FIELD BIT(4)
#define TEGRA_AES_CMDQ_CTRL_DST_STM_SEL_FIELD BIT(5)
/* config register masks and shifts */
#define TEGRA_AES_CONFIG_ENDIAN_ENB_FIELD BIT(10)
#define TEGRA_AES_CONFIG_MODE_SEL_SHIFT 0
#define TEGRA_AES_CONFIG_MODE_SEL_FIELD (0x1F << TEGRA_AES_CONFIG_MODE_SEL_SHIFT)
/* extended config */
#define TEGRA_AES_SECURE_OFFSET_CNT_SHIFT 24
#define TEGRA_AES_SECURE_OFFSET_CNT_FIELD (0xFF << TEGRA_AES_SECURE_OFFSET_CNT_SHIFT)
#define TEGRA_AES_SECURE_KEYSCHED_GEN_FIELD BIT(15)
/* init vector select */
#define TEGRA_AES_SECURE_IV_SELECT_SHIFT 10
#define TEGRA_AES_SECURE_IV_SELECT_FIELD BIT(10)
/* secure engine input */
#define TEGRA_AES_SECURE_INPUT_ALG_SEL_SHIFT 28
#define TEGRA_AES_SECURE_INPUT_ALG_SEL_FIELD (0xF << TEGRA_AES_SECURE_INPUT_ALG_SEL_SHIFT)
#define TEGRA_AES_SECURE_INPUT_KEY_LEN_SHIFT 16
#define TEGRA_AES_SECURE_INPUT_KEY_LEN_FIELD (0xFFF << TEGRA_AES_SECURE_INPUT_KEY_LEN_SHIFT)
#define TEGRA_AES_SECURE_RNG_ENB_FIELD BIT(11)
#define TEGRA_AES_SECURE_CORE_SEL_SHIFT 9
#define TEGRA_AES_SECURE_CORE_SEL_FIELD BIT(9)
#define TEGRA_AES_SECURE_VCTRAM_SEL_SHIFT 7
#define TEGRA_AES_SECURE_VCTRAM_SEL_FIELD (0x3 << TEGRA_AES_SECURE_VCTRAM_SEL_SHIFT)
#define TEGRA_AES_SECURE_INPUT_SEL_SHIFT 5
#define TEGRA_AES_SECURE_INPUT_SEL_FIELD (0x3 << TEGRA_AES_SECURE_INPUT_SEL_SHIFT)
#define TEGRA_AES_SECURE_XOR_POS_SHIFT 3
#define TEGRA_AES_SECURE_XOR_POS_FIELD (0x3 << TEGRA_AES_SECURE_XOR_POS_SHIFT)
#define TEGRA_AES_SECURE_HASH_ENB_FIELD BIT(2)
#define TEGRA_AES_SECURE_ON_THE_FLY_FIELD BIT(0)
/* interrupt error mask */
#define TEGRA_AES_INT_ERROR_MASK 0xFFF000
#endif /* __CRYPTODEV_TEGRA_AES_H */
...@@ -75,6 +75,11 @@ ...@@ -75,6 +75,11 @@
*/ */
#define CRYPTO_ALG_INSTANCE 0x00000800 #define CRYPTO_ALG_INSTANCE 0x00000800
/* Set this bit if the algorithm provided is hardware accelerated but
* not available to userspace via instruction set or so.
*/
#define CRYPTO_ALG_KERN_DRIVER_ONLY 0x00001000
/* /*
* Transform masks and values (for crt_flags). * Transform masks and values (for crt_flags).
*/ */
...@@ -309,6 +314,8 @@ struct crypto_alg { ...@@ -309,6 +314,8 @@ struct crypto_alg {
*/ */
int crypto_register_alg(struct crypto_alg *alg); int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg); int crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
int crypto_unregister_algs(struct crypto_alg *algs, int count);
/* /*
* Algorithm query interface. * Algorithm query interface.
......
...@@ -46,7 +46,6 @@ struct padata_priv { ...@@ -46,7 +46,6 @@ struct padata_priv {
struct list_head list; struct list_head list;
struct parallel_data *pd; struct parallel_data *pd;
int cb_cpu; int cb_cpu;
int seq_nr;
int info; int info;
void (*parallel)(struct padata_priv *padata); void (*parallel)(struct padata_priv *padata);
void (*serial)(struct padata_priv *padata); void (*serial)(struct padata_priv *padata);
...@@ -116,7 +115,6 @@ struct padata_cpumask { ...@@ -116,7 +115,6 @@ struct padata_cpumask {
* @pinst: padata instance. * @pinst: padata instance.
* @pqueue: percpu padata queues used for parallelization. * @pqueue: percpu padata queues used for parallelization.
 * @squeue: percpu padata queues used for serialization. * @squeue: percpu padata queues used for serialization.
* @seq_nr: The sequence number that will be attached to the next object.
* @reorder_objects: Number of objects waiting in the reorder queues. * @reorder_objects: Number of objects waiting in the reorder queues.
* @refcnt: Number of objects holding a reference on this parallel_data. * @refcnt: Number of objects holding a reference on this parallel_data.
* @max_seq_nr: Maximal used sequence number. * @max_seq_nr: Maximal used sequence number.
...@@ -129,12 +127,12 @@ struct parallel_data { ...@@ -129,12 +127,12 @@ struct parallel_data {
struct padata_instance *pinst; struct padata_instance *pinst;
struct padata_parallel_queue __percpu *pqueue; struct padata_parallel_queue __percpu *pqueue;
struct padata_serial_queue __percpu *squeue; struct padata_serial_queue __percpu *squeue;
atomic_t seq_nr;
atomic_t reorder_objects; atomic_t reorder_objects;
atomic_t refcnt; atomic_t refcnt;
unsigned int max_seq_nr;
struct padata_cpumask cpumask; struct padata_cpumask cpumask;
spinlock_t lock ____cacheline_aligned; spinlock_t lock ____cacheline_aligned;
spinlock_t seq_lock;
unsigned int seq_nr;
unsigned int processed; unsigned int processed;
struct timer_list timer; struct timer_list timer;
}; };
......
...@@ -29,7 +29,6 @@ ...@@ -29,7 +29,6 @@
#include <linux/sysfs.h> #include <linux/sysfs.h>
#include <linux/rcupdate.h> #include <linux/rcupdate.h>
#define MAX_SEQ_NR (INT_MAX - NR_CPUS)
#define MAX_OBJ_NUM 1000 #define MAX_OBJ_NUM 1000
static int padata_index_to_cpu(struct parallel_data *pd, int cpu_index) static int padata_index_to_cpu(struct parallel_data *pd, int cpu_index)
...@@ -43,18 +42,19 @@ static int padata_index_to_cpu(struct parallel_data *pd, int cpu_index) ...@@ -43,18 +42,19 @@ static int padata_index_to_cpu(struct parallel_data *pd, int cpu_index)
return target_cpu; return target_cpu;
} }
static int padata_cpu_hash(struct padata_priv *padata) static int padata_cpu_hash(struct parallel_data *pd)
{ {
int cpu_index; int cpu_index;
struct parallel_data *pd;
pd = padata->pd;
/* /*
* Hash the sequence numbers to the cpus by taking * Hash the sequence numbers to the cpus by taking
* seq_nr mod. number of cpus in use. * seq_nr mod. number of cpus in use.
*/ */
cpu_index = padata->seq_nr % cpumask_weight(pd->cpumask.pcpu);
spin_lock(&pd->seq_lock);
cpu_index = pd->seq_nr % cpumask_weight(pd->cpumask.pcpu);
pd->seq_nr++;
spin_unlock(&pd->seq_lock);
return padata_index_to_cpu(pd, cpu_index); return padata_index_to_cpu(pd, cpu_index);
} }
...@@ -132,12 +132,7 @@ int padata_do_parallel(struct padata_instance *pinst, ...@@ -132,12 +132,7 @@ int padata_do_parallel(struct padata_instance *pinst,
padata->pd = pd; padata->pd = pd;
padata->cb_cpu = cb_cpu; padata->cb_cpu = cb_cpu;
if (unlikely(atomic_read(&pd->seq_nr) == pd->max_seq_nr)) target_cpu = padata_cpu_hash(pd);
atomic_set(&pd->seq_nr, -1);
padata->seq_nr = atomic_inc_return(&pd->seq_nr);
target_cpu = padata_cpu_hash(padata);
queue = per_cpu_ptr(pd->pqueue, target_cpu); queue = per_cpu_ptr(pd->pqueue, target_cpu);
spin_lock(&queue->parallel.lock); spin_lock(&queue->parallel.lock);
...@@ -173,7 +168,7 @@ EXPORT_SYMBOL(padata_do_parallel); ...@@ -173,7 +168,7 @@ EXPORT_SYMBOL(padata_do_parallel);
static struct padata_priv *padata_get_next(struct parallel_data *pd) static struct padata_priv *padata_get_next(struct parallel_data *pd)
{ {
int cpu, num_cpus; int cpu, num_cpus;
int next_nr, next_index; unsigned int next_nr, next_index;
struct padata_parallel_queue *queue, *next_queue; struct padata_parallel_queue *queue, *next_queue;
struct padata_priv *padata; struct padata_priv *padata;
struct padata_list *reorder; struct padata_list *reorder;
...@@ -189,14 +184,6 @@ static struct padata_priv *padata_get_next(struct parallel_data *pd) ...@@ -189,14 +184,6 @@ static struct padata_priv *padata_get_next(struct parallel_data *pd)
cpu = padata_index_to_cpu(pd, next_index); cpu = padata_index_to_cpu(pd, next_index);
next_queue = per_cpu_ptr(pd->pqueue, cpu); next_queue = per_cpu_ptr(pd->pqueue, cpu);
if (unlikely(next_nr > pd->max_seq_nr)) {
next_nr = next_nr - pd->max_seq_nr - 1;
next_index = next_nr % num_cpus;
cpu = padata_index_to_cpu(pd, next_index);
next_queue = per_cpu_ptr(pd->pqueue, cpu);
pd->processed = 0;
}
padata = NULL; padata = NULL;
reorder = &next_queue->reorder; reorder = &next_queue->reorder;
...@@ -205,8 +192,6 @@ static struct padata_priv *padata_get_next(struct parallel_data *pd) ...@@ -205,8 +192,6 @@ static struct padata_priv *padata_get_next(struct parallel_data *pd)
padata = list_entry(reorder->list.next, padata = list_entry(reorder->list.next,
struct padata_priv, list); struct padata_priv, list);
BUG_ON(next_nr != padata->seq_nr);
spin_lock(&reorder->lock); spin_lock(&reorder->lock);
list_del_init(&padata->list); list_del_init(&padata->list);
atomic_dec(&pd->reorder_objects); atomic_dec(&pd->reorder_objects);
...@@ -230,6 +215,7 @@ static struct padata_priv *padata_get_next(struct parallel_data *pd) ...@@ -230,6 +215,7 @@ static struct padata_priv *padata_get_next(struct parallel_data *pd)
static void padata_reorder(struct parallel_data *pd) static void padata_reorder(struct parallel_data *pd)
{ {
int cb_cpu;
struct padata_priv *padata; struct padata_priv *padata;
struct padata_serial_queue *squeue; struct padata_serial_queue *squeue;
struct padata_instance *pinst = pd->pinst; struct padata_instance *pinst = pd->pinst;
...@@ -270,13 +256,14 @@ static void padata_reorder(struct parallel_data *pd) ...@@ -270,13 +256,14 @@ static void padata_reorder(struct parallel_data *pd)
return; return;
} }
squeue = per_cpu_ptr(pd->squeue, padata->cb_cpu); cb_cpu = padata->cb_cpu;
squeue = per_cpu_ptr(pd->squeue, cb_cpu);
spin_lock(&squeue->serial.lock); spin_lock(&squeue->serial.lock);
list_add_tail(&padata->list, &squeue->serial.list); list_add_tail(&padata->list, &squeue->serial.list);
spin_unlock(&squeue->serial.lock); spin_unlock(&squeue->serial.lock);
queue_work_on(padata->cb_cpu, pinst->wq, &squeue->work); queue_work_on(cb_cpu, pinst->wq, &squeue->work);
} }
spin_unlock_bh(&pd->lock); spin_unlock_bh(&pd->lock);
...@@ -400,7 +387,7 @@ static void padata_init_squeues(struct parallel_data *pd) ...@@ -400,7 +387,7 @@ static void padata_init_squeues(struct parallel_data *pd)
/* Initialize all percpu queues used by parallel workers */ /* Initialize all percpu queues used by parallel workers */
static void padata_init_pqueues(struct parallel_data *pd) static void padata_init_pqueues(struct parallel_data *pd)
{ {
int cpu_index, num_cpus, cpu; int cpu_index, cpu;
struct padata_parallel_queue *pqueue; struct padata_parallel_queue *pqueue;
cpu_index = 0; cpu_index = 0;
...@@ -415,9 +402,6 @@ static void padata_init_pqueues(struct parallel_data *pd) ...@@ -415,9 +402,6 @@ static void padata_init_pqueues(struct parallel_data *pd)
INIT_WORK(&pqueue->work, padata_parallel_worker); INIT_WORK(&pqueue->work, padata_parallel_worker);
atomic_set(&pqueue->num_obj, 0); atomic_set(&pqueue->num_obj, 0);
} }
num_cpus = cpumask_weight(pd->cpumask.pcpu);
pd->max_seq_nr = num_cpus ? (MAX_SEQ_NR / num_cpus) * num_cpus - 1 : 0;
} }
/* Allocate and initialize the internal cpumask dependent resources. */ /* Allocate and initialize the internal cpumask dependent resources. */
...@@ -444,7 +428,7 @@ static struct parallel_data *padata_alloc_pd(struct padata_instance *pinst, ...@@ -444,7 +428,7 @@ static struct parallel_data *padata_alloc_pd(struct padata_instance *pinst,
padata_init_pqueues(pd); padata_init_pqueues(pd);
padata_init_squeues(pd); padata_init_squeues(pd);
setup_timer(&pd->timer, padata_reorder_timer, (unsigned long)pd); setup_timer(&pd->timer, padata_reorder_timer, (unsigned long)pd);
atomic_set(&pd->seq_nr, -1); pd->seq_nr = 0;
atomic_set(&pd->reorder_objects, 0); atomic_set(&pd->reorder_objects, 0);
atomic_set(&pd->refcnt, 0); atomic_set(&pd->refcnt, 0);
pd->pinst = pinst; pd->pinst = pinst;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment