Commit f6bccf69 authored by Linus Torvalds's avatar Linus Torvalds

Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
  crypto: skcipher - Use RNG interface instead of get_random_bytes
  crypto: rng - RNG interface and implementation
  crypto: api - Add fips_enable flag
  crypto: skcipher - Move IV generators into their own modules
  crypto: cryptomgr - Test ciphers using ECB
  crypto: api - Use test infrastructure
  crypto: cryptomgr - Add test infrastructure
  crypto: tcrypt - Add alg_test interface
  crypto: tcrypt - Abort and only log if there is an error
  crypto: crc32c - Use Intel CRC32 instruction
  crypto: tcrypt - Avoid using contiguous pages
  crypto: api - Display larval objects properly
  crypto: api - Export crypto_alg_lookup instead of __crypto_alg_lookup
  crypto: Kconfig - Replace leading spaces with tabs
parents 3af73d39 a0f000ec
...@@ -10,6 +10,8 @@ obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o ...@@ -10,6 +10,8 @@ obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o
obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o
obj-$(CONFIG_CRYPTO_SALSA20_X86_64) += salsa20-x86_64.o obj-$(CONFIG_CRYPTO_SALSA20_X86_64) += salsa20-x86_64.o
obj-$(CONFIG_CRYPTO_CRC32C_INTEL) += crc32c-intel.o
aes-i586-y := aes-i586-asm_32.o aes_glue.o aes-i586-y := aes-i586-asm_32.o aes_glue.o
twofish-i586-y := twofish-i586-asm_32.o twofish_glue.o twofish-i586-y := twofish-i586-asm_32.o twofish_glue.o
salsa20-i586-y := salsa20-i586-asm_32.o salsa20_glue.o salsa20-i586-y := salsa20-i586-asm_32.o salsa20_glue.o
......
/*
* Using the hardware-provided CRC32 instruction to accelerate CRC32 computation.
* CRC32C polynomial:0x1EDC6F41(BE)/0x82F63B78(LE)
* CRC32 is a new instruction in Intel SSE4.2, the reference can be found at:
* http://www.intel.com/products/processor/manuals/
* Intel(R) 64 and IA-32 Architectures Software Developer's Manual
* Volume 2A: Instruction Set Reference, A-M
*
* Copyright (c) 2008 Austin Zhang <austin_zhang@linux.intel.com>
* Copyright (c) 2008 Kent Liu <kent.liu@intel.com>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
#include <linux/init.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/kernel.h>
#include <crypto/internal/hash.h>
#include <asm/cpufeature.h>
#define CHKSUM_BLOCK_SIZE 1
#define CHKSUM_DIGEST_SIZE 4
#define SCALE_F sizeof(unsigned long)
#ifdef CONFIG_X86_64
#define REX_PRE "0x48, "
#else
#define REX_PRE
#endif
/*
 * Fold @length bytes from @data into the running CRC32C value @crc,
 * one byte at a time, using the SSE4.2 CRC32 instruction emitted as
 * raw opcode bytes (0xf2 0x0f 0x38 0xf0 is the 8-bit-operand form).
 * Used for the tail of a buffer that is not a multiple of the word
 * size.  Returns the updated CRC.
 */
static u32 crc32c_intel_le_hw_byte(u32 crc, unsigned char const *data, size_t length)
{
	while (length--) {
		__asm__ __volatile__(
			".byte 0xf2, 0xf, 0x38, 0xf0, 0xf1"
			:"=S"(crc)			/* CRC accumulates in %esi */
			:"0"(crc), "c"(*data)		/* source byte in %cl */
		);
		data++;
	}

	return crc;
}
/*
 * Fold @len bytes from @p into @crc using the hardware CRC32
 * instruction, one machine word (SCALE_F == sizeof(unsigned long))
 * per instruction, then hand any trailing bytes to the byte-wise
 * helper.  REX_PRE supplies the REX.W prefix on x86-64 so a 64-bit
 * word is consumed per iteration there.  Returns the updated CRC.
 *
 * NOTE(review): @p is cast to unsigned long * without an alignment
 * check — presumably callers provide suitably aligned data; the CRC32
 * instruction itself tolerates unaligned operands on x86.
 */
static u32 __pure crc32c_intel_le_hw(u32 crc, unsigned char const *p, size_t len)
{
	unsigned int iquotient = len / SCALE_F;		/* whole words */
	unsigned int iremainder = len % SCALE_F;	/* leftover tail bytes */
	unsigned long *ptmp = (unsigned long *)p;

	while (iquotient--) {
		__asm__ __volatile__(
			".byte 0xf2, " REX_PRE "0xf, 0x38, 0xf1, 0xf1;"
			:"=S"(crc)
			:"0"(crc), "c"(*ptmp)
		);
		ptmp++;
	}

	if (iremainder)
		crc = crc32c_intel_le_hw_byte(crc, (unsigned char *)ptmp,
				iremainder);

	return crc;
}
/*
* Setting the seed allows arbitrary accumulators and flexible XOR policy
* If your algorithm starts with ~0, then XOR with ~0 before you set
* the seed.
*/
/*
 * Install a 4-byte little-endian seed as the CRC accumulator start
 * value for this tfm.  Rejects any other key length with
 * CRYPTO_TFM_RES_BAD_KEY_LEN / -EINVAL.
 */
static int crc32c_intel_setkey(struct crypto_ahash *hash, const u8 *key,
			unsigned int keylen)
{
	u32 *mctx = crypto_ahash_ctx(hash);

	if (keylen == sizeof(u32)) {
		*mctx = le32_to_cpup((__le32 *)key);
		return 0;
	}

	crypto_ahash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
static int crc32c_intel_init(struct ahash_request *req)
{
u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
u32 *crcp = ahash_request_ctx(req);
*crcp = *mctx;
return 0;
}
/*
 * Fold the request's scatterlist data into the per-request CRC state,
 * walking it chunk by chunk with the crypto hash-walk helpers.
 */
static int crc32c_intel_update(struct ahash_request *req)
{
	struct crypto_hash_walk walk;
	u32 *crcp = ahash_request_ctx(req);
	u32 crc = *crcp;
	int nbytes = crypto_hash_walk_first(req, &walk);

	while (nbytes) {
		crc = crc32c_intel_le_hw(crc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, 0);
	}

	*crcp = crc;
	return 0;
}
static int crc32c_intel_final(struct ahash_request *req)
{
u32 *crcp = ahash_request_ctx(req);
*(__le32 *)req->result = ~cpu_to_le32p(crcp);
return 0;
}
/*
 * One-shot digest: init from the tfm seed, fold all scatterlist data,
 * and write the complemented little-endian result — equivalent to
 * init + update + final without touching the request context.
 */
static int crc32c_intel_digest(struct ahash_request *req)
{
	struct crypto_hash_walk walk;
	u32 *seed = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	u32 crc = *seed;
	int nbytes = crypto_hash_walk_first(req, &walk);

	while (nbytes) {
		crc = crc32c_intel_le_hw(crc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, 0);
	}

	*(__le32 *)req->result = ~cpu_to_le32(crc);
	return 0;
}
/*
 * Per-tfm constructor: reserve room for the 4-byte per-request CRC
 * state and seed the context with ~0, the conventional CRC32C start
 * value (complemented again on final).
 */
static int crc32c_intel_cra_init(struct crypto_tfm *tfm)
{
	u32 *default_seed = crypto_tfm_ctx(tfm);

	tfm->crt_ahash.reqsize = sizeof(u32);
	*default_seed = ~0;

	return 0;
}
/*
 * Async-hash algorithm descriptor.  Priority 200 places this above
 * the generic software crc32c so it wins the lookup when registered.
 */
static struct crypto_alg alg = {
	.cra_name		= "crc32c",
	.cra_driver_name	= "crc32c-intel",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_AHASH,
	.cra_blocksize		= CHKSUM_BLOCK_SIZE,	/* byte-stream: block size 1 */
	.cra_alignmask		= 3,			/* 4-byte aligned input */
	.cra_ctxsize		= sizeof(u32),		/* tfm ctx holds the seed */
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(alg.cra_list),
	.cra_init		= crc32c_intel_cra_init,
	.cra_type		= &crypto_ahash_type,
	.cra_u			= {
		.ahash = {
			.digestsize	= CHKSUM_DIGEST_SIZE,	/* 4-byte CRC */
			.setkey		= crc32c_intel_setkey,
			.init		= crc32c_intel_init,
			.update		= crc32c_intel_update,
			.final		= crc32c_intel_final,
			.digest		= crc32c_intel_digest,
		}
	}
};
/*
 * Module init: register the accelerated crc32c only when the CPU
 * advertises SSE4.2 (which provides the CRC32 instruction).
 */
static int __init crc32c_intel_mod_init(void)
{
	if (!cpu_has_xmm4_2)
		return -ENODEV;

	return crypto_register_alg(&alg);
}
/* Module exit: unregister the algorithm registered in mod_init. */
static void __exit crc32c_intel_mod_fini(void)
{
	crypto_unregister_alg(&alg);
}
module_init(crc32c_intel_mod_init);
module_exit(crc32c_intel_mod_fini);

/* Module metadata; the aliases let requests for "crc32c" load this driver. */
MODULE_AUTHOR("Austin Zhang <austin.zhang@intel.com>, Kent Liu <kent.liu@intel.com>");
MODULE_DESCRIPTION("CRC32c (Castagnoli) optimization using Intel Hardware.");
MODULE_LICENSE("GPL");

MODULE_ALIAS("crc32c");
MODULE_ALIAS("crc32c-intel");
...@@ -21,6 +21,14 @@ if CRYPTO ...@@ -21,6 +21,14 @@ if CRYPTO
comment "Crypto core or helper" comment "Crypto core or helper"
config CRYPTO_FIPS
bool "FIPS 200 compliance"
help
	  This option enables the fips boot option, which is
	  required if you want the system to operate in accordance
	  with FIPS 200 certification. You should say no unless
	  you know what this is.
config CRYPTO_ALGAPI config CRYPTO_ALGAPI
tristate tristate
help help
...@@ -33,14 +41,21 @@ config CRYPTO_AEAD ...@@ -33,14 +41,21 @@ config CRYPTO_AEAD
config CRYPTO_BLKCIPHER config CRYPTO_BLKCIPHER
tristate tristate
select CRYPTO_ALGAPI select CRYPTO_ALGAPI
select CRYPTO_RNG
config CRYPTO_HASH config CRYPTO_HASH
tristate tristate
select CRYPTO_ALGAPI select CRYPTO_ALGAPI
config CRYPTO_RNG
tristate
select CRYPTO_ALGAPI
config CRYPTO_MANAGER config CRYPTO_MANAGER
tristate "Cryptographic algorithm manager" tristate "Cryptographic algorithm manager"
select CRYPTO_ALGAPI select CRYPTO_AEAD
select CRYPTO_HASH
select CRYPTO_BLKCIPHER
help help
Create default cryptographic template instantiations such as Create default cryptographic template instantiations such as
cbc(aes). cbc(aes).
...@@ -85,9 +100,7 @@ config CRYPTO_AUTHENC ...@@ -85,9 +100,7 @@ config CRYPTO_AUTHENC
config CRYPTO_TEST config CRYPTO_TEST
tristate "Testing module" tristate "Testing module"
depends on m depends on m
select CRYPTO_ALGAPI select CRYPTO_MANAGER
select CRYPTO_AEAD
select CRYPTO_BLKCIPHER
help help
Quick & dirty crypto test module. Quick & dirty crypto test module.
...@@ -113,6 +126,7 @@ config CRYPTO_SEQIV ...@@ -113,6 +126,7 @@ config CRYPTO_SEQIV
tristate "Sequence Number IV Generator" tristate "Sequence Number IV Generator"
select CRYPTO_AEAD select CRYPTO_AEAD
select CRYPTO_BLKCIPHER select CRYPTO_BLKCIPHER
select CRYPTO_RNG
help help
This IV generator generates an IV based on a sequence number by This IV generator generates an IV based on a sequence number by
xoring it with a salt. This algorithm is mainly useful for CTR xoring it with a salt. This algorithm is mainly useful for CTR
...@@ -221,6 +235,18 @@ config CRYPTO_CRC32C ...@@ -221,6 +235,18 @@ config CRYPTO_CRC32C
See Castagnoli93. This implementation uses lib/libcrc32c. See Castagnoli93. This implementation uses lib/libcrc32c.
Module will be crc32c. Module will be crc32c.
config CRYPTO_CRC32C_INTEL
tristate "CRC32c INTEL hardware acceleration"
depends on X86
select CRYPTO_HASH
help
	  On Intel processors with SSE4.2 support, CRC32C can be
	  computed using the hardware-accelerated CRC32 instruction.
	  This option builds the 'crc32c-intel' module, which lets
	  any user of the crypto API benefit from the CRC32
	  instruction's performance over the software implementation.
Module will be crc32c-intel.
config CRYPTO_MD4 config CRYPTO_MD4
tristate "MD4 digest algorithm" tristate "MD4 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_ALGAPI
...@@ -263,10 +289,11 @@ config CRYPTO_RMD160 ...@@ -263,10 +289,11 @@ config CRYPTO_RMD160
RIPEMD-160 is a 160-bit cryptographic hash function. It is intended RIPEMD-160 is a 160-bit cryptographic hash function. It is intended
to be used as a secure replacement for the 128-bit hash functions to be used as a secure replacement for the 128-bit hash functions
MD4, MD5 and it's predecessor RIPEMD (not to be confused with RIPEMD-128). MD4, MD5 and it's predecessor RIPEMD
(not to be confused with RIPEMD-128).
It's speed is comparable to SHA1 and there are no known attacks against It's speed is comparable to SHA1 and there are no known attacks
RIPEMD-160. against RIPEMD-160.
Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel. Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel.
See <http://home.esat.kuleuven.be/~bosselae/ripemd160.html> See <http://home.esat.kuleuven.be/~bosselae/ripemd160.html>
...@@ -275,9 +302,10 @@ config CRYPTO_RMD256 ...@@ -275,9 +302,10 @@ config CRYPTO_RMD256
tristate "RIPEMD-256 digest algorithm" tristate "RIPEMD-256 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_ALGAPI
help help
RIPEMD-256 is an optional extension of RIPEMD-128 with a 256 bit hash. RIPEMD-256 is an optional extension of RIPEMD-128 with a
It is intended for applications that require longer hash-results, without 256 bit hash. It is intended for applications that require
needing a larger security level (than RIPEMD-128). longer hash-results, without needing a larger security level
(than RIPEMD-128).
Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel. Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel.
See <http://home.esat.kuleuven.be/~bosselae/ripemd160.html> See <http://home.esat.kuleuven.be/~bosselae/ripemd160.html>
...@@ -286,9 +314,10 @@ config CRYPTO_RMD320 ...@@ -286,9 +314,10 @@ config CRYPTO_RMD320
tristate "RIPEMD-320 digest algorithm" tristate "RIPEMD-320 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_ALGAPI
help help
RIPEMD-320 is an optional extension of RIPEMD-160 with a 320 bit hash. RIPEMD-320 is an optional extension of RIPEMD-160 with a
It is intended for applications that require longer hash-results, without 320 bit hash. It is intended for applications that require
needing a larger security level (than RIPEMD-160). longer hash-results, without needing a larger security level
(than RIPEMD-160).
Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel. Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel.
See <http://home.esat.kuleuven.be/~bosselae/ripemd160.html> See <http://home.esat.kuleuven.be/~bosselae/ripemd160.html>
...@@ -666,6 +695,18 @@ config CRYPTO_LZO ...@@ -666,6 +695,18 @@ config CRYPTO_LZO
help help
This is the LZO algorithm. This is the LZO algorithm.
comment "Random Number Generation"
config CRYPTO_ANSI_CPRNG
tristate "Pseudo Random Number Generation for Cryptographic modules"
select CRYPTO_AES
select CRYPTO_RNG
select CRYPTO_FIPS
help
This option enables the generic pseudo random number generator
for cryptographic modules. Uses the Algorithm specified in
ANSI X9.31 A.2.4
source "drivers/crypto/Kconfig" source "drivers/crypto/Kconfig"
endif # if CRYPTO endif # if CRYPTO
...@@ -5,6 +5,8 @@ ...@@ -5,6 +5,8 @@
obj-$(CONFIG_CRYPTO) += crypto.o obj-$(CONFIG_CRYPTO) += crypto.o
crypto-objs := api.o cipher.o digest.o compress.o crypto-objs := api.o cipher.o digest.o compress.o
obj-$(CONFIG_CRYPTO_FIPS) += fips.o
crypto_algapi-$(CONFIG_PROC_FS) += proc.o crypto_algapi-$(CONFIG_PROC_FS) += proc.o
crypto_algapi-objs := algapi.o scatterwalk.o $(crypto_algapi-y) crypto_algapi-objs := algapi.o scatterwalk.o $(crypto_algapi-y)
obj-$(CONFIG_CRYPTO_ALGAPI) += crypto_algapi.o obj-$(CONFIG_CRYPTO_ALGAPI) += crypto_algapi.o
...@@ -13,15 +15,17 @@ obj-$(CONFIG_CRYPTO_AEAD) += aead.o ...@@ -13,15 +15,17 @@ obj-$(CONFIG_CRYPTO_AEAD) += aead.o
crypto_blkcipher-objs := ablkcipher.o crypto_blkcipher-objs := ablkcipher.o
crypto_blkcipher-objs += blkcipher.o crypto_blkcipher-objs += blkcipher.o
crypto_blkcipher-objs += chainiv.o
crypto_blkcipher-objs += eseqiv.o
obj-$(CONFIG_CRYPTO_BLKCIPHER) += crypto_blkcipher.o obj-$(CONFIG_CRYPTO_BLKCIPHER) += crypto_blkcipher.o
obj-$(CONFIG_CRYPTO_BLKCIPHER) += chainiv.o
obj-$(CONFIG_CRYPTO_BLKCIPHER) += eseqiv.o
obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
crypto_hash-objs := hash.o crypto_hash-objs := hash.o
crypto_hash-objs += ahash.o crypto_hash-objs += ahash.o
obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o
cryptomgr-objs := algboss.o testmgr.o
obj-$(CONFIG_CRYPTO_MANAGER) += cryptomgr.o obj-$(CONFIG_CRYPTO_MANAGER) += cryptomgr.o
obj-$(CONFIG_CRYPTO_HMAC) += hmac.o obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o
...@@ -69,7 +73,9 @@ obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o ...@@ -69,7 +73,9 @@ obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o
obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o
obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o
obj-$(CONFIG_CRYPTO_LZO) += lzo.o obj-$(CONFIG_CRYPTO_LZO) += lzo.o
obj-$(CONFIG_CRYPTO_RNG) += rng.o
obj-$(CONFIG_CRYPTO_RNG) += krng.o
obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o
obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o
# #
......
...@@ -21,15 +21,15 @@ ...@@ -21,15 +21,15 @@
#include "internal.h" #include "internal.h"
static void crypto_remove_final(struct list_head *list);
static LIST_HEAD(crypto_template_list); static LIST_HEAD(crypto_template_list);
void crypto_larval_error(const char *name, u32 type, u32 mask) void crypto_larval_error(const char *name, u32 type, u32 mask)
{ {
struct crypto_alg *alg; struct crypto_alg *alg;
down_read(&crypto_alg_sem); alg = crypto_alg_lookup(name, type, mask);
alg = __crypto_alg_lookup(name, type, mask);
up_read(&crypto_alg_sem);
if (alg) { if (alg) {
if (crypto_is_larval(alg)) { if (crypto_is_larval(alg)) {
...@@ -128,24 +128,98 @@ static void crypto_remove_spawns(struct list_head *spawns, ...@@ -128,24 +128,98 @@ static void crypto_remove_spawns(struct list_head *spawns,
} }
} }
static int __crypto_register_alg(struct crypto_alg *alg, static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
struct list_head *list)
{ {
struct crypto_alg *q; struct crypto_alg *q;
struct crypto_larval *larval;
int ret = -EAGAIN; int ret = -EAGAIN;
if (crypto_is_dead(alg)) if (crypto_is_dead(alg))
goto out; goto err;
INIT_LIST_HEAD(&alg->cra_users); INIT_LIST_HEAD(&alg->cra_users);
/* No cheating! */
alg->cra_flags &= ~CRYPTO_ALG_TESTED;
ret = -EEXIST; ret = -EEXIST;
atomic_set(&alg->cra_refcnt, 1); atomic_set(&alg->cra_refcnt, 1);
list_for_each_entry(q, &crypto_alg_list, cra_list) { list_for_each_entry(q, &crypto_alg_list, cra_list) {
if (q == alg) if (q == alg)
goto err;
if (crypto_is_larval(q)) {
if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
goto err;
continue;
}
if (!strcmp(q->cra_driver_name, alg->cra_name) ||
!strcmp(q->cra_name, alg->cra_driver_name))
goto err;
}
larval = crypto_larval_alloc(alg->cra_name,
alg->cra_flags | CRYPTO_ALG_TESTED, 0);
if (IS_ERR(larval))
goto out; goto out;
ret = -ENOENT;
larval->adult = crypto_mod_get(alg);
if (!larval->adult)
goto free_larval;
atomic_set(&larval->alg.cra_refcnt, 1);
memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
CRYPTO_MAX_ALG_NAME);
larval->alg.cra_priority = alg->cra_priority;
list_add(&alg->cra_list, &crypto_alg_list);
list_add(&larval->alg.cra_list, &crypto_alg_list);
out:
return larval;
free_larval:
kfree(larval);
err:
larval = ERR_PTR(ret);
goto out;
}
void crypto_alg_tested(const char *name, int err)
{
struct crypto_larval *test;
struct crypto_alg *alg;
struct crypto_alg *q;
LIST_HEAD(list);
down_write(&crypto_alg_sem);
list_for_each_entry(q, &crypto_alg_list, cra_list) {
if (!crypto_is_larval(q))
continue;
test = (struct crypto_larval *)q;
if (!strcmp(q->cra_driver_name, name))
goto found;
}
printk(KERN_ERR "alg: Unexpected test result for %s: %d\n", name, err);
goto unlock;
found:
alg = test->adult;
if (err || list_empty(&alg->cra_list))
goto complete;
alg->cra_flags |= CRYPTO_ALG_TESTED;
list_for_each_entry(q, &crypto_alg_list, cra_list) {
if (q == alg)
continue;
if (crypto_is_moribund(q)) if (crypto_is_moribund(q))
continue; continue;
...@@ -180,17 +254,18 @@ static int __crypto_register_alg(struct crypto_alg *alg, ...@@ -180,17 +254,18 @@ static int __crypto_register_alg(struct crypto_alg *alg,
q->cra_priority > alg->cra_priority) q->cra_priority > alg->cra_priority)
continue; continue;
crypto_remove_spawns(&q->cra_users, list, alg->cra_flags); crypto_remove_spawns(&q->cra_users, &list, alg->cra_flags);
} }
list_add(&alg->cra_list, &crypto_alg_list); complete:
complete_all(&test->completion);
crypto_notify(CRYPTO_MSG_ALG_REGISTER, alg); unlock:
ret = 0; up_write(&crypto_alg_sem);
out: crypto_remove_final(&list);
return ret;
} }
EXPORT_SYMBOL_GPL(crypto_alg_tested);
static void crypto_remove_final(struct list_head *list) static void crypto_remove_final(struct list_head *list)
{ {
...@@ -203,9 +278,27 @@ static void crypto_remove_final(struct list_head *list) ...@@ -203,9 +278,27 @@ static void crypto_remove_final(struct list_head *list)
} }
} }
static void crypto_wait_for_test(struct crypto_larval *larval)
{
int err;
err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
if (err != NOTIFY_STOP) {
if (WARN_ON(err != NOTIFY_DONE))
goto out;
crypto_alg_tested(larval->alg.cra_driver_name, 0);
}
err = wait_for_completion_interruptible(&larval->completion);
WARN_ON(err);
out:
crypto_larval_kill(&larval->alg);
}
int crypto_register_alg(struct crypto_alg *alg) int crypto_register_alg(struct crypto_alg *alg)
{ {
LIST_HEAD(list); struct crypto_larval *larval;
int err; int err;
err = crypto_check_alg(alg); err = crypto_check_alg(alg);
...@@ -213,11 +306,14 @@ int crypto_register_alg(struct crypto_alg *alg) ...@@ -213,11 +306,14 @@ int crypto_register_alg(struct crypto_alg *alg)
return err; return err;
down_write(&crypto_alg_sem); down_write(&crypto_alg_sem);
err = __crypto_register_alg(alg, &list); larval = __crypto_register_alg(alg);
up_write(&crypto_alg_sem); up_write(&crypto_alg_sem);
crypto_remove_final(&list); if (IS_ERR(larval))
return err; return PTR_ERR(larval);
crypto_wait_for_test(larval);
return 0;
} }
EXPORT_SYMBOL_GPL(crypto_register_alg); EXPORT_SYMBOL_GPL(crypto_register_alg);
...@@ -335,8 +431,8 @@ EXPORT_SYMBOL_GPL(crypto_lookup_template); ...@@ -335,8 +431,8 @@ EXPORT_SYMBOL_GPL(crypto_lookup_template);
int crypto_register_instance(struct crypto_template *tmpl, int crypto_register_instance(struct crypto_template *tmpl,
struct crypto_instance *inst) struct crypto_instance *inst)
{ {
LIST_HEAD(list); struct crypto_larval *larval;
int err = -EINVAL; int err;
err = crypto_check_alg(&inst->alg); err = crypto_check_alg(&inst->alg);
if (err) if (err)
...@@ -346,8 +442,8 @@ int crypto_register_instance(struct crypto_template *tmpl, ...@@ -346,8 +442,8 @@ int crypto_register_instance(struct crypto_template *tmpl,
down_write(&crypto_alg_sem); down_write(&crypto_alg_sem);
err = __crypto_register_alg(&inst->alg, &list); larval = __crypto_register_alg(&inst->alg);
if (err) if (IS_ERR(larval))
goto unlock; goto unlock;
hlist_add_head(&inst->list, &tmpl->instances); hlist_add_head(&inst->list, &tmpl->instances);
...@@ -356,7 +452,12 @@ int crypto_register_instance(struct crypto_template *tmpl, ...@@ -356,7 +452,12 @@ int crypto_register_instance(struct crypto_template *tmpl,
unlock: unlock:
up_write(&crypto_alg_sem); up_write(&crypto_alg_sem);
crypto_remove_final(&list); err = PTR_ERR(larval);
if (IS_ERR(larval))
goto err;
crypto_wait_for_test(larval);
err = 0;
err: err:
return err; return err;
......
...@@ -45,6 +45,15 @@ struct cryptomgr_param { ...@@ -45,6 +45,15 @@ struct cryptomgr_param {
char larval[CRYPTO_MAX_ALG_NAME]; char larval[CRYPTO_MAX_ALG_NAME];
char template[CRYPTO_MAX_ALG_NAME]; char template[CRYPTO_MAX_ALG_NAME];
u32 otype;
u32 omask;
};
struct crypto_test_param {
char driver[CRYPTO_MAX_ALG_NAME];
char alg[CRYPTO_MAX_ALG_NAME];
u32 type;
}; };
static int cryptomgr_probe(void *data) static int cryptomgr_probe(void *data)
...@@ -76,8 +85,7 @@ static int cryptomgr_probe(void *data) ...@@ -76,8 +85,7 @@ static int cryptomgr_probe(void *data)
module_put_and_exit(0); module_put_and_exit(0);
err: err:
crypto_larval_error(param->larval, param->type.data.type, crypto_larval_error(param->larval, param->otype, param->omask);
param->type.data.mask);
goto out; goto out;
} }
...@@ -169,13 +177,65 @@ static int cryptomgr_schedule_probe(struct crypto_larval *larval) ...@@ -169,13 +177,65 @@ static int cryptomgr_schedule_probe(struct crypto_larval *larval)
param->type.attr.rta_len = sizeof(param->type); param->type.attr.rta_len = sizeof(param->type);
param->type.attr.rta_type = CRYPTOA_TYPE; param->type.attr.rta_type = CRYPTOA_TYPE;
param->type.data.type = larval->alg.cra_flags; param->type.data.type = larval->alg.cra_flags & ~CRYPTO_ALG_TESTED;
param->type.data.mask = larval->mask; param->type.data.mask = larval->mask & ~CRYPTO_ALG_TESTED;
param->tb[0] = &param->type.attr; param->tb[0] = &param->type.attr;
param->otype = larval->alg.cra_flags;
param->omask = larval->mask;
memcpy(param->larval, larval->alg.cra_name, CRYPTO_MAX_ALG_NAME); memcpy(param->larval, larval->alg.cra_name, CRYPTO_MAX_ALG_NAME);
thread = kthread_run(cryptomgr_probe, param, "cryptomgr"); thread = kthread_run(cryptomgr_probe, param, "cryptomgr_probe");
if (IS_ERR(thread))
goto err_free_param;
return NOTIFY_STOP;
err_free_param:
kfree(param);
err_put_module:
module_put(THIS_MODULE);
err:
return NOTIFY_OK;
}
static int cryptomgr_test(void *data)
{
struct crypto_test_param *param = data;
u32 type = param->type;
int err = 0;
if (!((type ^ CRYPTO_ALG_TYPE_BLKCIPHER) &
CRYPTO_ALG_TYPE_BLKCIPHER_MASK) && !(type & CRYPTO_ALG_GENIV))
goto skiptest;
err = alg_test(param->driver, param->alg, type, CRYPTO_ALG_TESTED);
skiptest:
crypto_alg_tested(param->driver, err);
kfree(param);
module_put_and_exit(0);
}
static int cryptomgr_schedule_test(struct crypto_alg *alg)
{
struct task_struct *thread;
struct crypto_test_param *param;
if (!try_module_get(THIS_MODULE))
goto err;
param = kzalloc(sizeof(*param), GFP_KERNEL);
if (!param)
goto err_put_module;
memcpy(param->driver, alg->cra_driver_name, sizeof(param->driver));
memcpy(param->alg, alg->cra_name, sizeof(param->alg));
param->type = alg->cra_flags;
thread = kthread_run(cryptomgr_test, param, "cryptomgr_test");
if (IS_ERR(thread)) if (IS_ERR(thread))
goto err_free_param; goto err_free_param;
...@@ -195,6 +255,8 @@ static int cryptomgr_notify(struct notifier_block *this, unsigned long msg, ...@@ -195,6 +255,8 @@ static int cryptomgr_notify(struct notifier_block *this, unsigned long msg,
switch (msg) { switch (msg) {
case CRYPTO_MSG_ALG_REQUEST: case CRYPTO_MSG_ALG_REQUEST:
return cryptomgr_schedule_probe(data); return cryptomgr_schedule_probe(data);
case CRYPTO_MSG_ALG_REGISTER:
return cryptomgr_schedule_test(data);
} }
return NOTIFY_DONE; return NOTIFY_DONE;
...@@ -206,16 +268,32 @@ static struct notifier_block cryptomgr_notifier = { ...@@ -206,16 +268,32 @@ static struct notifier_block cryptomgr_notifier = {
static int __init cryptomgr_init(void) static int __init cryptomgr_init(void)
{ {
return crypto_register_notifier(&cryptomgr_notifier); int err;
err = testmgr_init();
if (err)
return err;
err = crypto_register_notifier(&cryptomgr_notifier);
if (err)
goto free_testmgr;
return 0;
free_testmgr:
testmgr_exit();
return err;
} }
static void __exit cryptomgr_exit(void) static void __exit cryptomgr_exit(void)
{ {
int err = crypto_unregister_notifier(&cryptomgr_notifier); int err = crypto_unregister_notifier(&cryptomgr_notifier);
BUG_ON(err); BUG_ON(err);
testmgr_exit();
} }
module_init(cryptomgr_init); subsys_initcall(cryptomgr_init);
module_exit(cryptomgr_exit); module_exit(cryptomgr_exit);
MODULE_LICENSE("GPL"); MODULE_LICENSE("GPL");
......
/*
* PRNG: Pseudo Random Number Generator
* Based on NIST Recommended PRNG From ANSI X9.31 Appendix A.2.4 using
* AES 128 cipher
*
* (C) Neil Horman <nhorman@tuxdriver.com>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
*
*
*/
#include <crypto/internal/rng.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/moduleparam.h>
#include <linux/string.h>
#include "internal.h"
#define DEFAULT_PRNG_KEY "0123456789abcdef"
#define DEFAULT_PRNG_KSZ 16
#define DEFAULT_BLK_SZ 16
#define DEFAULT_V_SEED "zaybxcwdveuftgsh"
/*
* Flags for the prng_context flags field
*/
#define PRNG_FIXED_SIZE 0x1
#define PRNG_NEED_RESET 0x2
/*
* Note: DT is our counter value
* I is our intermediate value
* V is our seed vector
* See http://csrc.nist.gov/groups/STM/cavp/documents/rng/931rngext.pdf
* for implementation details
*/
/* Per-tfm state for the X9.31 generator. */
struct prng_context {
	spinlock_t prng_lock;				/* serializes all fields below */
	unsigned char rand_data[DEFAULT_BLK_SZ];	/* current output block */
	unsigned char last_rand_data[DEFAULT_BLK_SZ];	/* previous block, for repetition check */
	unsigned char DT[DEFAULT_BLK_SZ];		/* counter vector ("date/time") */
	unsigned char I[DEFAULT_BLK_SZ];		/* intermediate value */
	unsigned char V[DEFAULT_BLK_SZ];		/* seed vector */
	u32 rand_data_valid;				/* index of next unconsumed byte in rand_data */
	struct crypto_cipher *tfm;			/* AES cipher handle */
	u32 flags;					/* PRNG_FIXED_SIZE / PRNG_NEED_RESET */
};

/* Module parameter: non-zero enables the debug dumps below. */
static int dbg;
/* Debug helper: dump @len bytes of @buf, prefixed by @note, when dbg is set. */
static void hexdump(char *note, unsigned char *buf, unsigned int len)
{
	if (!dbg)
		return;

	printk(KERN_CRIT "%s", note);
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}
#define dbgprint(format, args...) do {\
if (dbg)\
printk(format, ##args);\
} while (0)
/*
 * XOR @size bytes of @in1 and @in2 into @out.  Buffers may not overlap
 * except when @out aliases one of the inputs exactly (as callers do).
 *
 * Fix: the loop index was a signed int compared against the unsigned
 * @size (sign-compare); use an unsigned index of the same type.
 */
static void xor_vectors(unsigned char *in1, unsigned char *in2,
			unsigned char *out, unsigned int size)
{
	unsigned int i;

	for (i = 0; i < size; i++)
		out[i] = in1[i] ^ in2[i];
}
/*
* Returns DEFAULT_BLK_SZ bytes of random data per call
 * returns 0 if generation succeeded, <0 if something went wrong
*/
/*
 * Generate one fresh DEFAULT_BLK_SZ-byte block of pseudo-random data
 * into ctx->rand_data, per ANSI X9.31 A.2.4:
 *
 *   I = E(DT); rand = E(I ^ V); V = E(rand ^ I); DT++
 *
 * where E() is AES with the context key.  On success the block is
 * marked fully unconsumed (rand_data_valid = 0) and 0 is returned.
 * If the new block equals the previous one (continuous self-test
 * failure), the context is flagged PRNG_NEED_RESET and -EINVAL is
 * returned.  Caller must hold ctx->prng_lock.
 */
static int _get_more_prng_bytes(struct prng_context *ctx)
{
	int i;
	unsigned char tmp[DEFAULT_BLK_SZ];
	unsigned char *output = NULL;

	dbgprint(KERN_CRIT "Calling _get_more_prng_bytes for context %p\n",
		ctx);

	hexdump("Input DT: ", ctx->DT, DEFAULT_BLK_SZ);
	hexdump("Input I: ", ctx->I, DEFAULT_BLK_SZ);
	hexdump("Input V: ", ctx->V, DEFAULT_BLK_SZ);

	/*
	 * This algorithm is a 3 stage state machine
	 */
	for (i = 0; i < 3; i++) {

		switch (i) {
		case 0:
			/*
			 * Start by encrypting the counter value
			 * This gives us an intermediate value I
			 */
			memcpy(tmp, ctx->DT, DEFAULT_BLK_SZ);
			output = ctx->I;
			hexdump("tmp stage 0: ", tmp, DEFAULT_BLK_SZ);
			break;
		case 1:

			/*
			 * Next xor I with our secret vector V
			 * encrypt that result to obtain our
			 * pseudo random data which we output
			 */
			xor_vectors(ctx->I, ctx->V, tmp, DEFAULT_BLK_SZ);
			hexdump("tmp stage 1: ", tmp, DEFAULT_BLK_SZ);
			output = ctx->rand_data;
			break;
		case 2:
			/*
			 * First check that we didn't produce the same
			 * random data that we did last time around through this
			 */
			if (!memcmp(ctx->rand_data, ctx->last_rand_data,
					DEFAULT_BLK_SZ)) {
				printk(KERN_ERR
					"ctx %p Failed repetition check!\n",
					ctx);
				ctx->flags |= PRNG_NEED_RESET;
				return -EINVAL;
			}
			memcpy(ctx->last_rand_data, ctx->rand_data,
				DEFAULT_BLK_SZ);

			/*
			 * Lastly xor the random data with I
			 * and encrypt that to obtain a new secret vector V
			 */
			xor_vectors(ctx->rand_data, ctx->I, tmp,
				DEFAULT_BLK_SZ);
			output = ctx->V;
			hexdump("tmp stage 2: ", tmp, DEFAULT_BLK_SZ);
			break;
		}

		/* do the encryption */
		crypto_cipher_encrypt_one(ctx->tfm, output, tmp);

	}

	/*
	 * Now update our DT value
	 */
	for (i = 0; i < DEFAULT_BLK_SZ; i++) {
		ctx->DT[i] += 1;
		if (ctx->DT[i] != 0)
			break;
	}

	dbgprint("Returning new block for context %p\n", ctx);
	ctx->rand_data_valid = 0;

	hexdump("Output DT: ", ctx->DT, DEFAULT_BLK_SZ);
	hexdump("Output I: ", ctx->I, DEFAULT_BLK_SZ);
	hexdump("Output V: ", ctx->V, DEFAULT_BLK_SZ);
	hexdump("New Random Data: ", ctx->rand_data, DEFAULT_BLK_SZ);

	return 0;
}
/* Our exported functions */
/*
 * Copy @nbytes of pseudo-random data into @buf from @ctx.
 *
 * Returns the number of bytes supplied on success, or -EINVAL when the
 * context needs a reset, a PRNG_FIXED_SIZE request is smaller than one
 * block, the request is too large, or block generation fails (in which
 * case @buf is zeroed).
 *
 * Fix: the old "if (nbytes < 0)" check was always false because size_t
 * is unsigned, while "(unsigned int)nbytes" silently truncated large
 * requests on 64-bit.  Reject requests that do not fit the int return
 * value instead.
 */
static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
{
	unsigned long flags;
	unsigned char *ptr = buf;
	unsigned int byte_count = (unsigned int)nbytes;
	int err;

	/* The byte count doubles as the return value; refuse sizes that
	 * would truncate or overflow it. */
	if (nbytes > INT_MAX)
		return -EINVAL;

	spin_lock_irqsave(&ctx->prng_lock, flags);

	err = -EINVAL;
	if (ctx->flags & PRNG_NEED_RESET)
		goto done;

	/*
	 * If the FIXED_SIZE flag is on, only return whole blocks of
	 * pseudo random data
	 */
	if (ctx->flags & PRNG_FIXED_SIZE) {
		if (nbytes < DEFAULT_BLK_SZ)
			goto done;
		byte_count = DEFAULT_BLK_SZ;
	}

	err = byte_count;

	dbgprint(KERN_CRIT "getting %d random bytes for context %p\n",
		byte_count, ctx);

remainder:
	/* Refill once the current block has been fully consumed. */
	if (ctx->rand_data_valid == DEFAULT_BLK_SZ) {
		if (_get_more_prng_bytes(ctx) < 0) {
			memset(buf, 0, nbytes);
			err = -EINVAL;
			goto done;
		}
	}

	/*
	 * Copy up to the next whole block size
	 */
	if (byte_count < DEFAULT_BLK_SZ) {
		for (; ctx->rand_data_valid < DEFAULT_BLK_SZ;
			ctx->rand_data_valid++) {
			*ptr = ctx->rand_data[ctx->rand_data_valid];
			ptr++;
			byte_count--;
			if (byte_count == 0)
				goto done;
		}
	}

	/*
	 * Now copy whole blocks
	 */
	for (; byte_count >= DEFAULT_BLK_SZ; byte_count -= DEFAULT_BLK_SZ) {
		if (_get_more_prng_bytes(ctx) < 0) {
			memset(buf, 0, nbytes);
			err = -EINVAL;
			goto done;
		}
		memcpy(ptr, ctx->rand_data, DEFAULT_BLK_SZ);
		ctx->rand_data_valid += DEFAULT_BLK_SZ;
		ptr += DEFAULT_BLK_SZ;
	}

	/*
	 * Now copy any extra partial data
	 */
	if (byte_count)
		goto remainder;

done:
	spin_unlock_irqrestore(&ctx->prng_lock, flags);

	dbgprint(KERN_CRIT "returning %d from get_prng_bytes in context %p\n",
		err, ctx);

	return err;
}
/* Release the AES cipher held by @ctx (crypto_free_cipher handles NULL-safe
 * teardown of the tfm allocated in reset_prng_context()). */
static void free_prng_context(struct prng_context *ctx)
{
	crypto_free_cipher(ctx->tfm);
}
/*
 * (Re)initialize @ctx with the given @key/@klen, seed vector @V and
 * counter @DT; NULL arguments select the built-in defaults.  Returns 0
 * on success or -EINVAL on failure, in which case PRNG_NEED_RESET
 * remains set so get_prng_bytes() refuses to produce output.
 *
 * Fix: on crypto_cipher_setkey() failure the cipher was freed but
 * ctx->tfm was left dangling, so a later free_prng_context() (via
 * cprng_exit) would free it a second time.  Clear the pointer.
 */
static int reset_prng_context(struct prng_context *ctx,
			unsigned char *key, size_t klen,
			unsigned char *V, unsigned char *DT)
{
	int ret;
	int rc = -EINVAL;
	unsigned char *prng_key;

	spin_lock(&ctx->prng_lock);
	ctx->flags |= PRNG_NEED_RESET;

	prng_key = (key != NULL) ? key : (unsigned char *)DEFAULT_PRNG_KEY;

	if (!key)
		klen = DEFAULT_PRNG_KSZ;

	if (V)
		memcpy(ctx->V, V, DEFAULT_BLK_SZ);
	else
		memcpy(ctx->V, DEFAULT_V_SEED, DEFAULT_BLK_SZ);

	if (DT)
		memcpy(ctx->DT, DT, DEFAULT_BLK_SZ);
	else
		memset(ctx->DT, 0, DEFAULT_BLK_SZ);

	memset(ctx->rand_data, 0, DEFAULT_BLK_SZ);
	memset(ctx->last_rand_data, 0, DEFAULT_BLK_SZ);

	if (ctx->tfm)
		crypto_free_cipher(ctx->tfm);

	ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(ctx->tfm)) {
		dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
			ctx);
		ctx->tfm = NULL;
		goto out;
	}

	/* A full block must be generated before the first byte is handed out. */
	ctx->rand_data_valid = DEFAULT_BLK_SZ;

	ret = crypto_cipher_setkey(ctx->tfm, prng_key, klen);
	if (ret) {
		dbgprint(KERN_CRIT "PRNG: setkey() failed flags=%x\n",
			crypto_cipher_get_flags(ctx->tfm));
		crypto_free_cipher(ctx->tfm);
		/* Bug fix: prevent a double free in free_prng_context(). */
		ctx->tfm = NULL;
		goto out;
	}

	rc = 0;
	ctx->flags &= ~PRNG_NEED_RESET;
out:
	spin_unlock(&ctx->prng_lock);

	return rc;
}
/*
 * Per-tfm constructor: set up the lock and seed the context with the
 * built-in default key and V vector.
 */
static int cprng_init(struct crypto_tfm *tfm)
{
	struct prng_context *prng = crypto_tfm_ctx(tfm);

	spin_lock_init(&prng->prng_lock);

	return reset_prng_context(prng, NULL, DEFAULT_PRNG_KSZ, NULL, NULL);
}
/* tfm destruction hook: tear down the per-transform PRNG state. */
static void cprng_exit(struct crypto_tfm *tfm)
{
	struct prng_context *ctx = crypto_tfm_ctx(tfm);

	free_prng_context(ctx);
}
/*
 * rng_make_random hook: fill rdata with dlen pseudo-random bytes from
 * this transform's X9.31 state.  Returns what get_prng_bytes() returns.
 */
static int cprng_get_random(struct crypto_rng *tfm, u8 *rdata,
			    unsigned int dlen)
{
	return get_prng_bytes(rdata, dlen, crypto_rng_ctx(tfm));
}
/*
 * rng_reset hook: reseed from caller-supplied material.  The seed buffer
 * carries V first (DEFAULT_PRNG_KSZ bytes in) followed by the AES key,
 * so it must be at least DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ bytes long.
 */
static int cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
{
	struct prng_context *prng;
	u8 *key;

	if (slen < DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ)
		return -EINVAL;

	prng = crypto_rng_ctx(tfm);
	key = seed + DEFAULT_PRNG_KSZ;

	reset_prng_context(prng, key, DEFAULT_PRNG_KSZ, seed, NULL);

	/* PRNG_NEED_RESET still set means the reset did not complete. */
	return (prng->flags & PRNG_NEED_RESET) ? -EINVAL : 0;
}
/*
 * Registration record for the ANSI X9.31 generator.  Registered under the
 * generic "stdrng" name at priority 100; prng_mod_init() raises this by
 * 200 in FIPS mode so it outranks the non-deterministic "krng" provider.
 */
static struct crypto_alg rng_alg = {
.cra_name = "stdrng",
.cra_driver_name = "ansi_cprng",
.cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_RNG,
.cra_ctxsize = sizeof(struct prng_context),
.cra_type = &crypto_rng_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(rng_alg.cra_list),
.cra_init = cprng_init,
.cra_exit = cprng_exit,
.cra_u = {
.rng = {
.rng_make_random = cprng_get_random,
.rng_reset = cprng_reset,
.seedsize = DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ,
}
}
};
/* Module initialization: register the ANSI X9.31 CPRNG with the crypto API. */
static int __init prng_mod_init(void)
{
	int ret;

	/* Prefer this deterministic generator over krng in FIPS mode. */
	if (fips_enabled)
		rng_alg.cra_priority += 200;

	/*
	 * Propagate the registration result.  The previous code jumped to
	 * a label that unconditionally returned 0, so a failed
	 * crypto_register_alg() was silently reported as success.
	 */
	ret = crypto_register_alg(&rng_alg);
	return ret;
}
/* Module teardown: withdraw the generator from the crypto API. */
static void __exit prng_mod_fini(void)
{
	crypto_unregister_alg(&rng_alg);
}
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software Pseudo Random Number Generator");
MODULE_AUTHOR("Neil Horman <nhorman@tuxdriver.com>");
/* "dbg" toggles the dbgprint() diagnostics at module load time. */
module_param(dbg, int, 0);
MODULE_PARM_DESC(dbg, "Boolean to enable debugging (0/1 == off/on)");
module_init(prng_mod_init);
module_exit(prng_mod_fini);
/* Allow this module to satisfy requests for the generic "stdrng" name. */
MODULE_ALIAS("stdrng");
...@@ -55,7 +55,13 @@ void crypto_mod_put(struct crypto_alg *alg) ...@@ -55,7 +55,13 @@ void crypto_mod_put(struct crypto_alg *alg)
} }
EXPORT_SYMBOL_GPL(crypto_mod_put); EXPORT_SYMBOL_GPL(crypto_mod_put);
struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask) static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
return larval->alg.cra_driver_name[0];
}
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
u32 mask)
{ {
struct crypto_alg *q, *alg = NULL; struct crypto_alg *q, *alg = NULL;
int best = -2; int best = -2;
...@@ -70,6 +76,7 @@ struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask) ...@@ -70,6 +76,7 @@ struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask)
continue; continue;
if (crypto_is_larval(q) && if (crypto_is_larval(q) &&
!crypto_is_test_larval((struct crypto_larval *)q) &&
((struct crypto_larval *)q)->mask != mask) ((struct crypto_larval *)q)->mask != mask)
continue; continue;
...@@ -92,7 +99,6 @@ struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask) ...@@ -92,7 +99,6 @@ struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask)
return alg; return alg;
} }
EXPORT_SYMBOL_GPL(__crypto_alg_lookup);
static void crypto_larval_destroy(struct crypto_alg *alg) static void crypto_larval_destroy(struct crypto_alg *alg)
{ {
...@@ -104,10 +110,8 @@ static void crypto_larval_destroy(struct crypto_alg *alg) ...@@ -104,10 +110,8 @@ static void crypto_larval_destroy(struct crypto_alg *alg)
kfree(larval); kfree(larval);
} }
static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type, struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
u32 mask)
{ {
struct crypto_alg *alg;
struct crypto_larval *larval; struct crypto_larval *larval;
larval = kzalloc(sizeof(*larval), GFP_KERNEL); larval = kzalloc(sizeof(*larval), GFP_KERNEL);
...@@ -119,10 +123,25 @@ static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type, ...@@ -119,10 +123,25 @@ static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
larval->alg.cra_priority = -1; larval->alg.cra_priority = -1;
larval->alg.cra_destroy = crypto_larval_destroy; larval->alg.cra_destroy = crypto_larval_destroy;
atomic_set(&larval->alg.cra_refcnt, 2);
strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME); strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
init_completion(&larval->completion); init_completion(&larval->completion);
return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
u32 mask)
{
struct crypto_alg *alg;
struct crypto_larval *larval;
larval = crypto_larval_alloc(name, type, mask);
if (IS_ERR(larval))
return ERR_CAST(larval);
atomic_set(&larval->alg.cra_refcnt, 2);
down_write(&crypto_alg_sem); down_write(&crypto_alg_sem);
alg = __crypto_alg_lookup(name, type, mask); alg = __crypto_alg_lookup(name, type, mask);
if (!alg) { if (!alg) {
...@@ -152,21 +171,29 @@ EXPORT_SYMBOL_GPL(crypto_larval_kill); ...@@ -152,21 +171,29 @@ EXPORT_SYMBOL_GPL(crypto_larval_kill);
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg) static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{ {
struct crypto_larval *larval = (void *)alg; struct crypto_larval *larval = (void *)alg;
long timeout;
timeout = wait_for_completion_interruptible_timeout(
&larval->completion, 60 * HZ);
wait_for_completion_interruptible_timeout(&larval->completion, 60 * HZ);
alg = larval->adult; alg = larval->adult;
if (alg) { if (timeout < 0)
if (!crypto_mod_get(alg)) alg = ERR_PTR(-EINTR);
alg = ERR_PTR(-EAGAIN); else if (!timeout)
} else alg = ERR_PTR(-ETIMEDOUT);
else if (!alg)
alg = ERR_PTR(-ENOENT); alg = ERR_PTR(-ENOENT);
else if (crypto_is_test_larval(larval) &&
!(alg->cra_flags & CRYPTO_ALG_TESTED))
alg = ERR_PTR(-EAGAIN);
else if (!crypto_mod_get(alg))
alg = ERR_PTR(-EAGAIN);
crypto_mod_put(&larval->alg); crypto_mod_put(&larval->alg);
return alg; return alg;
} }
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
u32 mask)
{ {
struct crypto_alg *alg; struct crypto_alg *alg;
...@@ -176,6 +203,7 @@ static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, ...@@ -176,6 +203,7 @@ static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
return alg; return alg;
} }
EXPORT_SYMBOL_GPL(crypto_alg_lookup);
struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask) struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{ {
...@@ -192,25 +220,40 @@ struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask) ...@@ -192,25 +220,40 @@ struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
if (alg) if (alg)
return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg; return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;
return crypto_larval_alloc(name, type, mask); return crypto_larval_add(name, type, mask);
} }
EXPORT_SYMBOL_GPL(crypto_larval_lookup); EXPORT_SYMBOL_GPL(crypto_larval_lookup);
int crypto_probing_notify(unsigned long val, void *v)
{
int ok;
ok = blocking_notifier_call_chain(&crypto_chain, val, v);
if (ok == NOTIFY_DONE) {
request_module("cryptomgr");
ok = blocking_notifier_call_chain(&crypto_chain, val, v);
}
return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask) struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{ {
struct crypto_alg *alg; struct crypto_alg *alg;
struct crypto_alg *larval; struct crypto_alg *larval;
int ok; int ok;
if (!(mask & CRYPTO_ALG_TESTED)) {
type |= CRYPTO_ALG_TESTED;
mask |= CRYPTO_ALG_TESTED;
}
larval = crypto_larval_lookup(name, type, mask); larval = crypto_larval_lookup(name, type, mask);
if (IS_ERR(larval) || !crypto_is_larval(larval)) if (IS_ERR(larval) || !crypto_is_larval(larval))
return larval; return larval;
ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval); ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);
if (ok == NOTIFY_DONE) {
request_module("cryptomgr");
ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
}
if (ok == NOTIFY_STOP) if (ok == NOTIFY_STOP)
alg = crypto_larval_wait(larval); alg = crypto_larval_wait(larval);
......
...@@ -696,34 +696,5 @@ void skcipher_geniv_exit(struct crypto_tfm *tfm) ...@@ -696,34 +696,5 @@ void skcipher_geniv_exit(struct crypto_tfm *tfm)
} }
EXPORT_SYMBOL_GPL(skcipher_geniv_exit); EXPORT_SYMBOL_GPL(skcipher_geniv_exit);
static int __init blkcipher_module_init(void)
{
int err;
err = chainiv_module_init();
if (err)
goto out;
err = eseqiv_module_init();
if (err)
goto eseqiv_err;
out:
return err;
eseqiv_err:
chainiv_module_exit();
goto out;
}
static void __exit blkcipher_module_exit(void)
{
eseqiv_module_exit();
chainiv_module_exit();
}
module_init(blkcipher_module_init);
module_exit(blkcipher_module_exit);
MODULE_LICENSE("GPL"); MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Generic block chaining cipher type"); MODULE_DESCRIPTION("Generic block chaining cipher type");
...@@ -14,11 +14,11 @@ ...@@ -14,11 +14,11 @@
*/ */
#include <crypto/internal/skcipher.h> #include <crypto/internal/skcipher.h>
#include <crypto/rng.h>
#include <linux/err.h> #include <linux/err.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/kernel.h> #include <linux/kernel.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/random.h>
#include <linux/spinlock.h> #include <linux/spinlock.h>
#include <linux/string.h> #include <linux/string.h>
#include <linux/workqueue.h> #include <linux/workqueue.h>
...@@ -83,6 +83,7 @@ static int chainiv_givencrypt_first(struct skcipher_givcrypt_request *req) ...@@ -83,6 +83,7 @@ static int chainiv_givencrypt_first(struct skcipher_givcrypt_request *req)
{ {
struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
int err = 0;
spin_lock_bh(&ctx->lock); spin_lock_bh(&ctx->lock);
if (crypto_ablkcipher_crt(geniv)->givencrypt != if (crypto_ablkcipher_crt(geniv)->givencrypt !=
...@@ -90,11 +91,15 @@ static int chainiv_givencrypt_first(struct skcipher_givcrypt_request *req) ...@@ -90,11 +91,15 @@ static int chainiv_givencrypt_first(struct skcipher_givcrypt_request *req)
goto unlock; goto unlock;
crypto_ablkcipher_crt(geniv)->givencrypt = chainiv_givencrypt; crypto_ablkcipher_crt(geniv)->givencrypt = chainiv_givencrypt;
get_random_bytes(ctx->iv, crypto_ablkcipher_ivsize(geniv)); err = crypto_rng_get_bytes(crypto_default_rng, ctx->iv,
crypto_ablkcipher_ivsize(geniv));
unlock: unlock:
spin_unlock_bh(&ctx->lock); spin_unlock_bh(&ctx->lock);
if (err)
return err;
return chainiv_givencrypt(req); return chainiv_givencrypt(req);
} }
...@@ -203,6 +208,7 @@ static int async_chainiv_givencrypt_first(struct skcipher_givcrypt_request *req) ...@@ -203,6 +208,7 @@ static int async_chainiv_givencrypt_first(struct skcipher_givcrypt_request *req)
{ {
struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
int err = 0;
if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state)) if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
goto out; goto out;
...@@ -212,11 +218,15 @@ static int async_chainiv_givencrypt_first(struct skcipher_givcrypt_request *req) ...@@ -212,11 +218,15 @@ static int async_chainiv_givencrypt_first(struct skcipher_givcrypt_request *req)
goto unlock; goto unlock;
crypto_ablkcipher_crt(geniv)->givencrypt = async_chainiv_givencrypt; crypto_ablkcipher_crt(geniv)->givencrypt = async_chainiv_givencrypt;
get_random_bytes(ctx->iv, crypto_ablkcipher_ivsize(geniv)); err = crypto_rng_get_bytes(crypto_default_rng, ctx->iv,
crypto_ablkcipher_ivsize(geniv));
unlock: unlock:
clear_bit(CHAINIV_STATE_INUSE, &ctx->state); clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
if (err)
return err;
out: out:
return async_chainiv_givencrypt(req); return async_chainiv_givencrypt(req);
} }
...@@ -284,9 +294,13 @@ static struct crypto_instance *chainiv_alloc(struct rtattr **tb) ...@@ -284,9 +294,13 @@ static struct crypto_instance *chainiv_alloc(struct rtattr **tb)
if (IS_ERR(algt)) if (IS_ERR(algt))
return ERR_PTR(err); return ERR_PTR(err);
err = crypto_get_default_rng();
if (err)
return ERR_PTR(err);
inst = skcipher_geniv_alloc(&chainiv_tmpl, tb, 0, 0); inst = skcipher_geniv_alloc(&chainiv_tmpl, tb, 0, 0);
if (IS_ERR(inst)) if (IS_ERR(inst))
goto out; goto put_rng;
inst->alg.cra_ablkcipher.givencrypt = chainiv_givencrypt_first; inst->alg.cra_ablkcipher.givencrypt = chainiv_givencrypt_first;
...@@ -311,21 +325,37 @@ static struct crypto_instance *chainiv_alloc(struct rtattr **tb) ...@@ -311,21 +325,37 @@ static struct crypto_instance *chainiv_alloc(struct rtattr **tb)
out: out:
return inst; return inst;
put_rng:
crypto_put_default_rng();
goto out;
}
static void chainiv_free(struct crypto_instance *inst)
{
skcipher_geniv_free(inst);
crypto_put_default_rng();
} }
static struct crypto_template chainiv_tmpl = { static struct crypto_template chainiv_tmpl = {
.name = "chainiv", .name = "chainiv",
.alloc = chainiv_alloc, .alloc = chainiv_alloc,
.free = skcipher_geniv_free, .free = chainiv_free,
.module = THIS_MODULE, .module = THIS_MODULE,
}; };
int __init chainiv_module_init(void) static int __init chainiv_module_init(void)
{ {
return crypto_register_template(&chainiv_tmpl); return crypto_register_template(&chainiv_tmpl);
} }
void chainiv_module_exit(void) static void chainiv_module_exit(void)
{ {
crypto_unregister_template(&chainiv_tmpl); crypto_unregister_template(&chainiv_tmpl);
} }
module_init(chainiv_module_init);
module_exit(chainiv_module_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Chain IV Generator");
...@@ -16,13 +16,13 @@ ...@@ -16,13 +16,13 @@
*/ */
#include <crypto/internal/skcipher.h> #include <crypto/internal/skcipher.h>
#include <crypto/rng.h>
#include <crypto/scatterwalk.h> #include <crypto/scatterwalk.h>
#include <linux/err.h> #include <linux/err.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/kernel.h> #include <linux/kernel.h>
#include <linux/mm.h> #include <linux/mm.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/random.h>
#include <linux/scatterlist.h> #include <linux/scatterlist.h>
#include <linux/spinlock.h> #include <linux/spinlock.h>
#include <linux/string.h> #include <linux/string.h>
...@@ -163,17 +163,22 @@ static int eseqiv_givencrypt_first(struct skcipher_givcrypt_request *req) ...@@ -163,17 +163,22 @@ static int eseqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
{ {
struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
int err = 0;
spin_lock_bh(&ctx->lock); spin_lock_bh(&ctx->lock);
if (crypto_ablkcipher_crt(geniv)->givencrypt != eseqiv_givencrypt_first) if (crypto_ablkcipher_crt(geniv)->givencrypt != eseqiv_givencrypt_first)
goto unlock; goto unlock;
crypto_ablkcipher_crt(geniv)->givencrypt = eseqiv_givencrypt; crypto_ablkcipher_crt(geniv)->givencrypt = eseqiv_givencrypt;
get_random_bytes(ctx->salt, crypto_ablkcipher_ivsize(geniv)); err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
crypto_ablkcipher_ivsize(geniv));
unlock: unlock:
spin_unlock_bh(&ctx->lock); spin_unlock_bh(&ctx->lock);
if (err)
return err;
return eseqiv_givencrypt(req); return eseqiv_givencrypt(req);
} }
...@@ -216,9 +221,13 @@ static struct crypto_instance *eseqiv_alloc(struct rtattr **tb) ...@@ -216,9 +221,13 @@ static struct crypto_instance *eseqiv_alloc(struct rtattr **tb)
struct crypto_instance *inst; struct crypto_instance *inst;
int err; int err;
err = crypto_get_default_rng();
if (err)
return ERR_PTR(err);
inst = skcipher_geniv_alloc(&eseqiv_tmpl, tb, 0, 0); inst = skcipher_geniv_alloc(&eseqiv_tmpl, tb, 0, 0);
if (IS_ERR(inst)) if (IS_ERR(inst))
goto out; goto put_rng;
err = -EINVAL; err = -EINVAL;
if (inst->alg.cra_ablkcipher.ivsize != inst->alg.cra_blocksize) if (inst->alg.cra_ablkcipher.ivsize != inst->alg.cra_blocksize)
...@@ -238,22 +247,36 @@ static struct crypto_instance *eseqiv_alloc(struct rtattr **tb) ...@@ -238,22 +247,36 @@ static struct crypto_instance *eseqiv_alloc(struct rtattr **tb)
free_inst: free_inst:
skcipher_geniv_free(inst); skcipher_geniv_free(inst);
inst = ERR_PTR(err); inst = ERR_PTR(err);
put_rng:
crypto_put_default_rng();
goto out; goto out;
} }
static void eseqiv_free(struct crypto_instance *inst)
{
skcipher_geniv_free(inst);
crypto_put_default_rng();
}
static struct crypto_template eseqiv_tmpl = { static struct crypto_template eseqiv_tmpl = {
.name = "eseqiv", .name = "eseqiv",
.alloc = eseqiv_alloc, .alloc = eseqiv_alloc,
.free = skcipher_geniv_free, .free = eseqiv_free,
.module = THIS_MODULE, .module = THIS_MODULE,
}; };
int __init eseqiv_module_init(void) static int __init eseqiv_module_init(void)
{ {
return crypto_register_template(&eseqiv_tmpl); return crypto_register_template(&eseqiv_tmpl);
} }
void __exit eseqiv_module_exit(void) static void __exit eseqiv_module_exit(void)
{ {
crypto_unregister_template(&eseqiv_tmpl); crypto_unregister_template(&eseqiv_tmpl);
} }
module_init(eseqiv_module_init);
module_exit(eseqiv_module_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Encrypted Sequence Number IV Generator");
/*
* FIPS 200 support.
*
* Copyright (c) 2008 Neil Horman <nhorman@tuxdriver.com>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
#include "internal.h"
int fips_enabled;
EXPORT_SYMBOL_GPL(fips_enabled);
/* Process kernel command-line parameter at boot time. fips=0 or fips=1 */
static int fips_enable(char *str)
{
	long val = simple_strtol(str, NULL, 0);

	fips_enabled = (val != 0);
	printk(KERN_INFO "fips mode: %s\n",
		fips_enabled ? "enabled" : "disabled");
	return 1;
}

__setup("fips=", fips_enable);
...@@ -26,6 +26,12 @@ ...@@ -26,6 +26,12 @@
#include <linux/rwsem.h> #include <linux/rwsem.h>
#include <linux/slab.h> #include <linux/slab.h>
#ifdef CONFIG_CRYPTO_FIPS
extern int fips_enabled;
#else
#define fips_enabled 0
#endif
/* Crypto notification events. */ /* Crypto notification events. */
enum { enum {
CRYPTO_MSG_ALG_REQUEST, CRYPTO_MSG_ALG_REQUEST,
...@@ -82,7 +88,7 @@ static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg) ...@@ -82,7 +88,7 @@ static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg)
} }
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg); struct crypto_alg *crypto_mod_get(struct crypto_alg *alg);
struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask); struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask);
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask); struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask);
int crypto_init_digest_ops(struct crypto_tfm *tfm); int crypto_init_digest_ops(struct crypto_tfm *tfm);
...@@ -94,9 +100,11 @@ void crypto_exit_digest_ops(struct crypto_tfm *tfm); ...@@ -94,9 +100,11 @@ void crypto_exit_digest_ops(struct crypto_tfm *tfm);
void crypto_exit_cipher_ops(struct crypto_tfm *tfm); void crypto_exit_cipher_ops(struct crypto_tfm *tfm);
void crypto_exit_compress_ops(struct crypto_tfm *tfm); void crypto_exit_compress_ops(struct crypto_tfm *tfm);
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask);
void crypto_larval_kill(struct crypto_alg *alg); void crypto_larval_kill(struct crypto_alg *alg);
struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask); struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask);
void crypto_larval_error(const char *name, u32 type, u32 mask); void crypto_larval_error(const char *name, u32 type, u32 mask);
void crypto_alg_tested(const char *name, int err);
void crypto_shoot_alg(struct crypto_alg *alg); void crypto_shoot_alg(struct crypto_alg *alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
...@@ -107,6 +115,10 @@ int crypto_register_instance(struct crypto_template *tmpl, ...@@ -107,6 +115,10 @@ int crypto_register_instance(struct crypto_template *tmpl,
int crypto_register_notifier(struct notifier_block *nb); int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb); int crypto_unregister_notifier(struct notifier_block *nb);
int crypto_probing_notify(unsigned long val, void *v);
int __init testmgr_init(void);
void testmgr_exit(void);
static inline void crypto_alg_put(struct crypto_alg *alg) static inline void crypto_alg_put(struct crypto_alg *alg)
{ {
...@@ -139,9 +151,9 @@ static inline int crypto_is_moribund(struct crypto_alg *alg) ...@@ -139,9 +151,9 @@ static inline int crypto_is_moribund(struct crypto_alg *alg)
return alg->cra_flags & (CRYPTO_ALG_DEAD | CRYPTO_ALG_DYING); return alg->cra_flags & (CRYPTO_ALG_DEAD | CRYPTO_ALG_DYING);
} }
static inline int crypto_notify(unsigned long val, void *v) static inline void crypto_notify(unsigned long val, void *v)
{ {
return blocking_notifier_call_chain(&crypto_chain, val, v); blocking_notifier_call_chain(&crypto_chain, val, v);
} }
#endif /* _CRYPTO_INTERNAL_H */ #endif /* _CRYPTO_INTERNAL_H */
......
/*
* RNG implementation using standard kernel RNG.
*
* Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
*/
#include <crypto/internal/rng.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/random.h>
/*
 * rng_make_random hook: hand out dlen bytes straight from the kernel's
 * entropy pool via get_random_bytes().  Cannot fail, hence always 0.
 */
static int krng_get_random(struct crypto_rng *tfm, u8 *rdata, unsigned int dlen)
{
get_random_bytes(rdata, dlen);
return 0;
}
/*
 * rng_reset hook: deliberately a no-op — the kernel pool is seeded by the
 * random driver itself — provided only to satisfy the RNG API contract.
 */
static int krng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
{
return 0;
}
/*
 * Registration record for the kernel-pool RNG wrapper.  Shares the generic
 * "stdrng" name with ansi_cprng; priority 200 makes it the default
 * provider outside FIPS mode.  No state, so cra_ctxsize/seedsize are 0.
 */
static struct crypto_alg krng_alg = {
.cra_name = "stdrng",
.cra_driver_name = "krng",
.cra_priority = 200,
.cra_flags = CRYPTO_ALG_TYPE_RNG,
.cra_ctxsize = 0,
.cra_type = &crypto_rng_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(krng_alg.cra_list),
.cra_u = {
.rng = {
.rng_make_random = krng_get_random,
.rng_reset = krng_reset,
.seedsize = 0,
}
}
};
/* Module initialization: register the kernel-pool RNG with the crypto API. */
static int __init krng_mod_init(void)
{
return crypto_register_alg(&krng_alg);
}
/* Module teardown: withdraw the kernel-pool RNG from the crypto API. */
static void __exit krng_mod_fini(void)
{
	crypto_unregister_alg(&krng_alg);
}
module_init(krng_mod_init);
module_exit(krng_mod_fini);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Kernel Random Number Generator");
/* Register as a provider of the generic "stdrng" algorithm name. */
MODULE_ALIAS("stdrng");
...@@ -19,8 +19,53 @@ ...@@ -19,8 +19,53 @@
#include <linux/rwsem.h> #include <linux/rwsem.h>
#include <linux/proc_fs.h> #include <linux/proc_fs.h>
#include <linux/seq_file.h> #include <linux/seq_file.h>
#include <linux/sysctl.h>
#include "internal.h" #include "internal.h"
#ifdef CONFIG_CRYPTO_FIPS
static struct ctl_table crypto_sysctl_table[] = {
{
.ctl_name = CTL_UNNUMBERED,
.procname = "fips_enabled",
.data = &fips_enabled,
.maxlen = sizeof(int),
.mode = 0444,
.proc_handler = &proc_dointvec
},
{
.ctl_name = 0,
},
};
static struct ctl_table crypto_dir_table[] = {
{
.ctl_name = CTL_UNNUMBERED,
.procname = "crypto",
.mode = 0555,
.child = crypto_sysctl_table
},
{
.ctl_name = 0,
},
};
static struct ctl_table_header *crypto_sysctls;
static void crypto_proc_fips_init(void)
{
crypto_sysctls = register_sysctl_table(crypto_dir_table);
}
static void crypto_proc_fips_exit(void)
{
if (crypto_sysctls)
unregister_sysctl_table(crypto_sysctls);
}
#else
#define crypto_proc_fips_init()
#define crypto_proc_fips_exit()
#endif
static void *c_start(struct seq_file *m, loff_t *pos) static void *c_start(struct seq_file *m, loff_t *pos)
{ {
down_read(&crypto_alg_sem); down_read(&crypto_alg_sem);
...@@ -46,8 +91,11 @@ static int c_show(struct seq_file *m, void *p) ...@@ -46,8 +91,11 @@ static int c_show(struct seq_file *m, void *p)
seq_printf(m, "module : %s\n", module_name(alg->cra_module)); seq_printf(m, "module : %s\n", module_name(alg->cra_module));
seq_printf(m, "priority : %d\n", alg->cra_priority); seq_printf(m, "priority : %d\n", alg->cra_priority);
seq_printf(m, "refcnt : %d\n", atomic_read(&alg->cra_refcnt)); seq_printf(m, "refcnt : %d\n", atomic_read(&alg->cra_refcnt));
seq_printf(m, "selftest : %s\n",
(alg->cra_flags & CRYPTO_ALG_TESTED) ?
"passed" : "unknown");
switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) { switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
case CRYPTO_ALG_TYPE_CIPHER: case CRYPTO_ALG_TYPE_CIPHER:
seq_printf(m, "type : cipher\n"); seq_printf(m, "type : cipher\n");
seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
...@@ -67,7 +115,10 @@ static int c_show(struct seq_file *m, void *p) ...@@ -67,7 +115,10 @@ static int c_show(struct seq_file *m, void *p)
seq_printf(m, "type : compression\n"); seq_printf(m, "type : compression\n");
break; break;
default: default:
if (alg->cra_type && alg->cra_type->show) if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
seq_printf(m, "type : larval\n");
seq_printf(m, "flags : 0x%x\n", alg->cra_flags);
} else if (alg->cra_type && alg->cra_type->show)
alg->cra_type->show(m, alg); alg->cra_type->show(m, alg);
else else
seq_printf(m, "type : unknown\n"); seq_printf(m, "type : unknown\n");
...@@ -100,9 +151,11 @@ static const struct file_operations proc_crypto_ops = { ...@@ -100,9 +151,11 @@ static const struct file_operations proc_crypto_ops = {
void __init crypto_init_proc(void) void __init crypto_init_proc(void)
{ {
proc_create("crypto", 0, NULL, &proc_crypto_ops); proc_create("crypto", 0, NULL, &proc_crypto_ops);
crypto_proc_fips_init();
} }
void __exit crypto_exit_proc(void) void __exit crypto_exit_proc(void)
{ {
crypto_proc_fips_exit();
remove_proc_entry("crypto", NULL); remove_proc_entry("crypto", NULL);
} }
/*
* Cryptographic API.
*
* RNG operations.
*
* Copyright (c) 2008 Neil Horman <nhorman@tuxdriver.com>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
#include <asm/atomic.h>
#include <crypto/internal/rng.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/random.h>
#include <linux/seq_file.h>
#include <linux/string.h>
static DEFINE_MUTEX(crypto_default_rng_lock);
struct crypto_rng *crypto_default_rng;
EXPORT_SYMBOL_GPL(crypto_default_rng);
static int crypto_default_rng_refcnt;
/*
 * Generic reset wrapper installed as crt_rng.rng_reset: when the caller
 * requests a reseed of slen bytes but supplies no material, draw the
 * seed from the kernel entropy pool before invoking the driver's reset.
 */
static int rngapi_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
{
	u8 *tmp = NULL;
	int err;

	if (!seed && slen) {
		tmp = kmalloc(slen, GFP_KERNEL);
		if (!tmp)
			return -ENOMEM;

		get_random_bytes(tmp, slen);
		seed = tmp;
	}

	err = crypto_rng_alg(tfm)->rng_reset(tfm, seed, slen);

	kfree(tmp);
	return err;
}
/*
 * crypto_type init hook: wire the transform's RNG ops to the algorithm's
 * generator callback and to the generic rngapi_reset() wrapper.
 */
static int crypto_init_rng_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	struct rng_tfm *ops = &tfm->crt_rng;
	struct rng_alg *alg = &tfm->__crt_alg->cra_rng;

	ops->rng_gen_random = alg->rng_make_random;
	ops->rng_reset = rngapi_reset;

	return 0;
}
/*
 * /proc/crypto display hook.  Marked unused because it is referenced from
 * crypto_rng_type only when CONFIG_PROC_FS is enabled (see below).
 */
static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg)
{
seq_printf(m, "type : rng\n");
seq_printf(m, "seedsize : %u\n", alg->cra_rng.seedsize);
}
/* crypto_type ctxsize hook: RNG context size is exactly what the driver
 * declared in cra_ctxsize; type/mask play no role here. */
static unsigned int crypto_rng_ctxsize(struct crypto_alg *alg, u32 type,
u32 mask)
{
return alg->cra_ctxsize;
}
/* Frontend type descriptor for CRYPTO_ALG_TYPE_RNG algorithms; the proc
 * show callback is compiled in only when /proc support exists. */
const struct crypto_type crypto_rng_type = {
.ctxsize = crypto_rng_ctxsize,
.init = crypto_init_rng_ops,
#ifdef CONFIG_PROC_FS
.show = crypto_rng_show,
#endif
};
EXPORT_SYMBOL_GPL(crypto_rng_type);
/*
 * Take a reference on the process-wide default "stdrng" instance,
 * allocating and seeding it on first use (seeded from the kernel pool by
 * rngapi_reset() when the driver needs a seed).  Returns 0 on success or
 * a negative errno; every successful call must be balanced by
 * crypto_put_default_rng().  May sleep (mutex, allocation).
 */
int crypto_get_default_rng(void)
{
struct crypto_rng *rng;
int err;
mutex_lock(&crypto_default_rng_lock);
if (!crypto_default_rng) {
rng = crypto_alloc_rng("stdrng", 0, 0);
err = PTR_ERR(rng);
if (IS_ERR(rng))
goto unlock;
/* A freshly allocated RNG must be seeded before first use. */
err = crypto_rng_reset(rng, NULL, crypto_rng_seedsize(rng));
if (err) {
crypto_free_rng(rng);
goto unlock;
}
crypto_default_rng = rng;
}
crypto_default_rng_refcnt++;
err = 0;
unlock:
mutex_unlock(&crypto_default_rng_lock);
return err;
}
EXPORT_SYMBOL_GPL(crypto_get_default_rng);
/*
 * Drop a reference taken by crypto_get_default_rng(); the shared instance
 * is destroyed when the last user goes away.  Calls must be balanced —
 * an unmatched put would underflow the refcount.
 */
void crypto_put_default_rng(void)
{
mutex_lock(&crypto_default_rng_lock);
if (!--crypto_default_rng_refcnt) {
crypto_free_rng(crypto_default_rng);
crypto_default_rng = NULL;
}
mutex_unlock(&crypto_default_rng_lock);
}
EXPORT_SYMBOL_GPL(crypto_put_default_rng);
MODULE_LICENSE("GPL");
/* Fix typo in module description string ("Genertor" -> "Generator"). */
MODULE_DESCRIPTION("Random Number Generator");
...@@ -15,11 +15,11 @@ ...@@ -15,11 +15,11 @@
#include <crypto/internal/aead.h> #include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h> #include <crypto/internal/skcipher.h>
#include <crypto/rng.h>
#include <linux/err.h> #include <linux/err.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/kernel.h> #include <linux/kernel.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/random.h>
#include <linux/spinlock.h> #include <linux/spinlock.h>
#include <linux/string.h> #include <linux/string.h>
...@@ -189,17 +189,22 @@ static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req) ...@@ -189,17 +189,22 @@ static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
{ {
struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
int err = 0;
spin_lock_bh(&ctx->lock); spin_lock_bh(&ctx->lock);
if (crypto_ablkcipher_crt(geniv)->givencrypt != seqiv_givencrypt_first) if (crypto_ablkcipher_crt(geniv)->givencrypt != seqiv_givencrypt_first)
goto unlock; goto unlock;
crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt; crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt;
get_random_bytes(ctx->salt, crypto_ablkcipher_ivsize(geniv)); err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
crypto_ablkcipher_ivsize(geniv));
unlock: unlock:
spin_unlock_bh(&ctx->lock); spin_unlock_bh(&ctx->lock);
if (err)
return err;
return seqiv_givencrypt(req); return seqiv_givencrypt(req);
} }
...@@ -207,17 +212,22 @@ static int seqiv_aead_givencrypt_first(struct aead_givcrypt_request *req) ...@@ -207,17 +212,22 @@ static int seqiv_aead_givencrypt_first(struct aead_givcrypt_request *req)
{ {
struct crypto_aead *geniv = aead_givcrypt_reqtfm(req); struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
struct seqiv_ctx *ctx = crypto_aead_ctx(geniv); struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
int err = 0;
spin_lock_bh(&ctx->lock); spin_lock_bh(&ctx->lock);
if (crypto_aead_crt(geniv)->givencrypt != seqiv_aead_givencrypt_first) if (crypto_aead_crt(geniv)->givencrypt != seqiv_aead_givencrypt_first)
goto unlock; goto unlock;
crypto_aead_crt(geniv)->givencrypt = seqiv_aead_givencrypt; crypto_aead_crt(geniv)->givencrypt = seqiv_aead_givencrypt;
get_random_bytes(ctx->salt, crypto_aead_ivsize(geniv)); err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
crypto_aead_ivsize(geniv));
unlock: unlock:
spin_unlock_bh(&ctx->lock); spin_unlock_bh(&ctx->lock);
if (err)
return err;
return seqiv_aead_givencrypt(req); return seqiv_aead_givencrypt(req);
} }
...@@ -298,19 +308,27 @@ static struct crypto_instance *seqiv_alloc(struct rtattr **tb) ...@@ -298,19 +308,27 @@ static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
if (IS_ERR(algt)) if (IS_ERR(algt))
return ERR_PTR(err); return ERR_PTR(err);
err = crypto_get_default_rng();
if (err)
return ERR_PTR(err);
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK) if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
inst = seqiv_ablkcipher_alloc(tb); inst = seqiv_ablkcipher_alloc(tb);
else else
inst = seqiv_aead_alloc(tb); inst = seqiv_aead_alloc(tb);
if (IS_ERR(inst)) if (IS_ERR(inst))
goto out; goto put_rng;
inst->alg.cra_alignmask |= __alignof__(u32) - 1; inst->alg.cra_alignmask |= __alignof__(u32) - 1;
inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx); inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);
out: out:
return inst; return inst;
put_rng:
crypto_put_default_rng();
goto out;
} }
static void seqiv_free(struct crypto_instance *inst) static void seqiv_free(struct crypto_instance *inst)
...@@ -319,6 +337,7 @@ static void seqiv_free(struct crypto_instance *inst) ...@@ -319,6 +337,7 @@ static void seqiv_free(struct crypto_instance *inst)
skcipher_geniv_free(inst); skcipher_geniv_free(inst);
else else
aead_geniv_free(inst); aead_geniv_free(inst);
crypto_put_default_rng();
} }
static struct crypto_template seqiv_tmpl = { static struct crypto_template seqiv_tmpl = {
......
...@@ -19,11 +19,9 @@ ...@@ -19,11 +19,9 @@
#include <linux/err.h> #include <linux/err.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/mm.h>
#include <linux/slab.h> #include <linux/slab.h>
#include <linux/scatterlist.h> #include <linux/scatterlist.h>
#include <linux/string.h> #include <linux/string.h>
#include <linux/crypto.h>
#include <linux/moduleparam.h> #include <linux/moduleparam.h>
#include <linux/jiffies.h> #include <linux/jiffies.h>
#include <linux/timex.h> #include <linux/timex.h>
...@@ -31,45 +29,23 @@ ...@@ -31,45 +29,23 @@
#include "tcrypt.h" #include "tcrypt.h"
/* /*
* Need to kmalloc() memory for testing. * Need slab memory for testing (size in number of pages).
*/ */
#define TVMEMSIZE 16384 #define TVMEMSIZE 4
#define XBUFSIZE 32768
/* /*
* Indexes into the xbuf to simulate cross-page access. * Used by test_cipher_speed()
*/
#define IDX1 32
#define IDX2 32400
#define IDX3 1
#define IDX4 8193
#define IDX5 22222
#define IDX6 17101
#define IDX7 27333
#define IDX8 3000
/*
* Used by test_cipher()
*/ */
#define ENCRYPT 1 #define ENCRYPT 1
#define DECRYPT 0 #define DECRYPT 0
struct tcrypt_result {
struct completion completion;
int err;
};
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
/* /*
* Used by test_cipher_speed() * Used by test_cipher_speed()
*/ */
static unsigned int sec; static unsigned int sec;
static int mode; static int mode;
static char *xbuf; static char *tvmem[TVMEMSIZE];
static char *axbuf;
static char *tvmem;
static char *check[] = { static char *check[] = {
"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
...@@ -80,655 +56,13 @@ static char *check[] = { ...@@ -80,655 +56,13 @@ static char *check[] = {
"lzo", "cts", NULL "lzo", "cts", NULL
}; };
static void hexdump(unsigned char *buf, unsigned int len) static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
{ struct scatterlist *sg, int blen, int sec)
print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
16, 1,
buf, len, false);
}
static void tcrypt_complete(struct crypto_async_request *req, int err)
{
struct tcrypt_result *res = req->data;
if (err == -EINPROGRESS)
return;
res->err = err;
complete(&res->completion);
}
static void test_hash(char *algo, struct hash_testvec *template,
unsigned int tcount)
{
unsigned int i, j, k, temp;
struct scatterlist sg[8];
char result[64];
struct crypto_ahash *tfm;
struct ahash_request *req;
struct tcrypt_result tresult;
int ret;
void *hash_buff;
printk("\ntesting %s\n", algo);
init_completion(&tresult.completion);
tfm = crypto_alloc_ahash(algo, 0, 0);
if (IS_ERR(tfm)) {
printk("failed to load transform for %s: %ld\n", algo,
PTR_ERR(tfm));
return;
}
req = ahash_request_alloc(tfm, GFP_KERNEL);
if (!req) {
printk(KERN_ERR "failed to allocate request for %s\n", algo);
goto out_noreq;
}
ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
tcrypt_complete, &tresult);
for (i = 0; i < tcount; i++) {
printk("test %u:\n", i + 1);
memset(result, 0, 64);
hash_buff = kzalloc(template[i].psize, GFP_KERNEL);
if (!hash_buff)
continue;
memcpy(hash_buff, template[i].plaintext, template[i].psize);
sg_init_one(&sg[0], hash_buff, template[i].psize);
if (template[i].ksize) {
crypto_ahash_clear_flags(tfm, ~0);
ret = crypto_ahash_setkey(tfm, template[i].key,
template[i].ksize);
if (ret) {
printk("setkey() failed ret=%d\n", ret);
kfree(hash_buff);
goto out;
}
}
ahash_request_set_crypt(req, sg, result, template[i].psize);
ret = crypto_ahash_digest(req);
switch (ret) {
case 0:
break;
case -EINPROGRESS:
case -EBUSY:
ret = wait_for_completion_interruptible(
&tresult.completion);
if (!ret && !(ret = tresult.err)) {
INIT_COMPLETION(tresult.completion);
break;
}
/* fall through */
default:
printk("digest () failed ret=%d\n", ret);
kfree(hash_buff);
goto out;
}
hexdump(result, crypto_ahash_digestsize(tfm));
printk("%s\n",
memcmp(result, template[i].digest,
crypto_ahash_digestsize(tfm)) ?
"fail" : "pass");
kfree(hash_buff);
}
printk("testing %s across pages\n", algo);
/* setup the dummy buffer first */
memset(xbuf, 0, XBUFSIZE);
j = 0;
for (i = 0; i < tcount; i++) {
if (template[i].np) {
j++;
printk("test %u:\n", j);
memset(result, 0, 64);
temp = 0;
sg_init_table(sg, template[i].np);
for (k = 0; k < template[i].np; k++) {
memcpy(&xbuf[IDX[k]],
template[i].plaintext + temp,
template[i].tap[k]);
temp += template[i].tap[k];
sg_set_buf(&sg[k], &xbuf[IDX[k]],
template[i].tap[k]);
}
if (template[i].ksize) {
crypto_ahash_clear_flags(tfm, ~0);
ret = crypto_ahash_setkey(tfm, template[i].key,
template[i].ksize);
if (ret) {
printk("setkey() failed ret=%d\n", ret);
goto out;
}
}
ahash_request_set_crypt(req, sg, result,
template[i].psize);
ret = crypto_ahash_digest(req);
switch (ret) {
case 0:
break;
case -EINPROGRESS:
case -EBUSY:
ret = wait_for_completion_interruptible(
&tresult.completion);
if (!ret && !(ret = tresult.err)) {
INIT_COMPLETION(tresult.completion);
break;
}
/* fall through */
default:
printk("digest () failed ret=%d\n", ret);
goto out;
}
hexdump(result, crypto_ahash_digestsize(tfm));
printk("%s\n",
memcmp(result, template[i].digest,
crypto_ahash_digestsize(tfm)) ?
"fail" : "pass");
}
}
out:
ahash_request_free(req);
out_noreq:
crypto_free_ahash(tfm);
}
static void test_aead(char *algo, int enc, struct aead_testvec *template,
unsigned int tcount)
{
unsigned int ret, i, j, k, n, temp;
char *q;
struct crypto_aead *tfm;
char *key;
struct aead_request *req;
struct scatterlist sg[8];
struct scatterlist asg[8];
const char *e;
struct tcrypt_result result;
unsigned int authsize;
void *input;
void *assoc;
char iv[MAX_IVLEN];
if (enc == ENCRYPT)
e = "encryption";
else
e = "decryption";
printk(KERN_INFO "\ntesting %s %s\n", algo, e);
init_completion(&result.completion);
tfm = crypto_alloc_aead(algo, 0, 0);
if (IS_ERR(tfm)) {
printk(KERN_INFO "failed to load transform for %s: %ld\n",
algo, PTR_ERR(tfm));
return;
}
req = aead_request_alloc(tfm, GFP_KERNEL);
if (!req) {
printk(KERN_INFO "failed to allocate request for %s\n", algo);
goto out;
}
aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
tcrypt_complete, &result);
for (i = 0, j = 0; i < tcount; i++) {
if (!template[i].np) {
printk(KERN_INFO "test %u (%d bit key):\n",
++j, template[i].klen * 8);
/* some tepmplates have no input data but they will
* touch input
*/
input = kzalloc(template[i].ilen + template[i].rlen, GFP_KERNEL);
if (!input)
continue;
assoc = kzalloc(template[i].alen, GFP_KERNEL);
if (!assoc) {
kfree(input);
continue;
}
memcpy(input, template[i].input, template[i].ilen);
memcpy(assoc, template[i].assoc, template[i].alen);
if (template[i].iv)
memcpy(iv, template[i].iv, MAX_IVLEN);
else
memset(iv, 0, MAX_IVLEN);
crypto_aead_clear_flags(tfm, ~0);
if (template[i].wk)
crypto_aead_set_flags(
tfm, CRYPTO_TFM_REQ_WEAK_KEY);
if (template[i].key)
key = template[i].key;
else
key = kzalloc(template[i].klen, GFP_KERNEL);
ret = crypto_aead_setkey(tfm, key,
template[i].klen);
if (ret) {
printk(KERN_INFO "setkey() failed flags=%x\n",
crypto_aead_get_flags(tfm));
if (!template[i].fail)
goto next_one;
}
authsize = abs(template[i].rlen - template[i].ilen);
ret = crypto_aead_setauthsize(tfm, authsize);
if (ret) {
printk(KERN_INFO
"failed to set authsize = %u\n",
authsize);
goto next_one;
}
sg_init_one(&sg[0], input,
template[i].ilen + (enc ? authsize : 0));
sg_init_one(&asg[0], assoc, template[i].alen);
aead_request_set_crypt(req, sg, sg,
template[i].ilen, iv);
aead_request_set_assoc(req, asg, template[i].alen);
ret = enc ?
crypto_aead_encrypt(req) :
crypto_aead_decrypt(req);
switch (ret) {
case 0:
break;
case -EINPROGRESS:
case -EBUSY:
ret = wait_for_completion_interruptible(
&result.completion);
if (!ret && !(ret = result.err)) {
INIT_COMPLETION(result.completion);
break;
}
/* fall through */
default:
printk(KERN_INFO "%s () failed err=%d\n",
e, -ret);
goto next_one;
}
q = input;
hexdump(q, template[i].rlen);
printk(KERN_INFO "enc/dec: %s\n",
memcmp(q, template[i].result,
template[i].rlen) ? "fail" : "pass");
next_one:
if (!template[i].key)
kfree(key);
kfree(assoc);
kfree(input);
}
}
printk(KERN_INFO "\ntesting %s %s across pages (chunking)\n", algo, e);
memset(axbuf, 0, XBUFSIZE);
for (i = 0, j = 0; i < tcount; i++) {
if (template[i].np) {
printk(KERN_INFO "test %u (%d bit key):\n",
++j, template[i].klen * 8);
if (template[i].iv)
memcpy(iv, template[i].iv, MAX_IVLEN);
else
memset(iv, 0, MAX_IVLEN);
crypto_aead_clear_flags(tfm, ~0);
if (template[i].wk)
crypto_aead_set_flags(
tfm, CRYPTO_TFM_REQ_WEAK_KEY);
key = template[i].key;
ret = crypto_aead_setkey(tfm, key, template[i].klen);
if (ret) {
printk(KERN_INFO "setkey() failed flags=%x\n",
crypto_aead_get_flags(tfm));
if (!template[i].fail)
goto out;
}
memset(xbuf, 0, XBUFSIZE);
sg_init_table(sg, template[i].np);
for (k = 0, temp = 0; k < template[i].np; k++) {
memcpy(&xbuf[IDX[k]],
template[i].input + temp,
template[i].tap[k]);
temp += template[i].tap[k];
sg_set_buf(&sg[k], &xbuf[IDX[k]],
template[i].tap[k]);
}
authsize = abs(template[i].rlen - template[i].ilen);
ret = crypto_aead_setauthsize(tfm, authsize);
if (ret) {
printk(KERN_INFO
"failed to set authsize = %u\n",
authsize);
goto out;
}
if (enc)
sg[k - 1].length += authsize;
sg_init_table(asg, template[i].anp);
for (k = 0, temp = 0; k < template[i].anp; k++) {
memcpy(&axbuf[IDX[k]],
template[i].assoc + temp,
template[i].atap[k]);
temp += template[i].atap[k];
sg_set_buf(&asg[k], &axbuf[IDX[k]],
template[i].atap[k]);
}
aead_request_set_crypt(req, sg, sg,
template[i].ilen,
iv);
aead_request_set_assoc(req, asg, template[i].alen);
ret = enc ?
crypto_aead_encrypt(req) :
crypto_aead_decrypt(req);
switch (ret) {
case 0:
break;
case -EINPROGRESS:
case -EBUSY:
ret = wait_for_completion_interruptible(
&result.completion);
if (!ret && !(ret = result.err)) {
INIT_COMPLETION(result.completion);
break;
}
/* fall through */
default:
printk(KERN_INFO "%s () failed err=%d\n",
e, -ret);
goto out;
}
for (k = 0, temp = 0; k < template[i].np; k++) {
printk(KERN_INFO "page %u\n", k);
q = &xbuf[IDX[k]];
n = template[i].tap[k];
if (k == template[i].np - 1)
n += enc ? authsize : -authsize;
hexdump(q, n);
printk(KERN_INFO "%s\n",
memcmp(q, template[i].result + temp, n) ?
"fail" : "pass");
q += n;
if (k == template[i].np - 1 && !enc) {
if (memcmp(q, template[i].input +
temp + n, authsize))
n = authsize;
else
n = 0;
} else {
for (n = 0; q[n]; n++)
;
}
if (n) {
printk("Result buffer corruption %u "
"bytes:\n", n);
hexdump(q, n);
}
temp += template[i].tap[k];
}
}
}
out:
crypto_free_aead(tfm);
aead_request_free(req);
}
static void test_cipher(char *algo, int enc,
struct cipher_testvec *template, unsigned int tcount)
{
unsigned int ret, i, j, k, n, temp;
char *q;
struct crypto_ablkcipher *tfm;
struct ablkcipher_request *req;
struct scatterlist sg[8];
const char *e;
struct tcrypt_result result;
void *data;
char iv[MAX_IVLEN];
if (enc == ENCRYPT)
e = "encryption";
else
e = "decryption";
printk("\ntesting %s %s\n", algo, e);
init_completion(&result.completion);
tfm = crypto_alloc_ablkcipher(algo, 0, 0);
if (IS_ERR(tfm)) {
printk("failed to load transform for %s: %ld\n", algo,
PTR_ERR(tfm));
return;
}
req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
if (!req) {
printk("failed to allocate request for %s\n", algo);
goto out;
}
ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
tcrypt_complete, &result);
j = 0;
for (i = 0; i < tcount; i++) {
data = kzalloc(template[i].ilen, GFP_KERNEL);
if (!data)
continue;
memcpy(data, template[i].input, template[i].ilen);
if (template[i].iv)
memcpy(iv, template[i].iv, MAX_IVLEN);
else
memset(iv, 0, MAX_IVLEN);
if (!(template[i].np)) {
j++;
printk("test %u (%d bit key):\n",
j, template[i].klen * 8);
crypto_ablkcipher_clear_flags(tfm, ~0);
if (template[i].wk)
crypto_ablkcipher_set_flags(
tfm, CRYPTO_TFM_REQ_WEAK_KEY);
ret = crypto_ablkcipher_setkey(tfm, template[i].key,
template[i].klen);
if (ret) {
printk("setkey() failed flags=%x\n",
crypto_ablkcipher_get_flags(tfm));
if (!template[i].fail) {
kfree(data);
goto out;
}
}
sg_init_one(&sg[0], data, template[i].ilen);
ablkcipher_request_set_crypt(req, sg, sg,
template[i].ilen, iv);
ret = enc ?
crypto_ablkcipher_encrypt(req) :
crypto_ablkcipher_decrypt(req);
switch (ret) {
case 0:
break;
case -EINPROGRESS:
case -EBUSY:
ret = wait_for_completion_interruptible(
&result.completion);
if (!ret && !((ret = result.err))) {
INIT_COMPLETION(result.completion);
break;
}
/* fall through */
default:
printk("%s () failed err=%d\n", e, -ret);
kfree(data);
goto out;
}
q = data;
hexdump(q, template[i].rlen);
printk("%s\n",
memcmp(q, template[i].result,
template[i].rlen) ? "fail" : "pass");
}
kfree(data);
}
printk("\ntesting %s %s across pages (chunking)\n", algo, e);
j = 0;
for (i = 0; i < tcount; i++) {
if (template[i].iv)
memcpy(iv, template[i].iv, MAX_IVLEN);
else
memset(iv, 0, MAX_IVLEN);
if (template[i].np) {
j++;
printk("test %u (%d bit key):\n",
j, template[i].klen * 8);
memset(xbuf, 0, XBUFSIZE);
crypto_ablkcipher_clear_flags(tfm, ~0);
if (template[i].wk)
crypto_ablkcipher_set_flags(
tfm, CRYPTO_TFM_REQ_WEAK_KEY);
ret = crypto_ablkcipher_setkey(tfm, template[i].key,
template[i].klen);
if (ret) {
printk("setkey() failed flags=%x\n",
crypto_ablkcipher_get_flags(tfm));
if (!template[i].fail)
goto out;
}
temp = 0;
sg_init_table(sg, template[i].np);
for (k = 0; k < template[i].np; k++) {
memcpy(&xbuf[IDX[k]],
template[i].input + temp,
template[i].tap[k]);
temp += template[i].tap[k];
sg_set_buf(&sg[k], &xbuf[IDX[k]],
template[i].tap[k]);
}
ablkcipher_request_set_crypt(req, sg, sg,
template[i].ilen, iv);
ret = enc ?
crypto_ablkcipher_encrypt(req) :
crypto_ablkcipher_decrypt(req);
switch (ret) {
case 0:
break;
case -EINPROGRESS:
case -EBUSY:
ret = wait_for_completion_interruptible(
&result.completion);
if (!ret && !((ret = result.err))) {
INIT_COMPLETION(result.completion);
break;
}
/* fall through */
default:
printk("%s () failed err=%d\n", e, -ret);
goto out;
}
temp = 0;
for (k = 0; k < template[i].np; k++) {
printk("page %u\n", k);
q = &xbuf[IDX[k]];
hexdump(q, template[i].tap[k]);
printk("%s\n",
memcmp(q, template[i].result + temp,
template[i].tap[k]) ? "fail" :
"pass");
for (n = 0; q[template[i].tap[k] + n]; n++)
;
if (n) {
printk("Result buffer corruption %u "
"bytes:\n", n);
hexdump(&q[template[i].tap[k]], n);
}
temp += template[i].tap[k];
}
}
}
out:
crypto_free_ablkcipher(tfm);
ablkcipher_request_free(req);
}
static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc, char *p,
int blen, int sec)
{ {
struct scatterlist sg[1];
unsigned long start, end; unsigned long start, end;
int bcount; int bcount;
int ret; int ret;
sg_init_one(sg, p, blen);
for (start = jiffies, end = start + sec * HZ, bcount = 0; for (start = jiffies, end = start + sec * HZ, bcount = 0;
time_before(jiffies, end); bcount++) { time_before(jiffies, end); bcount++) {
if (enc) if (enc)
...@@ -745,16 +79,13 @@ static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc, char *p, ...@@ -745,16 +79,13 @@ static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc, char *p,
return 0; return 0;
} }
static int test_cipher_cycles(struct blkcipher_desc *desc, int enc, char *p, static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
int blen) struct scatterlist *sg, int blen)
{ {
struct scatterlist sg[1];
unsigned long cycles = 0; unsigned long cycles = 0;
int ret = 0; int ret = 0;
int i; int i;
sg_init_one(sg, p, blen);
local_bh_disable(); local_bh_disable();
local_irq_disable(); local_irq_disable();
...@@ -799,12 +130,12 @@ static int test_cipher_cycles(struct blkcipher_desc *desc, int enc, char *p, ...@@ -799,12 +130,12 @@ static int test_cipher_cycles(struct blkcipher_desc *desc, int enc, char *p,
static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 }; static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static void test_cipher_speed(char *algo, int enc, unsigned int sec, static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
struct cipher_testvec *template, struct cipher_speed_template *template,
unsigned int tcount, u8 *keysize) unsigned int tcount, u8 *keysize)
{ {
unsigned int ret, i, j, iv_len; unsigned int ret, i, j, iv_len;
unsigned char *key, *p, iv[128]; const char *key, iv[128];
struct crypto_blkcipher *tfm; struct crypto_blkcipher *tfm;
struct blkcipher_desc desc; struct blkcipher_desc desc;
const char *e; const char *e;
...@@ -832,27 +163,28 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec, ...@@ -832,27 +163,28 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
b_size = block_sizes; b_size = block_sizes;
do { do {
struct scatterlist sg[TVMEMSIZE];
if ((*keysize + *b_size) > TVMEMSIZE) { if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
printk("template (%u) too big for tvmem (%u)\n", printk("template (%u) too big for "
*keysize + *b_size, TVMEMSIZE); "tvmem (%lu)\n", *keysize + *b_size,
TVMEMSIZE * PAGE_SIZE);
goto out; goto out;
} }
printk("test %u (%d bit key, %d byte blocks): ", i, printk("test %u (%d bit key, %d byte blocks): ", i,
*keysize * 8, *b_size); *keysize * 8, *b_size);
memset(tvmem, 0xff, *keysize + *b_size); memset(tvmem[0], 0xff, PAGE_SIZE);
/* set key, plain text and IV */ /* set key, plain text and IV */
key = (unsigned char *)tvmem; key = tvmem[0];
for (j = 0; j < tcount; j++) { for (j = 0; j < tcount; j++) {
if (template[j].klen == *keysize) { if (template[j].klen == *keysize) {
key = template[j].key; key = template[j].key;
break; break;
} }
} }
p = (unsigned char *)tvmem + *keysize;
ret = crypto_blkcipher_setkey(tfm, key, *keysize); ret = crypto_blkcipher_setkey(tfm, key, *keysize);
if (ret) { if (ret) {
...@@ -861,6 +193,14 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec, ...@@ -861,6 +193,14 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
goto out; goto out;
} }
sg_init_table(sg, TVMEMSIZE);
sg_set_buf(sg, tvmem[0] + *keysize,
PAGE_SIZE - *keysize);
for (j = 1; j < TVMEMSIZE; j++) {
sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
memset (tvmem[j], 0xff, PAGE_SIZE);
}
iv_len = crypto_blkcipher_ivsize(tfm); iv_len = crypto_blkcipher_ivsize(tfm);
if (iv_len) { if (iv_len) {
memset(&iv, 0xff, iv_len); memset(&iv, 0xff, iv_len);
...@@ -868,9 +208,11 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec, ...@@ -868,9 +208,11 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
} }
if (sec) if (sec)
ret = test_cipher_jiffies(&desc, enc, p, *b_size, sec); ret = test_cipher_jiffies(&desc, enc, sg,
*b_size, sec);
else else
ret = test_cipher_cycles(&desc, enc, p, *b_size); ret = test_cipher_cycles(&desc, enc, sg,
*b_size);
if (ret) { if (ret) {
printk("%s() failed flags=%x\n", e, desc.flags); printk("%s() failed flags=%x\n", e, desc.flags);
...@@ -886,19 +228,16 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec, ...@@ -886,19 +228,16 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
crypto_free_blkcipher(tfm); crypto_free_blkcipher(tfm);
} }
static int test_hash_jiffies_digest(struct hash_desc *desc, char *p, int blen, static int test_hash_jiffies_digest(struct hash_desc *desc,
struct scatterlist *sg, int blen,
char *out, int sec) char *out, int sec)
{ {
struct scatterlist sg[1];
unsigned long start, end; unsigned long start, end;
int bcount; int bcount;
int ret; int ret;
sg_init_table(sg, 1);
for (start = jiffies, end = start + sec * HZ, bcount = 0; for (start = jiffies, end = start + sec * HZ, bcount = 0;
time_before(jiffies, end); bcount++) { time_before(jiffies, end); bcount++) {
sg_set_buf(sg, p, blen);
ret = crypto_hash_digest(desc, sg, blen, out); ret = crypto_hash_digest(desc, sg, blen, out);
if (ret) if (ret)
return ret; return ret;
...@@ -910,18 +249,15 @@ static int test_hash_jiffies_digest(struct hash_desc *desc, char *p, int blen, ...@@ -910,18 +249,15 @@ static int test_hash_jiffies_digest(struct hash_desc *desc, char *p, int blen,
return 0; return 0;
} }
static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen, static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
int plen, char *out, int sec) int blen, int plen, char *out, int sec)
{ {
struct scatterlist sg[1];
unsigned long start, end; unsigned long start, end;
int bcount, pcount; int bcount, pcount;
int ret; int ret;
if (plen == blen) if (plen == blen)
return test_hash_jiffies_digest(desc, p, blen, out, sec); return test_hash_jiffies_digest(desc, sg, blen, out, sec);
sg_init_table(sg, 1);
for (start = jiffies, end = start + sec * HZ, bcount = 0; for (start = jiffies, end = start + sec * HZ, bcount = 0;
time_before(jiffies, end); bcount++) { time_before(jiffies, end); bcount++) {
...@@ -929,7 +265,6 @@ static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen, ...@@ -929,7 +265,6 @@ static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen,
if (ret) if (ret)
return ret; return ret;
for (pcount = 0; pcount < blen; pcount += plen) { for (pcount = 0; pcount < blen; pcount += plen) {
sg_set_buf(sg, p + pcount, plen);
ret = crypto_hash_update(desc, sg, plen); ret = crypto_hash_update(desc, sg, plen);
if (ret) if (ret)
return ret; return ret;
...@@ -946,22 +281,18 @@ static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen, ...@@ -946,22 +281,18 @@ static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen,
return 0; return 0;
} }
static int test_hash_cycles_digest(struct hash_desc *desc, char *p, int blen, static int test_hash_cycles_digest(struct hash_desc *desc,
char *out) struct scatterlist *sg, int blen, char *out)
{ {
struct scatterlist sg[1];
unsigned long cycles = 0; unsigned long cycles = 0;
int i; int i;
int ret; int ret;
sg_init_table(sg, 1);
local_bh_disable(); local_bh_disable();
local_irq_disable(); local_irq_disable();
/* Warm-up run. */ /* Warm-up run. */
for (i = 0; i < 4; i++) { for (i = 0; i < 4; i++) {
sg_set_buf(sg, p, blen);
ret = crypto_hash_digest(desc, sg, blen, out); ret = crypto_hash_digest(desc, sg, blen, out);
if (ret) if (ret)
goto out; goto out;
...@@ -973,7 +304,6 @@ static int test_hash_cycles_digest(struct hash_desc *desc, char *p, int blen, ...@@ -973,7 +304,6 @@ static int test_hash_cycles_digest(struct hash_desc *desc, char *p, int blen,
start = get_cycles(); start = get_cycles();
sg_set_buf(sg, p, blen);
ret = crypto_hash_digest(desc, sg, blen, out); ret = crypto_hash_digest(desc, sg, blen, out);
if (ret) if (ret)
goto out; goto out;
...@@ -996,18 +326,15 @@ static int test_hash_cycles_digest(struct hash_desc *desc, char *p, int blen, ...@@ -996,18 +326,15 @@ static int test_hash_cycles_digest(struct hash_desc *desc, char *p, int blen,
return 0; return 0;
} }
static int test_hash_cycles(struct hash_desc *desc, char *p, int blen, static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
int plen, char *out) int blen, int plen, char *out)
{ {
struct scatterlist sg[1];
unsigned long cycles = 0; unsigned long cycles = 0;
int i, pcount; int i, pcount;
int ret; int ret;
if (plen == blen) if (plen == blen)
return test_hash_cycles_digest(desc, p, blen, out); return test_hash_cycles_digest(desc, sg, blen, out);
sg_init_table(sg, 1);
local_bh_disable(); local_bh_disable();
local_irq_disable(); local_irq_disable();
...@@ -1018,7 +345,6 @@ static int test_hash_cycles(struct hash_desc *desc, char *p, int blen, ...@@ -1018,7 +345,6 @@ static int test_hash_cycles(struct hash_desc *desc, char *p, int blen,
if (ret) if (ret)
goto out; goto out;
for (pcount = 0; pcount < blen; pcount += plen) { for (pcount = 0; pcount < blen; pcount += plen) {
sg_set_buf(sg, p + pcount, plen);
ret = crypto_hash_update(desc, sg, plen); ret = crypto_hash_update(desc, sg, plen);
if (ret) if (ret)
goto out; goto out;
...@@ -1038,7 +364,6 @@ static int test_hash_cycles(struct hash_desc *desc, char *p, int blen, ...@@ -1038,7 +364,6 @@ static int test_hash_cycles(struct hash_desc *desc, char *p, int blen,
if (ret) if (ret)
goto out; goto out;
for (pcount = 0; pcount < blen; pcount += plen) { for (pcount = 0; pcount < blen; pcount += plen) {
sg_set_buf(sg, p + pcount, plen);
ret = crypto_hash_update(desc, sg, plen); ret = crypto_hash_update(desc, sg, plen);
if (ret) if (ret)
goto out; goto out;
...@@ -1065,9 +390,10 @@ static int test_hash_cycles(struct hash_desc *desc, char *p, int blen, ...@@ -1065,9 +390,10 @@ static int test_hash_cycles(struct hash_desc *desc, char *p, int blen,
return 0; return 0;
} }
static void test_hash_speed(char *algo, unsigned int sec, static void test_hash_speed(const char *algo, unsigned int sec,
struct hash_speed *speed) struct hash_speed *speed)
{ {
struct scatterlist sg[TVMEMSIZE];
struct crypto_hash *tfm; struct crypto_hash *tfm;
struct hash_desc desc; struct hash_desc desc;
char output[1024]; char output[1024];
...@@ -1093,23 +419,27 @@ static void test_hash_speed(char *algo, unsigned int sec, ...@@ -1093,23 +419,27 @@ static void test_hash_speed(char *algo, unsigned int sec,
goto out; goto out;
} }
sg_init_table(sg, TVMEMSIZE);
for (i = 0; i < TVMEMSIZE; i++) {
sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
memset(tvmem[i], 0xff, PAGE_SIZE);
}
for (i = 0; speed[i].blen != 0; i++) { for (i = 0; speed[i].blen != 0; i++) {
if (speed[i].blen > TVMEMSIZE) { if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
printk("template (%u) too big for tvmem (%u)\n", printk("template (%u) too big for tvmem (%lu)\n",
speed[i].blen, TVMEMSIZE); speed[i].blen, TVMEMSIZE * PAGE_SIZE);
goto out; goto out;
} }
printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ", printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ",
i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen); i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
memset(tvmem, 0xff, speed[i].blen);
if (sec) if (sec)
ret = test_hash_jiffies(&desc, tvmem, speed[i].blen, ret = test_hash_jiffies(&desc, sg, speed[i].blen,
speed[i].plen, output, sec); speed[i].plen, output, sec);
else else
ret = test_hash_cycles(&desc, tvmem, speed[i].blen, ret = test_hash_cycles(&desc, sg, speed[i].blen,
speed[i].plen, output); speed[i].plen, output);
if (ret) { if (ret) {
...@@ -1122,73 +452,6 @@ static void test_hash_speed(char *algo, unsigned int sec, ...@@ -1122,73 +452,6 @@ static void test_hash_speed(char *algo, unsigned int sec,
crypto_free_hash(tfm); crypto_free_hash(tfm);
} }
static void test_comp(char *algo, struct comp_testvec *ctemplate,
struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
unsigned int i;
char result[COMP_BUF_SIZE];
struct crypto_comp *tfm;
unsigned int tsize;
printk("\ntesting %s compression\n", algo);
tfm = crypto_alloc_comp(algo, 0, CRYPTO_ALG_ASYNC);
if (IS_ERR(tfm)) {
printk("failed to load transform for %s\n", algo);
return;
}
for (i = 0; i < ctcount; i++) {
int ilen, ret, dlen = COMP_BUF_SIZE;
printk("test %u:\n", i + 1);
memset(result, 0, sizeof (result));
ilen = ctemplate[i].inlen;
ret = crypto_comp_compress(tfm, ctemplate[i].input,
ilen, result, &dlen);
if (ret) {
printk("fail: ret=%d\n", ret);
continue;
}
hexdump(result, dlen);
printk("%s (ratio %d:%d)\n",
memcmp(result, ctemplate[i].output, dlen) ? "fail" : "pass",
ilen, dlen);
}
printk("\ntesting %s decompression\n", algo);
tsize = sizeof(struct comp_testvec);
tsize *= dtcount;
if (tsize > TVMEMSIZE) {
printk("template (%u) too big for tvmem (%u)\n", tsize,
TVMEMSIZE);
goto out;
}
for (i = 0; i < dtcount; i++) {
int ilen, ret, dlen = COMP_BUF_SIZE;
printk("test %u:\n", i + 1);
memset(result, 0, sizeof (result));
ilen = dtemplate[i].inlen;
ret = crypto_comp_decompress(tfm, dtemplate[i].input,
ilen, result, &dlen);
if (ret) {
printk("fail: ret=%d\n", ret);
continue;
}
hexdump(result, dlen);
printk("%s (ratio %d:%d)\n",
memcmp(result, dtemplate[i].output, dlen) ? "fail" : "pass",
ilen, dlen);
}
out:
crypto_free_comp(tfm);
}
static void test_available(void) static void test_available(void)
{ {
char **name = check; char **name = check;
...@@ -1201,549 +464,237 @@ static void test_available(void) ...@@ -1201,549 +464,237 @@ static void test_available(void)
} }
} }
static void do_test(void) static inline int tcrypt_test(const char *alg)
{ {
switch (mode) { return alg_test(alg, alg, 0, 0);
}
static void do_test(int m)
{
int i;
switch (m) {
case 0: case 0:
test_hash("md5", md5_tv_template, MD5_TEST_VECTORS); for (i = 1; i < 200; i++)
do_test(i);
test_hash("sha1", sha1_tv_template, SHA1_TEST_VECTORS);
//DES
test_cipher("ecb(des)", ENCRYPT, des_enc_tv_template,
DES_ENC_TEST_VECTORS);
test_cipher("ecb(des)", DECRYPT, des_dec_tv_template,
DES_DEC_TEST_VECTORS);
test_cipher("cbc(des)", ENCRYPT, des_cbc_enc_tv_template,
DES_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(des)", DECRYPT, des_cbc_dec_tv_template,
DES_CBC_DEC_TEST_VECTORS);
//DES3_EDE
test_cipher("ecb(des3_ede)", ENCRYPT, des3_ede_enc_tv_template,
DES3_EDE_ENC_TEST_VECTORS);
test_cipher("ecb(des3_ede)", DECRYPT, des3_ede_dec_tv_template,
DES3_EDE_DEC_TEST_VECTORS);
test_cipher("cbc(des3_ede)", ENCRYPT,
des3_ede_cbc_enc_tv_template,
DES3_EDE_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(des3_ede)", DECRYPT,
des3_ede_cbc_dec_tv_template,
DES3_EDE_CBC_DEC_TEST_VECTORS);
test_hash("md4", md4_tv_template, MD4_TEST_VECTORS);
test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS);
test_hash("sha256", sha256_tv_template, SHA256_TEST_VECTORS);
//BLOWFISH
test_cipher("ecb(blowfish)", ENCRYPT, bf_enc_tv_template,
BF_ENC_TEST_VECTORS);
test_cipher("ecb(blowfish)", DECRYPT, bf_dec_tv_template,
BF_DEC_TEST_VECTORS);
test_cipher("cbc(blowfish)", ENCRYPT, bf_cbc_enc_tv_template,
BF_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(blowfish)", DECRYPT, bf_cbc_dec_tv_template,
BF_CBC_DEC_TEST_VECTORS);
//TWOFISH
test_cipher("ecb(twofish)", ENCRYPT, tf_enc_tv_template,
TF_ENC_TEST_VECTORS);
test_cipher("ecb(twofish)", DECRYPT, tf_dec_tv_template,
TF_DEC_TEST_VECTORS);
test_cipher("cbc(twofish)", ENCRYPT, tf_cbc_enc_tv_template,
TF_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(twofish)", DECRYPT, tf_cbc_dec_tv_template,
TF_CBC_DEC_TEST_VECTORS);
//SERPENT
test_cipher("ecb(serpent)", ENCRYPT, serpent_enc_tv_template,
SERPENT_ENC_TEST_VECTORS);
test_cipher("ecb(serpent)", DECRYPT, serpent_dec_tv_template,
SERPENT_DEC_TEST_VECTORS);
//TNEPRES
test_cipher("ecb(tnepres)", ENCRYPT, tnepres_enc_tv_template,
TNEPRES_ENC_TEST_VECTORS);
test_cipher("ecb(tnepres)", DECRYPT, tnepres_dec_tv_template,
TNEPRES_DEC_TEST_VECTORS);
//AES
test_cipher("ecb(aes)", ENCRYPT, aes_enc_tv_template,
AES_ENC_TEST_VECTORS);
test_cipher("ecb(aes)", DECRYPT, aes_dec_tv_template,
AES_DEC_TEST_VECTORS);
test_cipher("cbc(aes)", ENCRYPT, aes_cbc_enc_tv_template,
AES_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(aes)", DECRYPT, aes_cbc_dec_tv_template,
AES_CBC_DEC_TEST_VECTORS);
test_cipher("lrw(aes)", ENCRYPT, aes_lrw_enc_tv_template,
AES_LRW_ENC_TEST_VECTORS);
test_cipher("lrw(aes)", DECRYPT, aes_lrw_dec_tv_template,
AES_LRW_DEC_TEST_VECTORS);
test_cipher("xts(aes)", ENCRYPT, aes_xts_enc_tv_template,
AES_XTS_ENC_TEST_VECTORS);
test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
AES_XTS_DEC_TEST_VECTORS);
test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
AES_CTR_ENC_TEST_VECTORS);
test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
AES_CTR_DEC_TEST_VECTORS);
test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template,
AES_GCM_ENC_TEST_VECTORS);
test_aead("gcm(aes)", DECRYPT, aes_gcm_dec_tv_template,
AES_GCM_DEC_TEST_VECTORS);
test_aead("ccm(aes)", ENCRYPT, aes_ccm_enc_tv_template,
AES_CCM_ENC_TEST_VECTORS);
test_aead("ccm(aes)", DECRYPT, aes_ccm_dec_tv_template,
AES_CCM_DEC_TEST_VECTORS);
//CAST5
test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template,
CAST5_ENC_TEST_VECTORS);
test_cipher("ecb(cast5)", DECRYPT, cast5_dec_tv_template,
CAST5_DEC_TEST_VECTORS);
//CAST6
test_cipher("ecb(cast6)", ENCRYPT, cast6_enc_tv_template,
CAST6_ENC_TEST_VECTORS);
test_cipher("ecb(cast6)", DECRYPT, cast6_dec_tv_template,
CAST6_DEC_TEST_VECTORS);
//ARC4
test_cipher("ecb(arc4)", ENCRYPT, arc4_enc_tv_template,
ARC4_ENC_TEST_VECTORS);
test_cipher("ecb(arc4)", DECRYPT, arc4_dec_tv_template,
ARC4_DEC_TEST_VECTORS);
//TEA
test_cipher("ecb(tea)", ENCRYPT, tea_enc_tv_template,
TEA_ENC_TEST_VECTORS);
test_cipher("ecb(tea)", DECRYPT, tea_dec_tv_template,
TEA_DEC_TEST_VECTORS);
//XTEA
test_cipher("ecb(xtea)", ENCRYPT, xtea_enc_tv_template,
XTEA_ENC_TEST_VECTORS);
test_cipher("ecb(xtea)", DECRYPT, xtea_dec_tv_template,
XTEA_DEC_TEST_VECTORS);
//KHAZAD
test_cipher("ecb(khazad)", ENCRYPT, khazad_enc_tv_template,
KHAZAD_ENC_TEST_VECTORS);
test_cipher("ecb(khazad)", DECRYPT, khazad_dec_tv_template,
KHAZAD_DEC_TEST_VECTORS);
//ANUBIS
test_cipher("ecb(anubis)", ENCRYPT, anubis_enc_tv_template,
ANUBIS_ENC_TEST_VECTORS);
test_cipher("ecb(anubis)", DECRYPT, anubis_dec_tv_template,
ANUBIS_DEC_TEST_VECTORS);
test_cipher("cbc(anubis)", ENCRYPT, anubis_cbc_enc_tv_template,
ANUBIS_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(anubis)", DECRYPT, anubis_cbc_dec_tv_template,
ANUBIS_CBC_ENC_TEST_VECTORS);
//XETA
test_cipher("ecb(xeta)", ENCRYPT, xeta_enc_tv_template,
XETA_ENC_TEST_VECTORS);
test_cipher("ecb(xeta)", DECRYPT, xeta_dec_tv_template,
XETA_DEC_TEST_VECTORS);
//FCrypt
test_cipher("pcbc(fcrypt)", ENCRYPT, fcrypt_pcbc_enc_tv_template,
FCRYPT_ENC_TEST_VECTORS);
test_cipher("pcbc(fcrypt)", DECRYPT, fcrypt_pcbc_dec_tv_template,
FCRYPT_DEC_TEST_VECTORS);
//CAMELLIA
test_cipher("ecb(camellia)", ENCRYPT,
camellia_enc_tv_template,
CAMELLIA_ENC_TEST_VECTORS);
test_cipher("ecb(camellia)", DECRYPT,
camellia_dec_tv_template,
CAMELLIA_DEC_TEST_VECTORS);
test_cipher("cbc(camellia)", ENCRYPT,
camellia_cbc_enc_tv_template,
CAMELLIA_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(camellia)", DECRYPT,
camellia_cbc_dec_tv_template,
CAMELLIA_CBC_DEC_TEST_VECTORS);
//SEED
test_cipher("ecb(seed)", ENCRYPT, seed_enc_tv_template,
SEED_ENC_TEST_VECTORS);
test_cipher("ecb(seed)", DECRYPT, seed_dec_tv_template,
SEED_DEC_TEST_VECTORS);
//CTS
test_cipher("cts(cbc(aes))", ENCRYPT, cts_mode_enc_tv_template,
CTS_MODE_ENC_TEST_VECTORS);
test_cipher("cts(cbc(aes))", DECRYPT, cts_mode_dec_tv_template,
CTS_MODE_DEC_TEST_VECTORS);
test_hash("sha384", sha384_tv_template, SHA384_TEST_VECTORS);
test_hash("sha512", sha512_tv_template, SHA512_TEST_VECTORS);
test_hash("wp512", wp512_tv_template, WP512_TEST_VECTORS);
test_hash("wp384", wp384_tv_template, WP384_TEST_VECTORS);
test_hash("wp256", wp256_tv_template, WP256_TEST_VECTORS);
test_hash("tgr192", tgr192_tv_template, TGR192_TEST_VECTORS);
test_hash("tgr160", tgr160_tv_template, TGR160_TEST_VECTORS);
test_hash("tgr128", tgr128_tv_template, TGR128_TEST_VECTORS);
test_comp("deflate", deflate_comp_tv_template,
deflate_decomp_tv_template, DEFLATE_COMP_TEST_VECTORS,
DEFLATE_DECOMP_TEST_VECTORS);
test_comp("lzo", lzo_comp_tv_template, lzo_decomp_tv_template,
LZO_COMP_TEST_VECTORS, LZO_DECOMP_TEST_VECTORS);
test_hash("crc32c", crc32c_tv_template, CRC32C_TEST_VECTORS);
test_hash("hmac(md5)", hmac_md5_tv_template,
HMAC_MD5_TEST_VECTORS);
test_hash("hmac(sha1)", hmac_sha1_tv_template,
HMAC_SHA1_TEST_VECTORS);
test_hash("hmac(sha224)", hmac_sha224_tv_template,
HMAC_SHA224_TEST_VECTORS);
test_hash("hmac(sha256)", hmac_sha256_tv_template,
HMAC_SHA256_TEST_VECTORS);
test_hash("hmac(sha384)", hmac_sha384_tv_template,
HMAC_SHA384_TEST_VECTORS);
test_hash("hmac(sha512)", hmac_sha512_tv_template,
HMAC_SHA512_TEST_VECTORS);
test_hash("xcbc(aes)", aes_xcbc128_tv_template,
XCBC_AES_TEST_VECTORS);
test_hash("michael_mic", michael_mic_tv_template, MICHAEL_MIC_TEST_VECTORS);
break; break;
case 1: case 1:
test_hash("md5", md5_tv_template, MD5_TEST_VECTORS); tcrypt_test("md5");
break; break;
case 2: case 2:
test_hash("sha1", sha1_tv_template, SHA1_TEST_VECTORS); tcrypt_test("sha1");
break; break;
case 3: case 3:
test_cipher("ecb(des)", ENCRYPT, des_enc_tv_template, tcrypt_test("ecb(des)");
DES_ENC_TEST_VECTORS); tcrypt_test("cbc(des)");
test_cipher("ecb(des)", DECRYPT, des_dec_tv_template,
DES_DEC_TEST_VECTORS);
test_cipher("cbc(des)", ENCRYPT, des_cbc_enc_tv_template,
DES_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(des)", DECRYPT, des_cbc_dec_tv_template,
DES_CBC_DEC_TEST_VECTORS);
break; break;
case 4: case 4:
test_cipher("ecb(des3_ede)", ENCRYPT, des3_ede_enc_tv_template, tcrypt_test("ecb(des3_ede)");
DES3_EDE_ENC_TEST_VECTORS); tcrypt_test("cbc(des3_ede)");
test_cipher("ecb(des3_ede)", DECRYPT, des3_ede_dec_tv_template,
DES3_EDE_DEC_TEST_VECTORS);
test_cipher("cbc(des3_ede)", ENCRYPT,
des3_ede_cbc_enc_tv_template,
DES3_EDE_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(des3_ede)", DECRYPT,
des3_ede_cbc_dec_tv_template,
DES3_EDE_CBC_DEC_TEST_VECTORS);
break; break;
case 5: case 5:
test_hash("md4", md4_tv_template, MD4_TEST_VECTORS); tcrypt_test("md4");
break; break;
case 6: case 6:
test_hash("sha256", sha256_tv_template, SHA256_TEST_VECTORS); tcrypt_test("sha256");
break; break;
case 7: case 7:
test_cipher("ecb(blowfish)", ENCRYPT, bf_enc_tv_template, tcrypt_test("ecb(blowfish)");
BF_ENC_TEST_VECTORS); tcrypt_test("cbc(blowfish)");
test_cipher("ecb(blowfish)", DECRYPT, bf_dec_tv_template,
BF_DEC_TEST_VECTORS);
test_cipher("cbc(blowfish)", ENCRYPT, bf_cbc_enc_tv_template,
BF_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(blowfish)", DECRYPT, bf_cbc_dec_tv_template,
BF_CBC_DEC_TEST_VECTORS);
break; break;
case 8: case 8:
test_cipher("ecb(twofish)", ENCRYPT, tf_enc_tv_template, tcrypt_test("ecb(twofish)");
TF_ENC_TEST_VECTORS); tcrypt_test("cbc(twofish)");
test_cipher("ecb(twofish)", DECRYPT, tf_dec_tv_template,
TF_DEC_TEST_VECTORS);
test_cipher("cbc(twofish)", ENCRYPT, tf_cbc_enc_tv_template,
TF_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(twofish)", DECRYPT, tf_cbc_dec_tv_template,
TF_CBC_DEC_TEST_VECTORS);
break; break;
case 9: case 9:
test_cipher("ecb(serpent)", ENCRYPT, serpent_enc_tv_template, tcrypt_test("ecb(serpent)");
SERPENT_ENC_TEST_VECTORS);
test_cipher("ecb(serpent)", DECRYPT, serpent_dec_tv_template,
SERPENT_DEC_TEST_VECTORS);
break; break;
case 10: case 10:
test_cipher("ecb(aes)", ENCRYPT, aes_enc_tv_template, tcrypt_test("ecb(aes)");
AES_ENC_TEST_VECTORS); tcrypt_test("cbc(aes)");
test_cipher("ecb(aes)", DECRYPT, aes_dec_tv_template, tcrypt_test("lrw(aes)");
AES_DEC_TEST_VECTORS); tcrypt_test("xts(aes)");
test_cipher("cbc(aes)", ENCRYPT, aes_cbc_enc_tv_template, tcrypt_test("rfc3686(ctr(aes))");
AES_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(aes)", DECRYPT, aes_cbc_dec_tv_template,
AES_CBC_DEC_TEST_VECTORS);
test_cipher("lrw(aes)", ENCRYPT, aes_lrw_enc_tv_template,
AES_LRW_ENC_TEST_VECTORS);
test_cipher("lrw(aes)", DECRYPT, aes_lrw_dec_tv_template,
AES_LRW_DEC_TEST_VECTORS);
test_cipher("xts(aes)", ENCRYPT, aes_xts_enc_tv_template,
AES_XTS_ENC_TEST_VECTORS);
test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
AES_XTS_DEC_TEST_VECTORS);
test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
AES_CTR_ENC_TEST_VECTORS);
test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
AES_CTR_DEC_TEST_VECTORS);
break; break;
case 11: case 11:
test_hash("sha384", sha384_tv_template, SHA384_TEST_VECTORS); tcrypt_test("sha384");
break; break;
case 12: case 12:
test_hash("sha512", sha512_tv_template, SHA512_TEST_VECTORS); tcrypt_test("sha512");
break; break;
case 13: case 13:
test_comp("deflate", deflate_comp_tv_template, tcrypt_test("deflate");
deflate_decomp_tv_template, DEFLATE_COMP_TEST_VECTORS,
DEFLATE_DECOMP_TEST_VECTORS);
break; break;
case 14: case 14:
test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template, tcrypt_test("ecb(cast5)");
CAST5_ENC_TEST_VECTORS);
test_cipher("ecb(cast5)", DECRYPT, cast5_dec_tv_template,
CAST5_DEC_TEST_VECTORS);
break; break;
case 15: case 15:
test_cipher("ecb(cast6)", ENCRYPT, cast6_enc_tv_template, tcrypt_test("ecb(cast6)");
CAST6_ENC_TEST_VECTORS);
test_cipher("ecb(cast6)", DECRYPT, cast6_dec_tv_template,
CAST6_DEC_TEST_VECTORS);
break; break;
case 16: case 16:
test_cipher("ecb(arc4)", ENCRYPT, arc4_enc_tv_template, tcrypt_test("ecb(arc4)");
ARC4_ENC_TEST_VECTORS);
test_cipher("ecb(arc4)", DECRYPT, arc4_dec_tv_template,
ARC4_DEC_TEST_VECTORS);
break; break;
case 17: case 17:
test_hash("michael_mic", michael_mic_tv_template, MICHAEL_MIC_TEST_VECTORS); tcrypt_test("michael_mic");
break; break;
case 18: case 18:
test_hash("crc32c", crc32c_tv_template, CRC32C_TEST_VECTORS); tcrypt_test("crc32c");
break; break;
case 19: case 19:
test_cipher("ecb(tea)", ENCRYPT, tea_enc_tv_template, tcrypt_test("ecb(tea)");
TEA_ENC_TEST_VECTORS);
test_cipher("ecb(tea)", DECRYPT, tea_dec_tv_template,
TEA_DEC_TEST_VECTORS);
break; break;
case 20: case 20:
test_cipher("ecb(xtea)", ENCRYPT, xtea_enc_tv_template, tcrypt_test("ecb(xtea)");
XTEA_ENC_TEST_VECTORS);
test_cipher("ecb(xtea)", DECRYPT, xtea_dec_tv_template,
XTEA_DEC_TEST_VECTORS);
break; break;
case 21: case 21:
test_cipher("ecb(khazad)", ENCRYPT, khazad_enc_tv_template, tcrypt_test("ecb(khazad)");
KHAZAD_ENC_TEST_VECTORS);
test_cipher("ecb(khazad)", DECRYPT, khazad_dec_tv_template,
KHAZAD_DEC_TEST_VECTORS);
break; break;
case 22: case 22:
test_hash("wp512", wp512_tv_template, WP512_TEST_VECTORS); tcrypt_test("wp512");
break; break;
case 23: case 23:
test_hash("wp384", wp384_tv_template, WP384_TEST_VECTORS); tcrypt_test("wp384");
break; break;
case 24: case 24:
test_hash("wp256", wp256_tv_template, WP256_TEST_VECTORS); tcrypt_test("wp256");
break; break;
case 25: case 25:
test_cipher("ecb(tnepres)", ENCRYPT, tnepres_enc_tv_template, tcrypt_test("ecb(tnepres)");
TNEPRES_ENC_TEST_VECTORS);
test_cipher("ecb(tnepres)", DECRYPT, tnepres_dec_tv_template,
TNEPRES_DEC_TEST_VECTORS);
break; break;
case 26: case 26:
test_cipher("ecb(anubis)", ENCRYPT, anubis_enc_tv_template, tcrypt_test("ecb(anubis)");
ANUBIS_ENC_TEST_VECTORS); tcrypt_test("cbc(anubis)");
test_cipher("ecb(anubis)", DECRYPT, anubis_dec_tv_template,
ANUBIS_DEC_TEST_VECTORS);
test_cipher("cbc(anubis)", ENCRYPT, anubis_cbc_enc_tv_template,
ANUBIS_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(anubis)", DECRYPT, anubis_cbc_dec_tv_template,
ANUBIS_CBC_ENC_TEST_VECTORS);
break; break;
case 27: case 27:
test_hash("tgr192", tgr192_tv_template, TGR192_TEST_VECTORS); tcrypt_test("tgr192");
break; break;
case 28: case 28:
test_hash("tgr160", tgr160_tv_template, TGR160_TEST_VECTORS); tcrypt_test("tgr160");
break; break;
case 29: case 29:
test_hash("tgr128", tgr128_tv_template, TGR128_TEST_VECTORS); tcrypt_test("tgr128");
break; break;
case 30: case 30:
test_cipher("ecb(xeta)", ENCRYPT, xeta_enc_tv_template, tcrypt_test("ecb(xeta)");
XETA_ENC_TEST_VECTORS);
test_cipher("ecb(xeta)", DECRYPT, xeta_dec_tv_template,
XETA_DEC_TEST_VECTORS);
break; break;
case 31: case 31:
test_cipher("pcbc(fcrypt)", ENCRYPT, fcrypt_pcbc_enc_tv_template, tcrypt_test("pcbc(fcrypt)");
FCRYPT_ENC_TEST_VECTORS);
test_cipher("pcbc(fcrypt)", DECRYPT, fcrypt_pcbc_dec_tv_template,
FCRYPT_DEC_TEST_VECTORS);
break; break;
case 32: case 32:
test_cipher("ecb(camellia)", ENCRYPT, tcrypt_test("ecb(camellia)");
camellia_enc_tv_template, tcrypt_test("cbc(camellia)");
CAMELLIA_ENC_TEST_VECTORS);
test_cipher("ecb(camellia)", DECRYPT,
camellia_dec_tv_template,
CAMELLIA_DEC_TEST_VECTORS);
test_cipher("cbc(camellia)", ENCRYPT,
camellia_cbc_enc_tv_template,
CAMELLIA_CBC_ENC_TEST_VECTORS);
test_cipher("cbc(camellia)", DECRYPT,
camellia_cbc_dec_tv_template,
CAMELLIA_CBC_DEC_TEST_VECTORS);
break; break;
case 33: case 33:
test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS); tcrypt_test("sha224");
break; break;
case 34: case 34:
test_cipher("salsa20", ENCRYPT, tcrypt_test("salsa20");
salsa20_stream_enc_tv_template,
SALSA20_STREAM_ENC_TEST_VECTORS);
break; break;
case 35: case 35:
test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template, tcrypt_test("gcm(aes)");
AES_GCM_ENC_TEST_VECTORS);
test_aead("gcm(aes)", DECRYPT, aes_gcm_dec_tv_template,
AES_GCM_DEC_TEST_VECTORS);
break; break;
case 36: case 36:
test_comp("lzo", lzo_comp_tv_template, lzo_decomp_tv_template, tcrypt_test("lzo");
LZO_COMP_TEST_VECTORS, LZO_DECOMP_TEST_VECTORS);
break; break;
case 37: case 37:
test_aead("ccm(aes)", ENCRYPT, aes_ccm_enc_tv_template, tcrypt_test("ccm(aes)");
AES_CCM_ENC_TEST_VECTORS);
test_aead("ccm(aes)", DECRYPT, aes_ccm_dec_tv_template,
AES_CCM_DEC_TEST_VECTORS);
break; break;
case 38: case 38:
test_cipher("cts(cbc(aes))", ENCRYPT, cts_mode_enc_tv_template, tcrypt_test("cts(cbc(aes))");
CTS_MODE_ENC_TEST_VECTORS);
test_cipher("cts(cbc(aes))", DECRYPT, cts_mode_dec_tv_template,
CTS_MODE_DEC_TEST_VECTORS);
break; break;
case 39: case 39:
test_hash("rmd128", rmd128_tv_template, RMD128_TEST_VECTORS); tcrypt_test("rmd128");
break; break;
case 40: case 40:
test_hash("rmd160", rmd160_tv_template, RMD160_TEST_VECTORS); tcrypt_test("rmd160");
break; break;
case 41: case 41:
test_hash("rmd256", rmd256_tv_template, RMD256_TEST_VECTORS); tcrypt_test("rmd256");
break; break;
case 42: case 42:
test_hash("rmd320", rmd320_tv_template, RMD320_TEST_VECTORS); tcrypt_test("rmd320");
break;
case 43:
tcrypt_test("ecb(seed)");
break; break;
case 100: case 100:
test_hash("hmac(md5)", hmac_md5_tv_template, tcrypt_test("hmac(md5)");
HMAC_MD5_TEST_VECTORS);
break; break;
case 101: case 101:
test_hash("hmac(sha1)", hmac_sha1_tv_template, tcrypt_test("hmac(sha1)");
HMAC_SHA1_TEST_VECTORS);
break; break;
case 102: case 102:
test_hash("hmac(sha256)", hmac_sha256_tv_template, tcrypt_test("hmac(sha256)");
HMAC_SHA256_TEST_VECTORS);
break; break;
case 103: case 103:
test_hash("hmac(sha384)", hmac_sha384_tv_template, tcrypt_test("hmac(sha384)");
HMAC_SHA384_TEST_VECTORS);
break; break;
case 104: case 104:
test_hash("hmac(sha512)", hmac_sha512_tv_template, tcrypt_test("hmac(sha512)");
HMAC_SHA512_TEST_VECTORS);
break; break;
case 105: case 105:
test_hash("hmac(sha224)", hmac_sha224_tv_template, tcrypt_test("hmac(sha224)");
HMAC_SHA224_TEST_VECTORS);
break; break;
case 106: case 106:
test_hash("xcbc(aes)", aes_xcbc128_tv_template, tcrypt_test("xcbc(aes)");
XCBC_AES_TEST_VECTORS);
break; break;
case 107: case 107:
test_hash("hmac(rmd128)", hmac_rmd128_tv_template, tcrypt_test("hmac(rmd128)");
HMAC_RMD128_TEST_VECTORS);
break; break;
case 108: case 108:
test_hash("hmac(rmd160)", hmac_rmd160_tv_template, tcrypt_test("hmac(rmd160)");
HMAC_RMD160_TEST_VECTORS);
break; break;
case 200: case 200:
...@@ -1767,16 +718,16 @@ static void do_test(void) ...@@ -1767,16 +718,16 @@ static void do_test(void)
case 201: case 201:
test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec, test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS, des3_speed_template, DES3_SPEED_VECTORS,
speed_template_24); speed_template_24);
test_cipher_speed("ecb(des3_ede)", DECRYPT, sec, test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS, des3_speed_template, DES3_SPEED_VECTORS,
speed_template_24); speed_template_24);
test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec, test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS, des3_speed_template, DES3_SPEED_VECTORS,
speed_template_24); speed_template_24);
test_cipher_speed("cbc(des3_ede)", DECRYPT, sec, test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS, des3_speed_template, DES3_SPEED_VECTORS,
speed_template_24); speed_template_24);
break; break;
...@@ -1906,31 +857,21 @@ static void do_test(void) ...@@ -1906,31 +857,21 @@ static void do_test(void)
case 1000: case 1000:
test_available(); test_available();
break; break;
default:
/* useful for debugging */
printk("not testing anything\n");
break;
} }
} }
static int __init tcrypt_mod_init(void) static int __init tcrypt_mod_init(void)
{ {
int err = -ENOMEM; int err = -ENOMEM;
int i;
tvmem = kmalloc(TVMEMSIZE, GFP_KERNEL); for (i = 0; i < TVMEMSIZE; i++) {
if (tvmem == NULL) tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
return err; if (!tvmem[i])
xbuf = kmalloc(XBUFSIZE, GFP_KERNEL);
if (xbuf == NULL)
goto err_free_tv; goto err_free_tv;
}
axbuf = kmalloc(XBUFSIZE, GFP_KERNEL); do_test(mode);
if (axbuf == NULL)
goto err_free_xbuf;
do_test();
/* We intentionaly return -EAGAIN to prevent keeping /* We intentionaly return -EAGAIN to prevent keeping
* the module. It does all its work from init() * the module. It does all its work from init()
...@@ -1940,11 +881,9 @@ static int __init tcrypt_mod_init(void) ...@@ -1940,11 +881,9 @@ static int __init tcrypt_mod_init(void)
*/ */
err = -EAGAIN; err = -EAGAIN;
kfree(axbuf); err_free_tv:
err_free_xbuf: for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
kfree(xbuf); free_page((unsigned long)tvmem[i]);
err_free_tv:
kfree(tvmem);
return err; return err;
} }
......
This source diff could not be displayed because it is too large. You can view the blob instead.
/*
* Algorithm testing framework and tests.
*
* Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
* Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
* Copyright (c) 2007 Nokia Siemens Networks
* Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"
#include "testmgr.h"
/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE 8

/*
 * Indexes into the xbuf to simulate cross-page access.
 * These are byte offsets into the xbuf/axbuf page arrays; several of
 * them exceed one page so that scatterlist chunks built from them land
 * on different pages (index selected via IDX[k] >> PAGE_SHIFT below).
 */
#define IDX1 32
#define IDX2 32400
#define IDX3 1
#define IDX4 8193
#define IDX5 22222
#define IDX6 17101
#define IDX7 27333
#define IDX8 3000

/*
 * Used by test_cipher() (and the other test_* helpers) to select the
 * operation direction.
 */
#define ENCRYPT 1
#define DECRYPT 0
/*
 * Rendezvous between the test thread and an async crypto request:
 * the test thread waits on @completion and then reads the final
 * request status from @err (filled in by tcrypt_complete()).
 */
struct tcrypt_result {
	struct completion completion;	/* signalled when the request finishes */
	int err;			/* final status of the async request */
};
/* AEAD test vectors, split into encryption and decryption sets. */
struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;	/* vector array */
		unsigned int count;		/* number of entries in vecs */
	} enc, dec;
};
/* Cipher test vectors, split into encryption and decryption sets. */
struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;	/* vector array */
		unsigned int count;		/* number of entries in vecs */
	} enc, dec;
};
/* Compression test vectors, split into compress and decompress sets. */
struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;	/* vector array */
		unsigned int count;		/* number of entries in vecs */
	} comp, decomp;
};
/* Hash/digest test vectors (one direction only). */
struct hash_test_suite {
	struct hash_testvec *vecs;	/* vector array */
	unsigned int count;		/* number of entries in vecs */
};
/*
 * One entry of the algorithm-test table: maps an algorithm name to the
 * test routine that validates it and to the vectors that routine uses.
 */
struct alg_test_desc {
	const char *alg;		/* algorithm name being tested */
	/* driver-specific test callback; returns 0 on success */
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);

	/* exactly one member is valid, selected by the test callback */
	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
	} suite;
};
/* Byte offsets (IDX1..IDX8) used to place scatterlist chunks at awkward,
 * page-crossing positions inside xbuf/axbuf. */
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
/* Scratch pages for test payloads (xbuf) and AEAD associated data (axbuf).
 * NOTE(review): allocation/freeing of these pages is not visible in this
 * chunk — presumably done in this file's init/exit paths; verify there. */
static char *xbuf[XBUFSIZE];
static char *axbuf[XBUFSIZE];
/* Dump @len bytes at @buf to the log (16 bytes per line, offset-prefixed);
 * used to show mismatching results when a test vector fails. */
static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}
/*
 * Async completion callback: records the final status in the
 * tcrypt_result hung off @req->data and wakes the waiting test thread.
 * Intermediate -EINPROGRESS notifications are ignored; the real
 * completion arrives later with the final error code.
 */
static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err != -EINPROGRESS) {
		res->err = err;
		complete(&res->completion);
	}
}
/*
 * Run the hash test vectors in @template (tcount entries) against @tfm.
 *
 * Phase 1 digests every vector from a single linear buffer; phase 2
 * re-runs only the vectors that specify a scatter layout (np != 0),
 * splitting the plaintext across pages according to tap[].
 *
 * Returns 0 if all vectors pass, or a negative errno on the first
 * failure (allocation, setkey, digest or result mismatch).
 */
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char result[64];	/* large enough for the biggest digest tested */
	struct ahash_request *req;
	struct tcrypt_result tresult;
	int ret;
	void *hash_buff;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		ret = -ENOMEM;
		goto out_noreq;
	}
	/* Completion callback wakes us when an async digest finishes. */
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	/* Phase 1: every vector, digested from one contiguous buffer. */
	for (i = 0; i < tcount; i++) {
		memset(result, 0, 64);

		hash_buff = xbuf[0];

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		/* Keyed hashes (ksize != 0) need the key installed first. */
		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			ret = crypto_ahash_setkey(tfm, template[i].key,
						  template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", i + 1, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_ahash_digest(req);
		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			/* Async path: wait for tcrypt_complete(). */
			ret = wait_for_completion_interruptible(
				&tresult.completion);
			if (!ret && !(ret = tresult.err)) {
				INIT_COMPLETION(tresult.completion);
				break;
			}
			/* fall through */
		default:
			printk(KERN_ERR "alg: hash: digest failed on test %d "
			       "for %s: ret=%d\n", i + 1, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       i + 1, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* Phase 2: vectors with a scatter layout (np chunks of tap[] bytes),
	 * placed at page-crossing offsets taken from IDX[]. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;

			memset(result, 0, 64);

			temp = 0;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				sg_set_buf(&sg[k],
					   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].plaintext + temp,
						  template[i].tap[k]),
					   template[i].tap[k]);
				temp += template[i].tap[k];
			}

			if (template[i].ksize) {
				crypto_ahash_clear_flags(tfm, ~0);
				ret = crypto_ahash_setkey(tfm, template[i].key,
							  template[i].ksize);

				if (ret) {
					printk(KERN_ERR "alg: hash: setkey "
					       "failed on chunking test %d "
					       "for %s: ret=%d\n", j, algo,
					       -ret);
					goto out;
				}
			}

			ahash_request_set_crypt(req, sg, result,
						template[i].psize);
			ret = crypto_ahash_digest(req);
			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&tresult.completion);
				if (!ret && !(ret = tresult.err)) {
					INIT_COMPLETION(tresult.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: hash: digest failed "
				       "on chunking test %d for %s: "
				       "ret=%d\n", j, algo, -ret);
				goto out;
			}

			if (memcmp(result, template[i].digest,
				   crypto_ahash_digestsize(tfm))) {
				printk(KERN_ERR "alg: hash: Chunking test %d "
				       "failed for %s\n", j, algo);
				hexdump(result, crypto_ahash_digestsize(tfm));
				ret = -EINVAL;
				goto out;
			}
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	return ret;
}
/*
 * Run the AEAD test vectors in @template (tcount entries) against @tfm
 * in the direction given by @enc (ENCRYPT or DECRYPT).
 *
 * Phase 1 handles linear vectors (np == 0) from contiguous buffers;
 * phase 2 handles chunked vectors, scattering the payload per tap[]
 * and the associated data per atap[] across pages.  The auth tag size
 * is derived per vector as |rlen - ilen|.
 *
 * Returns 0 if all vectors pass, or a negative errno on the first
 * failure.
 */
static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = 0;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist sg[8];
	struct scatterlist asg[8];	/* associated-data scatterlist */
	const char *e;			/* "encryption"/"decryption" for logs */
	struct tcrypt_result result;
	unsigned int authsize;
	void *input;
	void *assoc;
	char iv[MAX_IVLEN];

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: aead: Failed to allocate request for "
		       "%s\n", algo);
		ret = -ENOMEM;
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	/* Phase 1: linear vectors (np == 0). */
	for (i = 0, j = 0; i < tcount; i++) {
		if (!template[i].np) {
			j++;

			/* some templates have no input data but they will
			 * touch input
			 */
			input = xbuf[0];
			assoc = axbuf[0];

			memcpy(input, template[i].input, template[i].ilen);
			memcpy(assoc, template[i].assoc, template[i].alen);
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			/* wk: vector deliberately uses a weak key. */
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key,
						 template[i].klen);
			/* (!ret) == fail: success when failure was expected,
			 * or failure when success was expected. */
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "test %d for %s: flags=%x\n", j, algo,
				       crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			/* Tag length = difference between result and input. */
			authsize = abs(template[i].rlen - template[i].ilen);
			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on test %d for %s\n",
				       authsize, j, algo);
				goto out;
			}

			/* Encryption appends the tag, so size the buffer
			 * to hold it. */
			sg_init_one(&sg[0], input,
				    template[i].ilen + (enc ? authsize : 0));

			sg_init_one(&asg[0], assoc, template[i].alen);

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				/* Async path: wait for tcrypt_complete(). */
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on test "
				       "%d for %s: ret=%d\n", e, j, algo, -ret);
				goto out;
			}

			q = input;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: aead: Test %d failed on "
				       "%s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	/* Phase 2: chunked vectors (np != 0), payload and associated data
	 * scattered across pages at the IDX[] offsets. */
	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;

			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key, template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "chunk test %d for %s: flags=%x\n", j,
				       algo, crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);

			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0, temp = 0; k < template[i].np; k++) {
				/* Each chunk must fit within its page. */
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				/* Plant a 0 guard byte right after the chunk
				 * (plus tag space on the last chunk when
				 * encrypting) to detect overruns later. */
				n = template[i].tap[k];
				if (k == template[i].np - 1 && enc)
					n += authsize;
				if (offset_in_page(q) + n < PAGE_SIZE)
					q[n] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);
				temp += template[i].tap[k];
			}

			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on chunk test %d for "
				       "%s\n", authsize, j, algo);
				goto out;
			}

			if (enc) {
				/* Grow the last chunk to receive the tag. */
				if (WARN_ON(sg[k - 1].offset +
					    sg[k - 1].length + authsize >
					    PAGE_SIZE)) {
					ret = -EINVAL;
					goto out;
				}

				sg[k - 1].length += authsize;
			}

			/* Scatter the associated data per atap[]. */
			sg_init_table(asg, template[i].anp);
			for (k = 0, temp = 0; k < template[i].anp; k++) {
				sg_set_buf(&asg[k],
					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].assoc + temp,
						  template[i].atap[k]),
					   template[i].atap[k]);
				temp += template[i].atap[k];
			}

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen,
					       iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			/* Verify each chunk of the result, then check the
			 * guard bytes after it for buffer overruns. */
			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				/* Last chunk gained a tag on encrypt and
				 * lost one on decrypt. */
				n = template[i].tap[k];
				if (k == template[i].np - 1)
					n += enc ? authsize : -authsize;

				if (memcmp(q, template[i].result + temp, n)) {
					printk(KERN_ERR "alg: aead: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, n);
					goto out;
				}

				q += n;
				if (k == template[i].np - 1 && !enc) {
					/* Decrypt leaves the stripped tag in
					 * place; anything else is corruption. */
					if (memcmp(q, template[i].input +
						      temp + n, authsize))
						n = authsize;
					else
						n = 0;
				} else {
					/* Count non-zero bytes up to the page
					 * end: guard bytes must still be 0. */
					for (n = 0; offset_in_page(q + n) &&
						    q[n]; n++)
						;
				}
				if (n) {
					printk(KERN_ERR "alg: aead: Result "
					       "buffer corruption in chunk "
					       "test %d on %s at page %u for "
					       "%s: %u bytes:\n", j, e, k,
					       algo, n);
					hexdump(q, n);
					goto out;
				}

				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	return ret;
}
/*
 * Run the single-block cipher vectors in @template (tcount entries)
 * against @tfm in the direction given by @enc (ENCRYPT or DECRYPT).
 * Vectors with a scatter layout (np != 0) are skipped; each remaining
 * vector is processed in place, one cipher block at a time, and the
 * output is compared against the expected result.
 *
 * Returns 0 if every applicable vector passes, a negative errno on a
 * mismatch or unexpected setkey failure.
 */
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	const char *e = (enc == ENCRYPT) ? "encryption" : "decryption";
	unsigned int vec, done, off;
	void *data;
	char *q;
	int ret;

	done = 0;
	for (vec = 0; vec < tcount; vec++) {
		/* Chunked vectors don't apply to the single-block API. */
		if (template[vec].np)
			continue;
		done++;

		data = xbuf[0];
		memcpy(data, template[vec].input, template[vec].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[vec].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_cipher_setkey(tfm, template[vec].key,
					   template[vec].klen);
		/* (!ret) == fail: outcome opposite to what the vector
		 * expects.  NOTE(review): when setkey unexpectedly
		 * succeeds, ret is 0 here and is what gets returned —
		 * confirm that is the intended reporting. */
		if (!ret == template[vec].fail) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", done,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* Transform the buffer in place, one block at a time. */
		for (off = 0; off < template[vec].ilen;
		     off += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + off,
							  data + off);
			else
				crypto_cipher_decrypt_one(tfm, data + off,
							  data + off);
		}

		q = data;
		if (memcmp(q, template[vec].result, template[vec].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", done, e, algo);
			hexdump(q, template[vec].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}
/*
 * Run the skcipher test vectors in @template against the given
 * ablkcipher transform.
 *
 * @tfm:      transform under test
 * @enc:      ENCRYPT or DECRYPT, selects which direction is exercised
 * @template: array of test vectors (key, input, iv, expected result)
 * @tcount:   number of entries in @template
 *
 * The vectors are processed in two passes: first all single-segment
 * vectors (np == 0) using one scatterlist entry, then all chunked
 * vectors (np != 0), which scatter the input across several page
 * offsets per template[i].tap[] to exercise page-crossing paths.
 *
 * Returns 0 if every vector produced the expected output, otherwise a
 * negative errno for the first failure.
 */
static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret;
	char *q;
	struct ablkcipher_request *req;
	struct scatterlist sg[8];
	const char *e;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];

	/* Direction label used only in diagnostics. */
	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: skcipher: Failed to allocate request "
		       "for %s\n", algo);
		ret = -ENOMEM;
		goto out;
	}

	/* Async completions are reported through tcrypt_complete/result. */
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &result);

	/* Pass 1: single-segment vectors (template[i].np == 0). */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/*
		 * NOTE(review): always copies MAX_IVLEN bytes regardless of
		 * the transform's actual IV size -- presumably every
		 * template .iv buffer is at least MAX_IVLEN long; confirm,
		 * otherwise this over-reads the vector data.
		 */
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (!(template[i].np)) {
			j++;

			data = xbuf[0];
			memcpy(data, template[i].input, template[i].ilen);

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			/*
			 * !ret is 1 on success; it must match the vector's
			 * .fail flag being 0 (and vice versa).  A mismatch
			 * -- unexpected success or unexpected failure --
			 * is an error.
			 */
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on test %d for %s: flags=%x\n", j,
				       algo, crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;	/* expected setkey failure */

			sg_init_one(&sg[0], data, template[i].ilen);

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);
			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				/* Async path: wait for the callback. */
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "test %d for %s: ret=%d\n", e, j, algo,
				       -ret);
				goto out;
			}

			q = data;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: skcipher: Test %d "
				       "failed on %s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	/* Pass 2: chunked vectors (template[i].np scatterlist segments). */
	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (template[i].np) {
			j++;

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on chunk test %d for %s: flags=%x\n",
				       j, algo,
				       crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			temp = 0;
			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				/* Each chunk must fit within its page. */
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				/*
				 * Plant a NUL guard byte right after the
				 * chunk (when it fits) so buffer overruns
				 * can be detected after the operation.
				 */
				if (offset_in_page(q) + template[i].tap[k] <
				    PAGE_SIZE)
					q[template[i].tap[k]] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);

				temp += template[i].tap[k];
			}

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);

			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			/* Verify each chunk and its trailing guard bytes. */
			temp = 0;
			ret = -EINVAL;
			for (k = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				if (memcmp(q, template[i].result + temp,
					   template[i].tap[k])) {
					printk(KERN_ERR "alg: skcipher: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, template[i].tap[k]);
					goto out;
				}

				q += template[i].tap[k];
				/* Count non-NUL bytes written past the chunk. */
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
				if (n) {
					printk(KERN_ERR "alg: skcipher: "
					       "Result buffer corruption in "
					       "chunk test %d on %s at page "
					       "%u for %s: %u bytes:\n", j, e,
					       k, algo, n);
					hexdump(q, n);
					goto out;
				}
				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	ablkcipher_request_free(req);
	return ret;
}
/*
 * Exercise a compression transform against its test vectors.
 *
 * @tfm:       compressor transform under test
 * @ctemplate: compression vectors (input -> expected compressed output)
 * @dtemplate: decompression vectors (input -> expected plaintext)
 * @ctcount:   number of compression vectors
 * @dtcount:   number of decompression vectors
 *
 * Returns 0 when every vector produces exactly the expected bytes,
 * otherwise a negative errno for the first failure.
 */
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int ret;

	/* Compression direction. */
	for (i = 0; i < ctcount; i++) {
		int ilen, dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	/*
	 * Decompression direction.  This loop must assign the function-level
	 * @ret: an inner "int ret" declaration here used to shadow it, so
	 * every "goto out" below returned the stale value left over from the
	 * compression loop (0), silently reporting decompression failures as
	 * success.
	 */
	for (i = 0; i < dtcount; i++) {
		int ilen, dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}
/*
 * Allocate the AEAD transform named by @driver and run the encryption
 * and decryption vector sets from @desc against it.
 *
 * Returns 0 on success (a missing vector set simply skips that
 * direction), otherwise the allocation error or the first test_aead()
 * failure.
 */
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_aead *tfm;
	int err = 0;

	tfm = crypto_alloc_aead(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.aead.enc.vecs) {
		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
				desc->suite.aead.enc.count);
		if (err)
			goto out;
	}

	/*
	 * err is necessarily 0 here (the encryption branch jumps to out on
	 * failure), so the old "!err &&" guard was redundant; dropped to
	 * match alg_test_cipher()/alg_test_skcipher().
	 */
	if (desc->suite.aead.dec.vecs)
		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
				desc->suite.aead.dec.count);

out:
	crypto_free_aead(tfm);
	return err;
}
/*
 * Allocate the single-block cipher named by @driver and run the
 * encryption and decryption vector sets from @desc against it.
 *
 * Returns 0 on success, otherwise the allocation error or the first
 * test_cipher() failure.
 */
static int alg_test_cipher(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_cipher *cipher;
	int rc = 0;

	cipher = crypto_alloc_cipher(driver, type, mask);
	if (IS_ERR(cipher)) {
		printk(KERN_ERR "alg: cipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(cipher));
		return PTR_ERR(cipher);
	}

	if (desc->suite.cipher.enc.vecs)
		rc = test_cipher(cipher, ENCRYPT, desc->suite.cipher.enc.vecs,
				 desc->suite.cipher.enc.count);
	/* Only run the decryption vectors if encryption passed. */
	if (!rc && desc->suite.cipher.dec.vecs)
		rc = test_cipher(cipher, DECRYPT, desc->suite.cipher.dec.vecs,
				 desc->suite.cipher.dec.count);

	crypto_free_cipher(cipher);
	return rc;
}
/*
 * Allocate the skcipher (ablkcipher) named by @driver and run the
 * encryption and decryption vector sets from @desc against it.
 *
 * Returns 0 on success, otherwise the allocation error or the first
 * test_skcipher() failure.
 */
static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	struct crypto_ablkcipher *skcipher;
	int rc = 0;

	skcipher = crypto_alloc_ablkcipher(driver, type, mask);
	if (IS_ERR(skcipher)) {
		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(skcipher));
		return PTR_ERR(skcipher);
	}

	if (desc->suite.cipher.enc.vecs)
		rc = test_skcipher(skcipher, ENCRYPT,
				   desc->suite.cipher.enc.vecs,
				   desc->suite.cipher.enc.count);
	/* Only run the decryption vectors if encryption passed. */
	if (!rc && desc->suite.cipher.dec.vecs)
		rc = test_skcipher(skcipher, DECRYPT,
				   desc->suite.cipher.dec.vecs,
				   desc->suite.cipher.dec.count);

	crypto_free_ablkcipher(skcipher);
	return rc;
}
/*
 * Allocate the compression transform named by @driver and run both its
 * compression and decompression vector sets via test_comp().
 *
 * Returns 0 on success, otherwise the allocation error or the
 * test_comp() failure.
 */
static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_comp *comp;
	int rc;

	comp = crypto_alloc_comp(driver, type, mask);
	if (IS_ERR(comp)) {
		printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(comp));
		return PTR_ERR(comp);
	}

	rc = test_comp(comp, desc->suite.comp.comp.vecs,
		       desc->suite.comp.decomp.vecs,
		       desc->suite.comp.comp.count,
		       desc->suite.comp.decomp.count);

	crypto_free_comp(comp);
	return rc;
}
/*
 * Allocate the (a)hash transform named by @driver and run the digest
 * vector set from @desc against it via test_hash().
 *
 * Returns 0 on success, otherwise the allocation error or the
 * test_hash() failure.
 */
static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_ahash *hash;
	int rc;

	hash = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(hash)) {
		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(hash));
		return PTR_ERR(hash);
	}

	rc = test_hash(hash, desc->suite.hash.vecs, desc->suite.hash.count);

	crypto_free_ahash(hash);
	return rc;
}
/*
 * Table mapping algorithm names to their self-test routine and test
 * vectors.  alg_test() looks entries up by name via a binary search in
 * alg_find_test(), so the array MUST stay sorted by .alg (strcmp order).
 *
 * Please keep this list sorted by algorithm name.
 */
static const struct alg_test_desc alg_test_descs[] = {
	{
		.alg = "cbc(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_cbc_enc_tv_template,
					.count = AES_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_cbc_dec_tv_template,
					.count = AES_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_cbc_enc_tv_template,
					.count = ANUBIS_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_cbc_dec_tv_template,
					.count = ANUBIS_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_cbc_enc_tv_template,
					.count = BF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_cbc_dec_tv_template,
					.count = BF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_cbc_enc_tv_template,
					.count = CAMELLIA_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_cbc_dec_tv_template,
					.count = CAMELLIA_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_cbc_enc_tv_template,
					.count = DES_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_cbc_dec_tv_template,
					.count = DES_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_cbc_enc_tv_template,
					.count = DES3_EDE_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_cbc_dec_tv_template,
					.count = DES3_EDE_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_cbc_enc_tv_template,
					.count = TF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_cbc_dec_tv_template,
					.count = TF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ccm(aes)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_enc_tv_template,
					.count = AES_CCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_dec_tv_template,
					.count = AES_CCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "crc32c",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = crc32c_tv_template,
				.count = CRC32C_TEST_VECTORS
			}
		}
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cts_mode_enc_tv_template,
					.count = CTS_MODE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cts_mode_dec_tv_template,
					.count = CTS_MODE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "deflate",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = {
					.vecs = deflate_comp_tv_template,
					.count = DEFLATE_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = deflate_decomp_tv_template,
					.count = DEFLATE_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_enc_tv_template,
					.count = AES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_dec_tv_template,
					.count = AES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_enc_tv_template,
					.count = ANUBIS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_dec_tv_template,
					.count = ANUBIS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(arc4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = arc4_enc_tv_template,
					.count = ARC4_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = arc4_dec_tv_template,
					.count = ARC4_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_enc_tv_template,
					.count = BF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_dec_tv_template,
					.count = BF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_enc_tv_template,
					.count = CAMELLIA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_dec_tv_template,
					.count = CAMELLIA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_enc_tv_template,
					.count = CAST5_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_dec_tv_template,
					.count = CAST5_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_enc_tv_template,
					.count = CAST6_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_dec_tv_template,
					.count = CAST6_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_enc_tv_template,
					.count = DES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_dec_tv_template,
					.count = DES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_enc_tv_template,
					.count = DES3_EDE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_dec_tv_template,
					.count = DES3_EDE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = khazad_enc_tv_template,
					.count = KHAZAD_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = khazad_dec_tv_template,
					.count = KHAZAD_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = seed_enc_tv_template,
					.count = SEED_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = seed_dec_tv_template,
					.count = SEED_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_enc_tv_template,
					.count = SERPENT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_dec_tv_template,
					.count = SERPENT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tea_enc_tv_template,
					.count = TEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tea_dec_tv_template,
					.count = TEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tnepres_enc_tv_template,
					.count = TNEPRES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tnepres_dec_tv_template,
					.count = TNEPRES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_enc_tv_template,
					.count = TF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_dec_tv_template,
					.count = TF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xeta_enc_tv_template,
					.count = XETA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xeta_dec_tv_template,
					.count = XETA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xtea_enc_tv_template,
					.count = XTEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xtea_dec_tv_template,
					.count = XTEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_enc_tv_template,
					.count = AES_GCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_dec_tv_template,
					.count = AES_GCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_enc_tv_template,
					.count = AES_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_dec_tv_template,
					.count = AES_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		/* salsa20 is a stream cipher: encryption vectors only. */
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr192_tv_template,
				.count = TGR192_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
			}
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_xts_enc_tv_template,
					.count = AES_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_xts_dec_tv_template,
					.count = AES_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}
};
/*
 * Binary-search alg_test_descs[] (kept sorted by .alg) for @alg.
 * Returns the matching index, or -1 if the algorithm has no test entry.
 */
static int alg_find_test(const char *alg)
{
	int lo = 0;
	int hi = ARRAY_SIZE(alg_test_descs);

	while (lo < hi) {
		int mid = (lo + hi) / 2;
		int cmp = strcmp(alg_test_descs[mid].alg, alg);

		if (cmp == 0)
			return mid;

		if (cmp > 0)
			hi = mid;	/* entry sorts after @alg */
		else
			lo = mid + 1;	/* entry sorts before @alg */
	}

	return -1;
}
/*
 * Entry point for run-time algorithm self-testing.
 *
 * @driver: driver (implementation) name being tested
 * @alg:    algorithm name used to look up the test description
 * @type:   crypto type flags of the algorithm
 * @mask:   crypto type mask of the algorithm
 *
 * Single-block ciphers have no entry of their own; they are looked up
 * and exercised through their "ecb(<alg>)" wrapper.  Algorithms with no
 * registered test are treated as passing (returns 0).
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	char nalg[CRYPTO_MAX_ALG_NAME];
	const char *name = alg;
	int idx;

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;
		name = nalg;
	}

	idx = alg_find_test(name);
	if (idx < 0) {
		printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
		return 0;
	}

	if (name == nalg)
		return alg_test_cipher(alg_test_descs + idx, driver, type,
				       mask);

	return alg_test_descs[idx].test(alg_test_descs + idx, driver,
					type, mask);
}
EXPORT_SYMBOL_GPL(alg_test);
/*
 * Allocate the page-sized scratch buffers (xbuf[] for cipher data,
 * axbuf[] for associated data) used by the test routines.
 *
 * Returns 0 on success; on failure every page obtained so far is
 * released and -ENOMEM is returned.
 */
int __init testmgr_init(void)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!xbuf[i])
			goto err;
	}

	for (i = 0; i < XBUFSIZE; i++) {
		axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!axbuf[i])
			goto err;
	}

	return 0;

err:
	/* Never-allocated slots are still NULL, so stop at the first hole. */
	for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
		free_page((unsigned long)axbuf[i]);
	for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
		free_page((unsigned long)xbuf[i]);

	return -ENOMEM;
}
/* Release the scratch pages allocated by testmgr_init(). */
void testmgr_exit(void)
{
	int n;

	for (n = 0; n < XBUFSIZE; n++)
		free_page((unsigned long)axbuf[n]);
	for (n = 0; n < XBUFSIZE; n++)
		free_page((unsigned long)xbuf[n]);
}
This source diff could not be displayed because it is too large. You can view the blob instead.
/*
* RNG: Random Number Generator algorithms under the crypto API
*
* Copyright (c) 2008 Neil Horman <nhorman@tuxdriver.com>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
#ifndef _CRYPTO_INTERNAL_RNG_H
#define _CRYPTO_INTERNAL_RNG_H
#include <crypto/algapi.h>
#include <crypto/rng.h>
extern const struct crypto_type crypto_rng_type;
/* Return the private (implementation) context area of an RNG transform. */
static inline void *crypto_rng_ctx(struct crypto_rng *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}
#endif
...@@ -15,7 +15,6 @@ ...@@ -15,7 +15,6 @@
#include <crypto/algapi.h> #include <crypto/algapi.h>
#include <crypto/skcipher.h> #include <crypto/skcipher.h>
#include <linux/init.h>
#include <linux/types.h> #include <linux/types.h>
struct rtattr; struct rtattr;
...@@ -65,11 +64,6 @@ void skcipher_geniv_free(struct crypto_instance *inst); ...@@ -65,11 +64,6 @@ void skcipher_geniv_free(struct crypto_instance *inst);
int skcipher_geniv_init(struct crypto_tfm *tfm); int skcipher_geniv_init(struct crypto_tfm *tfm);
void skcipher_geniv_exit(struct crypto_tfm *tfm); void skcipher_geniv_exit(struct crypto_tfm *tfm);
int __init eseqiv_module_init(void);
void __exit eseqiv_module_exit(void);
int __init chainiv_module_init(void);
void chainiv_module_exit(void);
static inline struct crypto_ablkcipher *skcipher_geniv_cipher( static inline struct crypto_ablkcipher *skcipher_geniv_cipher(
struct crypto_ablkcipher *geniv) struct crypto_ablkcipher *geniv)
{ {
......
/*
* RNG: Random Number Generator algorithms under the crypto API
*
* Copyright (c) 2008 Neil Horman <nhorman@tuxdriver.com>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
#ifndef _CRYPTO_RNG_H
#define _CRYPTO_RNG_H
#include <linux/crypto.h>
extern struct crypto_rng *crypto_default_rng;
int crypto_get_default_rng(void);
void crypto_put_default_rng(void);
/*
 * Downcast a bare crypto_tfm to its crypto_rng container.  Valid only
 * for transforms allocated with the RNG type bits set.
 */
static inline struct crypto_rng *__crypto_rng_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_rng *)tfm;
}
/*
 * Allocate an RNG transform for @alg_name.
 *
 * The caller's @type/@mask are adjusted so that only algorithms of type
 * CRYPTO_ALG_TYPE_RNG can match.  Returns an ERR_PTR on failure (as
 * propagated from crypto_alloc_base()).
 */
static inline struct crypto_rng *crypto_alloc_rng(const char *alg_name,
						  u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_RNG;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_rng_cast(crypto_alloc_base(alg_name, type, mask));
}
/* Return the underlying crypto_tfm of an RNG transform. */
static inline struct crypto_tfm *crypto_rng_tfm(struct crypto_rng *tfm)
{
	return &tfm->base;
}
/* Return the rng_alg (algorithm description) backing this transform. */
static inline struct rng_alg *crypto_rng_alg(struct crypto_rng *tfm)
{
	return &crypto_rng_tfm(tfm)->__crt_alg->cra_rng;
}
/* Return the per-transform RNG operations (crt_rng) of this transform. */
static inline struct rng_tfm *crypto_rng_crt(struct crypto_rng *tfm)
{
	return &crypto_rng_tfm(tfm)->crt_rng;
}
/* Free an RNG transform allocated with crypto_alloc_rng(). */
static inline void crypto_free_rng(struct crypto_rng *tfm)
{
	crypto_free_tfm(crypto_rng_tfm(tfm));
}
/*
 * Fill @rdata with @dlen random bytes generated by @tfm.
 * Returns the implementation's rng_gen_random() result.
 */
static inline int crypto_rng_get_bytes(struct crypto_rng *tfm,
				       u8 *rdata, unsigned int dlen)
{
	return crypto_rng_crt(tfm)->rng_gen_random(tfm, rdata, dlen);
}
/*
 * Re-seed the RNG with @slen bytes of @seed.
 * Returns the implementation's rng_reset() result.
 */
static inline int crypto_rng_reset(struct crypto_rng *tfm,
				   u8 *seed, unsigned int slen)
{
	return crypto_rng_crt(tfm)->rng_reset(tfm, seed, slen);
}
/* Number of seed bytes the algorithm expects in crypto_rng_reset(). */
static inline int crypto_rng_seedsize(struct crypto_rng *tfm)
{
	return crypto_rng_alg(tfm)->seedsize;
}
#endif
...@@ -38,6 +38,7 @@ ...@@ -38,6 +38,7 @@
#define CRYPTO_ALG_TYPE_DIGEST 0x00000008 #define CRYPTO_ALG_TYPE_DIGEST 0x00000008
#define CRYPTO_ALG_TYPE_HASH 0x00000009 #define CRYPTO_ALG_TYPE_HASH 0x00000009
#define CRYPTO_ALG_TYPE_AHASH 0x0000000a #define CRYPTO_ALG_TYPE_AHASH 0x0000000a
#define CRYPTO_ALG_TYPE_RNG 0x0000000c
#define CRYPTO_ALG_TYPE_HASH_MASK 0x0000000e #define CRYPTO_ALG_TYPE_HASH_MASK 0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK 0x0000000c #define CRYPTO_ALG_TYPE_AHASH_MASK 0x0000000c
...@@ -60,6 +61,14 @@ ...@@ -60,6 +61,14 @@
*/ */
#define CRYPTO_ALG_GENIV 0x00000200 #define CRYPTO_ALG_GENIV 0x00000200
/*
* Set if the algorithm has passed automated run-time testing. Note that
* if there is no run-time testing for a given algorithm it is considered
* to have passed.
*/
#define CRYPTO_ALG_TESTED 0x00000400
/* /*
* Transform masks and values (for crt_flags). * Transform masks and values (for crt_flags).
*/ */
...@@ -105,6 +114,7 @@ struct crypto_aead; ...@@ -105,6 +114,7 @@ struct crypto_aead;
struct crypto_blkcipher; struct crypto_blkcipher;
struct crypto_hash; struct crypto_hash;
struct crypto_ahash; struct crypto_ahash;
struct crypto_rng;
struct crypto_tfm; struct crypto_tfm;
struct crypto_type; struct crypto_type;
struct aead_givcrypt_request; struct aead_givcrypt_request;
...@@ -290,6 +300,15 @@ struct compress_alg { ...@@ -290,6 +300,15 @@ struct compress_alg {
unsigned int slen, u8 *dst, unsigned int *dlen); unsigned int slen, u8 *dst, unsigned int *dlen);
}; };
struct rng_alg {
int (*rng_make_random)(struct crypto_rng *tfm, u8 *rdata,
unsigned int dlen);
int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
unsigned int seedsize;
};
#define cra_ablkcipher cra_u.ablkcipher #define cra_ablkcipher cra_u.ablkcipher
#define cra_aead cra_u.aead #define cra_aead cra_u.aead
#define cra_blkcipher cra_u.blkcipher #define cra_blkcipher cra_u.blkcipher
...@@ -298,6 +317,7 @@ struct compress_alg { ...@@ -298,6 +317,7 @@ struct compress_alg {
#define cra_hash cra_u.hash #define cra_hash cra_u.hash
#define cra_ahash cra_u.ahash #define cra_ahash cra_u.ahash
#define cra_compress cra_u.compress #define cra_compress cra_u.compress
#define cra_rng cra_u.rng
struct crypto_alg { struct crypto_alg {
struct list_head cra_list; struct list_head cra_list;
...@@ -325,6 +345,7 @@ struct crypto_alg { ...@@ -325,6 +345,7 @@ struct crypto_alg {
struct hash_alg hash; struct hash_alg hash;
struct ahash_alg ahash; struct ahash_alg ahash;
struct compress_alg compress; struct compress_alg compress;
struct rng_alg rng;
} cra_u; } cra_u;
int (*cra_init)(struct crypto_tfm *tfm); int (*cra_init)(struct crypto_tfm *tfm);
...@@ -430,6 +451,12 @@ struct compress_tfm { ...@@ -430,6 +451,12 @@ struct compress_tfm {
u8 *dst, unsigned int *dlen); u8 *dst, unsigned int *dlen);
}; };
struct rng_tfm {
int (*rng_gen_random)(struct crypto_rng *tfm, u8 *rdata,
unsigned int dlen);
int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
};
#define crt_ablkcipher crt_u.ablkcipher #define crt_ablkcipher crt_u.ablkcipher
#define crt_aead crt_u.aead #define crt_aead crt_u.aead
#define crt_blkcipher crt_u.blkcipher #define crt_blkcipher crt_u.blkcipher
...@@ -437,6 +464,7 @@ struct compress_tfm { ...@@ -437,6 +464,7 @@ struct compress_tfm {
#define crt_hash crt_u.hash #define crt_hash crt_u.hash
#define crt_ahash crt_u.ahash #define crt_ahash crt_u.ahash
#define crt_compress crt_u.compress #define crt_compress crt_u.compress
#define crt_rng crt_u.rng
struct crypto_tfm { struct crypto_tfm {
...@@ -450,6 +478,7 @@ struct crypto_tfm { ...@@ -450,6 +478,7 @@ struct crypto_tfm {
struct hash_tfm hash; struct hash_tfm hash;
struct ahash_tfm ahash; struct ahash_tfm ahash;
struct compress_tfm compress; struct compress_tfm compress;
struct rng_tfm rng;
} crt_u; } crt_u;
struct crypto_alg *__crt_alg; struct crypto_alg *__crt_alg;
...@@ -481,6 +510,10 @@ struct crypto_hash { ...@@ -481,6 +510,10 @@ struct crypto_hash {
struct crypto_tfm base; struct crypto_tfm base;
}; };
struct crypto_rng {
struct crypto_tfm base;
};
enum { enum {
CRYPTOA_UNSPEC, CRYPTOA_UNSPEC,
CRYPTOA_ALG, CRYPTOA_ALG,
...@@ -515,6 +548,8 @@ struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags); ...@@ -515,6 +548,8 @@ struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask); struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_free_tfm(struct crypto_tfm *tfm); void crypto_free_tfm(struct crypto_tfm *tfm);
int alg_test(const char *driver, const char *alg, u32 type, u32 mask);
/* /*
* Transform helpers which query the underlying algorithm. * Transform helpers which query the underlying algorithm.
*/ */
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment