Commit 9561dccb authored by Herbert Xu

Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

Merging the crypto tree for 3.17 to pull in the "by8" AVX CTR revert.
parents e3b3bb5a 7da4b29d
@@ -481,7 +481,7 @@ static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
 	crypto_inc(ctrblk, AES_BLOCK_SIZE);
 }
 
-#ifdef CONFIG_AS_AVX
+#if 0 /* temporary disabled due to failing crypto tests */
 static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
 			      const u8 *in, unsigned int len, u8 *iv)
 {
@@ -1522,7 +1522,7 @@ static int __init aesni_init(void)
 		aesni_gcm_dec_tfm = aesni_gcm_dec;
 	}
 	aesni_ctr_enc_tfm = aesni_ctr_enc;
-#ifdef CONFIG_AS_AVX
+#if 0 /* temporary disabled due to failing crypto tests */
 	if (cpu_has_avx) {
 		/* optimize performance of ctr mode encryption transform */
 		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
......
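For context, the hunks above only affect which routine the aesni_ctr_enc_tfm function pointer ends up holding: with the AVX block compiled out, the pointer simply stays on the baseline AES-NI routine selected at init time. Below is a minimal, self-contained sketch of that dispatch pattern; the names (ctr_enc_fn, ctr_enc_generic, ctr_enc_by8) are illustrative only and not the kernel code itself.

/*
 * Dispatch-pattern sketch: a function pointer chosen once at init,
 * falling back to the baseline routine when the optimized path is
 * compiled out (as in the #if 0 above).
 */
#include <stdio.h>

typedef void (*ctr_enc_fn)(const unsigned char *in, unsigned char *out,
                           unsigned int len);

static void ctr_enc_generic(const unsigned char *in, unsigned char *out,
                            unsigned int len)
{
        unsigned int i;

        for (i = 0; i < len; i++)       /* stand-in for the real transform */
                out[i] = in[i];
        printf("generic path, %u bytes\n", len);
}

#if 0   /* optimized path compiled out, mirroring the revert above */
static void ctr_enc_by8(const unsigned char *in, unsigned char *out,
                        unsigned int len);
#endif

static ctr_enc_fn ctr_enc = ctr_enc_generic;

static void ctr_init(int cpu_has_avx)
{
#if 0   /* with this block disabled, the pointer never leaves the baseline */
        if (cpu_has_avx)
                ctr_enc = ctr_enc_by8;
#endif
        (void)cpu_has_avx;
}

int main(void)
{
        unsigned char in[16] = { 0 }, out[16];

        ctr_init(1);
        ctr_enc(in, out, sizeof(in));   /* always the generic path here */
        return 0;
}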
@@ -367,6 +367,10 @@ static int ccp_crypto_init(void)
 {
 	int ret;
 
+	ret = ccp_present();
+	if (ret)
+		return ret;
+
 	spin_lock_init(&req_queue_lock);
 	INIT_LIST_HEAD(&req_queue.cmds);
 	req_queue.backlog = &req_queue.cmds;
......
@@ -54,6 +54,20 @@ static inline void ccp_del_device(struct ccp_device *ccp)
 	ccp_dev = NULL;
 }
 
+/**
+ * ccp_present - check if a CCP device is present
+ *
+ * Returns zero if a CCP device is present, -ENODEV otherwise.
+ */
+int ccp_present(void)
+{
+	if (ccp_get_device())
+		return 0;
+
+	return -ENODEV;
+}
+EXPORT_SYMBOL_GPL(ccp_present);
+
 /**
  * ccp_enqueue_cmd - queue an operation for processing by the CCP
  *
......
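The new ccp_present() export gives CCP consumers a cheap way to find out, before they register anything, whether a CCP device was actually probed; ccp_crypto_init() above is the first such caller. A hedged sketch of a hypothetical consumer module using it the same way follows; the module and its names are illustrative and not part of this commit.

/*
 * Hypothetical CCP consumer: bail out of module init early when no
 * CCP device is present, mirroring the ccp_crypto_init() change above.
 * With the stub in <linux/ccp.h>, this builds even when
 * CONFIG_CRYPTO_DEV_CCP_DD is disabled.
 */
#include <linux/module.h>
#include <linux/ccp.h>

static int __init my_ccp_consumer_init(void)
{
        int ret;

        ret = ccp_present();
        if (ret)
                return ret;     /* -ENODEV: no CCP, register nothing */

        /* safe to register algorithms / queue ccp_cmd work from here */
        return 0;
}

static void __exit my_ccp_consumer_exit(void)
{
}

module_init(my_ccp_consumer_init);
module_exit(my_ccp_consumer_exit);
MODULE_LICENSE("GPL");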
@@ -66,7 +66,7 @@
 #define ADF_DH895XCC_ETR_MAX_BANKS 32
 #define ADF_DH895XCC_SMIAPF0_MASK_OFFSET (0x3A000 + 0x28)
 #define ADF_DH895XCC_SMIAPF1_MASK_OFFSET (0x3A000 + 0x30)
-#define ADF_DH895XCC_SMIA0_MASK 0xFFFF
+#define ADF_DH895XCC_SMIA0_MASK 0xFFFFFFFF
 #define ADF_DH895XCC_SMIA1_MASK 0x1
 /* Error detection and correction */
 #define ADF_DH895XCC_AE_CTX_ENABLES(i) (i * 0x1000 + 0x20818)
......
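The SMIA0 mask widening lines up with ADF_DH895XCC_ETR_MAX_BANKS above: assuming one enable bit per ETR bank, 0xFFFF only covers banks 0-15, while 0xFFFFFFFF covers all 32. A small stand-alone illustration of that bit arithmetic (not driver code) is below.

/* Count how many banks each mask value covers. */
#include <stdio.h>
#include <stdint.h>

#define ETR_MAX_BANKS   32

static int bits_set(uint32_t mask)
{
        int n = 0;

        while (mask) {
                n += mask & 1;
                mask >>= 1;
        }
        return n;
}

int main(void)
{
        uint32_t old_mask = 0xFFFF;
        uint32_t new_mask = 0xFFFFFFFF;

        printf("0x%08X covers %d of %d banks\n",
               old_mask, bits_set(old_mask), ETR_MAX_BANKS);
        printf("0x%08X covers %d of %d banks\n",
               new_mask, bits_set(new_mask), ETR_MAX_BANKS);
        return 0;
}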
@@ -26,6 +26,13 @@ struct ccp_cmd;
 #if defined(CONFIG_CRYPTO_DEV_CCP_DD) || \
 	defined(CONFIG_CRYPTO_DEV_CCP_DD_MODULE)
 
+/**
+ * ccp_present - check if a CCP device is present
+ *
+ * Returns zero if a CCP device is present, -ENODEV otherwise.
+ */
+int ccp_present(void);
+
 /**
  * ccp_enqueue_cmd - queue an operation for processing by the CCP
  *
@@ -53,6 +60,11 @@ int ccp_enqueue_cmd(struct ccp_cmd *cmd);
 
 #else /* CONFIG_CRYPTO_DEV_CCP_DD is not enabled */
 
+static inline int ccp_present(void)
+{
+	return -ENODEV;
+}
+
 static inline int ccp_enqueue_cmd(struct ccp_cmd *cmd)
 {
 	return -ENODEV;
......
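The header hunks above follow the usual conditional-API pattern: with the CCP driver configured in, callers see the real ccp_present() declaration; otherwise they get a static inline stub returning -ENODEV, so call sites build unchanged either way. A minimal stand-alone sketch of that pattern follows; HAVE_WIDGET and widget_present() are made-up names used only for illustration.

/* Conditional-API pattern: real declaration or inline stub. */
#include <errno.h>
#include <stdio.h>

/* #define HAVE_WIDGET 1 */

#ifdef HAVE_WIDGET
int widget_present(void);               /* real implementation elsewhere */
#else
static inline int widget_present(void)
{
        return -ENODEV;                 /* facility compiled out */
}
#endif

int main(void)
{
        int ret = widget_present();

        if (ret)
                printf("widget not available: %d\n", ret);
        else
                printf("widget available\n");
        return 0;
}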