Commit 6929c358 authored by Linus Torvalds

Merge tag 'llvmlinux-for-v3.18' of git://git.linuxfoundation.org/llvmlinux/kernel

Pull LLVM updates from Behan Webster:
 "These patches remove the use of VLAIS using a new SHASH_DESC_ON_STACK
  macro.

  Some of the previously accepted VLAIS removal patches haven't used
  this macro.  I will push new patches to consistently use this macro in
  all those older cases for 3.19"

[ More LLVM patches coming in through subsystem trees, and LLVM itself
  needs some fixes that are already in many distributions but not in
  released versions of LLVM.  Some day this will all "just work"  - Linus ]

* tag 'llvmlinux-for-v3.18' of git://git.linuxfoundation.org/llvmlinux/kernel:
  crypto: LLVMLinux: Remove VLAIS usage from crypto/testmgr.c
  security, crypto: LLVMLinux: Remove VLAIS from ima_crypto.c
  crypto: LLVMLinux: Remove VLAIS usage from libcrc32c.c
  crypto: LLVMLinux: Remove VLAIS usage from crypto/hmac.c
  crypto, dm: LLVMLinux: Remove VLAIS usage from dm-crypt
  crypto: LLVMLinux: Remove VLAIS from crypto/.../qat_algs.c
  crypto: LLVMLinux: Remove VLAIS from crypto/omap_sham.c
  crypto: LLVMLinux: Remove VLAIS from crypto/n2_core.c
  crypto: LLVMLinux: Remove VLAIS from crypto/mv_cesa.c
  crypto: LLVMLinux: Remove VLAIS from crypto/ccp/ccp-crypto-sha.c
  btrfs: LLVMLinux: Remove VLAIS
  crypto: LLVMLinux: Add macro to remove use of VLAIS in crypto code
parents 23971bdf 4c5c3024
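
[A note for readers unfamiliar with the term: VLAIS is a "variable-length
array in structure", a GCC extension that clang does not implement.  The
pattern this series removes, and its replacement, look roughly like the
sketch below; "tfm" stands in for whatever struct crypto_shash handle the
caller holds, and "data"/"len"/"out" are placeholders.

        /* Before: the char[] member is sized at run time, so the
         * enclosing struct is a VLAIS -- GCC-only. */
        struct {
                struct shash_desc shash;
                char ctx[crypto_shash_descsize(tfm)];
        } desc;

        desc.shash.tfm = tfm;
        err = crypto_shash_digest(&desc.shash, data, len, out);

        /* After: SHASH_DESC_ON_STACK declares a plain C99 VLA of char
         * and overlays a struct shash_desc on it -- fine under clang. */
        SHASH_DESC_ON_STACK(shash, tfm);

        shash->tfm = tfm;
        err = crypto_shash_digest(shash, data, len, out);

The macro itself is added to include/crypto/hash.h by the last patch in
the series; its definition appears in the diff below.]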
@@ -52,20 +52,17 @@ static int hmac_setkey(struct crypto_shash *parent,
         struct hmac_ctx *ctx = align_ptr(opad + ss,
                                          crypto_tfm_ctx_alignment());
         struct crypto_shash *hash = ctx->hash;
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(hash)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, hash);
         unsigned int i;

-        desc.shash.tfm = hash;
-        desc.shash.flags = crypto_shash_get_flags(parent) &
-                           CRYPTO_TFM_REQ_MAY_SLEEP;
+        shash->tfm = hash;
+        shash->flags = crypto_shash_get_flags(parent)
+                & CRYPTO_TFM_REQ_MAY_SLEEP;

         if (keylen > bs) {
                 int err;

-                err = crypto_shash_digest(&desc.shash, inkey, keylen, ipad);
+                err = crypto_shash_digest(shash, inkey, keylen, ipad);
                 if (err)
                         return err;
@@ -81,12 +78,12 @@ static int hmac_setkey(struct crypto_shash *parent,
                 opad[i] ^= 0x5c;
         }

-        return crypto_shash_init(&desc.shash) ?:
-               crypto_shash_update(&desc.shash, ipad, bs) ?:
-               crypto_shash_export(&desc.shash, ipad) ?:
-               crypto_shash_init(&desc.shash) ?:
-               crypto_shash_update(&desc.shash, opad, bs) ?:
-               crypto_shash_export(&desc.shash, opad);
+        return crypto_shash_init(shash) ?:
+               crypto_shash_update(shash, ipad, bs) ?:
+               crypto_shash_export(shash, ipad) ?:
+               crypto_shash_init(shash) ?:
+               crypto_shash_update(shash, opad, bs) ?:
+               crypto_shash_export(shash, opad);
 }

 static int hmac_export(struct shash_desc *pdesc, void *out)
...
@@ -1678,16 +1678,14 @@ static int alg_test_crc32c(const struct alg_test_desc *desc,
         }

         do {
-                struct {
-                        struct shash_desc shash;
-                        char ctx[crypto_shash_descsize(tfm)];
-                } sdesc;
+                SHASH_DESC_ON_STACK(shash, tfm);
+                u32 *ctx = (u32 *)shash_desc_ctx(shash);

-                sdesc.shash.tfm = tfm;
-                sdesc.shash.flags = 0;
+                shash->tfm = tfm;
+                shash->flags = 0;

-                *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
-                err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
+                *ctx = le32_to_cpu(420553207);
+                err = crypto_shash_final(shash, (u8 *)&val);
                 if (err) {
                         printk(KERN_ERR "alg: crc32c: Operation failed for "
                                "%s: %d\n", driver, err);
...
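
[A note on the crc32c conversions (testmgr above, btrfs and libcrc32c
below): the crc32c shash keeps its running state as a single u32 in the
descriptor context, so these callers seed it by writing through
shash_desc_ctx() rather than calling crypto_shash_init().  A minimal
sketch of the idiom, assuming "tfm" is a crc32c shash handle and "crc"
is the seed value:

        SHASH_DESC_ON_STACK(shash, tfm);
        u32 *ctx = (u32 *)shash_desc_ctx(shash);  /* crc32c state */

        shash->tfm = tfm;
        shash->flags = 0;
        *ctx = crc;  /* seed in place of crypto_shash_init() */
]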
@@ -198,10 +198,9 @@ static int ccp_sha_setkey(struct crypto_ahash *tfm, const u8 *key,
 {
         struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
         struct crypto_shash *shash = ctx->u.sha.hmac_tfm;
-        struct {
-                struct shash_desc sdesc;
-                char ctx[crypto_shash_descsize(shash)];
-        } desc;
+
+        SHASH_DESC_ON_STACK(sdesc, shash);
+
         unsigned int block_size = crypto_shash_blocksize(shash);
         unsigned int digest_size = crypto_shash_digestsize(shash);
         int i, ret;
@@ -216,11 +215,11 @@ static int ccp_sha_setkey(struct crypto_ahash *tfm, const u8 *key,
         if (key_len > block_size) {
                 /* Must hash the input key */
-                desc.sdesc.tfm = shash;
-                desc.sdesc.flags = crypto_ahash_get_flags(tfm) &
+                sdesc->tfm = shash;
+                sdesc->flags = crypto_ahash_get_flags(tfm) &
                         CRYPTO_TFM_REQ_MAY_SLEEP;

-                ret = crypto_shash_digest(&desc.sdesc, key, key_len,
+                ret = crypto_shash_digest(sdesc, key, key_len,
                                           ctx->u.sha.key);
                 if (ret) {
                         crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
...
@@ -402,26 +402,23 @@ static int mv_hash_final_fallback(struct ahash_request *req)
 {
         const struct mv_tfm_hash_ctx *tfm_ctx = crypto_tfm_ctx(req->base.tfm);
         struct mv_req_hash_ctx *req_ctx = ahash_request_ctx(req);
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(tfm_ctx->fallback)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, tfm_ctx->fallback);
         int rc;

-        desc.shash.tfm = tfm_ctx->fallback;
-        desc.shash.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+        shash->tfm = tfm_ctx->fallback;
+        shash->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
         if (unlikely(req_ctx->first_hash)) {
-                crypto_shash_init(&desc.shash);
-                crypto_shash_update(&desc.shash, req_ctx->buffer,
-                                    req_ctx->extra_bytes);
+                crypto_shash_init(shash);
+                crypto_shash_update(shash, req_ctx->buffer,
+                                    req_ctx->extra_bytes);
         } else {
                 /* only SHA1 for now....
                  */
-                rc = mv_hash_import_sha1_ctx(req_ctx, &desc.shash);
+                rc = mv_hash_import_sha1_ctx(req_ctx, shash);
                 if (rc)
                         goto out;
         }
-        rc = crypto_shash_final(&desc.shash, req->result);
+        rc = crypto_shash_final(shash, req->result);
 out:
         return rc;
 }
@@ -794,23 +791,21 @@ static int mv_hash_setkey(struct crypto_ahash *tfm, const u8 * key,
         ss = crypto_shash_statesize(ctx->base_hash);

         {
-                struct {
-                        struct shash_desc shash;
-                        char ctx[crypto_shash_descsize(ctx->base_hash)];
-                } desc;
+                SHASH_DESC_ON_STACK(shash, ctx->base_hash);
+
                 unsigned int i;
                 char ipad[ss];
                 char opad[ss];

-                desc.shash.tfm = ctx->base_hash;
-                desc.shash.flags = crypto_shash_get_flags(ctx->base_hash) &
+                shash->tfm = ctx->base_hash;
+                shash->flags = crypto_shash_get_flags(ctx->base_hash) &
                     CRYPTO_TFM_REQ_MAY_SLEEP;

                 if (keylen > bs) {
                         int err;

                         err =
-                            crypto_shash_digest(&desc.shash, key, keylen, ipad);
+                            crypto_shash_digest(shash, key, keylen, ipad);
                         if (err)
                                 return err;
@@ -826,12 +821,12 @@ static int mv_hash_setkey(struct crypto_ahash *tfm, const u8 * key,
                         opad[i] ^= 0x5c;
                 }

-                rc = crypto_shash_init(&desc.shash) ? :
-                    crypto_shash_update(&desc.shash, ipad, bs) ? :
-                    crypto_shash_export(&desc.shash, ipad) ? :
-                    crypto_shash_init(&desc.shash) ? :
-                    crypto_shash_update(&desc.shash, opad, bs) ? :
-                    crypto_shash_export(&desc.shash, opad);
+                rc = crypto_shash_init(shash) ? :
+                    crypto_shash_update(shash, ipad, bs) ? :
+                    crypto_shash_export(shash, ipad) ? :
+                    crypto_shash_init(shash) ? :
+                    crypto_shash_update(shash, opad, bs) ? :
+                    crypto_shash_export(shash, opad);

                 if (rc == 0)
                         mv_hash_init_ivs(ctx, ipad, opad);
...
@@ -445,10 +445,7 @@ static int n2_hmac_async_setkey(struct crypto_ahash *tfm, const u8 *key,
         struct n2_hmac_ctx *ctx = crypto_ahash_ctx(tfm);
         struct crypto_shash *child_shash = ctx->child_shash;
         struct crypto_ahash *fallback_tfm;
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(child_shash)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, child_shash);
         int err, bs, ds;

         fallback_tfm = ctx->base.fallback_tfm;
@@ -456,15 +453,15 @@ static int n2_hmac_async_setkey(struct crypto_ahash *tfm, const u8 *key,
         if (err)
                 return err;

-        desc.shash.tfm = child_shash;
-        desc.shash.flags = crypto_ahash_get_flags(tfm) &
+        shash->tfm = child_shash;
+        shash->flags = crypto_ahash_get_flags(tfm) &
                 CRYPTO_TFM_REQ_MAY_SLEEP;

         bs = crypto_shash_blocksize(child_shash);
         ds = crypto_shash_digestsize(child_shash);
         BUG_ON(ds > N2_HASH_KEY_MAX);
         if (keylen > bs) {
-                err = crypto_shash_digest(&desc.shash, key, keylen,
+                err = crypto_shash_digest(shash, key, keylen,
                                           ctx->hash_key);
                 if (err)
                         return err;
...
@@ -949,17 +949,14 @@ static int omap_sham_finish_hmac(struct ahash_request *req)
         struct omap_sham_hmac_ctx *bctx = tctx->base;
         int bs = crypto_shash_blocksize(bctx->shash);
         int ds = crypto_shash_digestsize(bctx->shash);
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(bctx->shash)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, bctx->shash);

-        desc.shash.tfm = bctx->shash;
-        desc.shash.flags = 0; /* not CRYPTO_TFM_REQ_MAY_SLEEP */
+        shash->tfm = bctx->shash;
+        shash->flags = 0; /* not CRYPTO_TFM_REQ_MAY_SLEEP */

-        return crypto_shash_init(&desc.shash) ?:
-               crypto_shash_update(&desc.shash, bctx->opad, bs) ?:
-               crypto_shash_finup(&desc.shash, req->result, ds, req->result);
+        return crypto_shash_init(shash) ?:
+               crypto_shash_update(shash, bctx->opad, bs) ?:
+               crypto_shash_finup(shash, req->result, ds, req->result);
 }

 static int omap_sham_finish(struct ahash_request *req)
@@ -1118,18 +1115,15 @@ static int omap_sham_update(struct ahash_request *req)
         return omap_sham_enqueue(req, OP_UPDATE);
 }

-static int omap_sham_shash_digest(struct crypto_shash *shash, u32 flags,
+static int omap_sham_shash_digest(struct crypto_shash *tfm, u32 flags,
                                   const u8 *data, unsigned int len, u8 *out)
 {
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(shash)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, tfm);

-        desc.shash.tfm = shash;
-        desc.shash.flags = flags & CRYPTO_TFM_REQ_MAY_SLEEP;
+        shash->tfm = tfm;
+        shash->flags = flags & CRYPTO_TFM_REQ_MAY_SLEEP;

-        return crypto_shash_digest(&desc.shash, data, len, out);
+        return crypto_shash_digest(shash, data, len, out);
 }

 static int omap_sham_final_shash(struct ahash_request *req)
...
@@ -149,10 +149,7 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
                                   unsigned int auth_keylen)
 {
         struct qat_auth_state auth_state;
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(ctx->hash_tfm)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, ctx->hash_tfm);
         struct sha1_state sha1;
         struct sha256_state sha256;
         struct sha512_state sha512;
@@ -165,12 +162,12 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
         int i, offset;

         memset(auth_state.data, '\0', MAX_AUTH_STATE_SIZE + 64);
-        desc.shash.tfm = ctx->hash_tfm;
-        desc.shash.flags = 0x0;
+        shash->tfm = ctx->hash_tfm;
+        shash->flags = 0x0;

         if (auth_keylen > block_size) {
                 char buff[SHA512_BLOCK_SIZE];
-                int ret = crypto_shash_digest(&desc.shash, auth_key,
+                int ret = crypto_shash_digest(shash, auth_key,
                                               auth_keylen, buff);
                 if (ret)
                         return ret;
@@ -193,10 +190,10 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
                 *opad_ptr ^= 0x5C;
         }

-        if (crypto_shash_init(&desc.shash))
+        if (crypto_shash_init(shash))
                 return -EFAULT;

-        if (crypto_shash_update(&desc.shash, ipad, block_size))
+        if (crypto_shash_update(shash, ipad, block_size))
                 return -EFAULT;

         hash_state_out = (__be32 *)hash->sha.state1;
@@ -204,19 +201,19 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
         switch (ctx->qat_hash_alg) {
         case ICP_QAT_HW_AUTH_ALGO_SHA1:
-                if (crypto_shash_export(&desc.shash, &sha1))
+                if (crypto_shash_export(shash, &sha1))
                         return -EFAULT;
                 for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
                         *hash_state_out = cpu_to_be32(*(sha1.state + i));
                 break;
         case ICP_QAT_HW_AUTH_ALGO_SHA256:
-                if (crypto_shash_export(&desc.shash, &sha256))
+                if (crypto_shash_export(shash, &sha256))
                         return -EFAULT;
                 for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
                         *hash_state_out = cpu_to_be32(*(sha256.state + i));
                 break;
         case ICP_QAT_HW_AUTH_ALGO_SHA512:
-                if (crypto_shash_export(&desc.shash, &sha512))
+                if (crypto_shash_export(shash, &sha512))
                         return -EFAULT;
                 for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
                         *hash512_state_out = cpu_to_be64(*(sha512.state + i));
@@ -225,10 +222,10 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
                 return -EFAULT;
         }

-        if (crypto_shash_init(&desc.shash))
+        if (crypto_shash_init(shash))
                 return -EFAULT;

-        if (crypto_shash_update(&desc.shash, opad, block_size))
+        if (crypto_shash_update(shash, opad, block_size))
                 return -EFAULT;

         offset = round_up(qat_get_inter_state_size(ctx->qat_hash_alg), 8);
@@ -237,19 +234,19 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
         switch (ctx->qat_hash_alg) {
         case ICP_QAT_HW_AUTH_ALGO_SHA1:
-                if (crypto_shash_export(&desc.shash, &sha1))
+                if (crypto_shash_export(shash, &sha1))
                         return -EFAULT;
                 for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
                         *hash_state_out = cpu_to_be32(*(sha1.state + i));
                 break;
         case ICP_QAT_HW_AUTH_ALGO_SHA256:
-                if (crypto_shash_export(&desc.shash, &sha256))
+                if (crypto_shash_export(shash, &sha256))
                         return -EFAULT;
                 for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
                         *hash_state_out = cpu_to_be32(*(sha256.state + i));
                 break;
         case ICP_QAT_HW_AUTH_ALGO_SHA512:
-                if (crypto_shash_export(&desc.shash, &sha512))
+                if (crypto_shash_export(shash, &sha512))
                         return -EFAULT;
                 for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
                         *hash512_state_out = cpu_to_be64(*(sha512.state + i));
...
@@ -526,29 +526,26 @@ static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv,
                             u8 *data)
 {
         struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk;
-        struct {
-                struct shash_desc desc;
-                char ctx[crypto_shash_descsize(lmk->hash_tfm)];
-        } sdesc;
+        SHASH_DESC_ON_STACK(desc, lmk->hash_tfm);
         struct md5_state md5state;
         __le32 buf[4];
         int i, r;

-        sdesc.desc.tfm = lmk->hash_tfm;
-        sdesc.desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+        desc->tfm = lmk->hash_tfm;
+        desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

-        r = crypto_shash_init(&sdesc.desc);
+        r = crypto_shash_init(desc);
         if (r)
                 return r;

         if (lmk->seed) {
-                r = crypto_shash_update(&sdesc.desc, lmk->seed, LMK_SEED_SIZE);
+                r = crypto_shash_update(desc, lmk->seed, LMK_SEED_SIZE);
                 if (r)
                         return r;
         }

         /* Sector is always 512B, block size 16, add data of blocks 1-31 */
-        r = crypto_shash_update(&sdesc.desc, data + 16, 16 * 31);
+        r = crypto_shash_update(desc, data + 16, 16 * 31);
         if (r)
                 return r;
@@ -557,12 +554,12 @@ static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv,
         buf[1] = cpu_to_le32((((u64)dmreq->iv_sector >> 32) & 0x00FFFFFF) | 0x80000000);
         buf[2] = cpu_to_le32(4024);
         buf[3] = 0;
-        r = crypto_shash_update(&sdesc.desc, (u8 *)buf, sizeof(buf));
+        r = crypto_shash_update(desc, (u8 *)buf, sizeof(buf));
         if (r)
                 return r;

         /* No MD5 padding here */
-        r = crypto_shash_export(&sdesc.desc, &md5state);
+        r = crypto_shash_export(desc, &md5state);
         if (r)
                 return r;
@@ -679,10 +676,7 @@ static int crypt_iv_tcw_whitening(struct crypt_config *cc,
         struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw;
         u64 sector = cpu_to_le64((u64)dmreq->iv_sector);
         u8 buf[TCW_WHITENING_SIZE];
-        struct {
-                struct shash_desc desc;
-                char ctx[crypto_shash_descsize(tcw->crc32_tfm)];
-        } sdesc;
+        SHASH_DESC_ON_STACK(desc, tcw->crc32_tfm);
         int i, r;

         /* xor whitening with sector number */
@@ -691,16 +685,16 @@ static int crypt_iv_tcw_whitening(struct crypt_config *cc,
         crypto_xor(&buf[8], (u8 *)&sector, 8);

         /* calculate crc32 for every 32bit part and xor it */
-        sdesc.desc.tfm = tcw->crc32_tfm;
-        sdesc.desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+        desc->tfm = tcw->crc32_tfm;
+        desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
         for (i = 0; i < 4; i++) {
-                r = crypto_shash_init(&sdesc.desc);
+                r = crypto_shash_init(desc);
                 if (r)
                         goto out;
-                r = crypto_shash_update(&sdesc.desc, &buf[i * 4], 4);
+                r = crypto_shash_update(desc, &buf[i * 4], 4);
                 if (r)
                         goto out;
-                r = crypto_shash_final(&sdesc.desc, &buf[i * 4]);
+                r = crypto_shash_final(desc, &buf[i * 4]);
                 if (r)
                         goto out;
         }
...
@@ -31,18 +31,16 @@ void btrfs_hash_exit(void)

 u32 btrfs_crc32c(u32 crc, const void *address, unsigned int length)
 {
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(tfm)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, tfm);
+        u32 *ctx = (u32 *)shash_desc_ctx(shash);
         int err;

-        desc.shash.tfm = tfm;
-        desc.shash.flags = 0;
-        *(u32 *)desc.ctx = crc;
+        shash->tfm = tfm;
+        shash->flags = 0;
+        *ctx = crc;

-        err = crypto_shash_update(&desc.shash, address, length);
+        err = crypto_shash_update(shash, address, length);
         BUG_ON(err);

-        return *(u32 *)desc.ctx;
+        return *ctx;
 }
...
@@ -58,6 +58,11 @@ struct shash_desc {
         void *__ctx[] CRYPTO_MINALIGN_ATTR;
 };

+#define SHASH_DESC_ON_STACK(shash, ctx)                           \
+        char __##shash##_desc[sizeof(struct shash_desc) +         \
+                crypto_shash_descsize(ctx)] CRYPTO_MINALIGN_ATTR; \
+        struct shash_desc *shash = (struct shash_desc *)__##shash##_desc
+
 struct shash_alg {
         int (*init)(struct shash_desc *desc);
         int (*update)(struct shash_desc *desc, const u8 *data,
...
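
[SHASH_DESC_ON_STACK is the one new interface in the series.  With the
token pasting and line continuations resolved, a use such as
SHASH_DESC_ON_STACK(shash, tfm) expands to roughly:

        char __shash_desc[sizeof(struct shash_desc) +
                          crypto_shash_descsize(tfm)] CRYPTO_MINALIGN_ATTR;
        struct shash_desc *shash = (struct shash_desc *)__shash_desc;

The buffer is still sized at run time, but as a plain C99 VLA of char
rather than an array embedded in a struct, and CRYPTO_MINALIGN_ATTR keeps
the overlaid descriptor properly aligned for the crypto API.]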
@@ -41,20 +41,18 @@ static struct crypto_shash *tfm;

 u32 crc32c(u32 crc, const void *address, unsigned int length)
 {
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(tfm)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, tfm);
+        u32 *ctx = (u32 *)shash_desc_ctx(shash);
         int err;

-        desc.shash.tfm = tfm;
-        desc.shash.flags = 0;
-        *(u32 *)desc.ctx = crc;
+        shash->tfm = tfm;
+        shash->flags = 0;
+        *ctx = crc;

-        err = crypto_shash_update(&desc.shash, address, length);
+        err = crypto_shash_update(shash, address, length);
         BUG_ON(err);

-        return *(u32 *)desc.ctx;
+        return *ctx;
 }
 EXPORT_SYMBOL(crc32c);
...
@@ -386,17 +386,14 @@ static int ima_calc_file_hash_tfm(struct file *file,
         loff_t i_size, offset = 0;
         char *rbuf;
         int rc, read = 0;
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(tfm)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, tfm);

-        desc.shash.tfm = tfm;
-        desc.shash.flags = 0;
+        shash->tfm = tfm;
+        shash->flags = 0;

         hash->length = crypto_shash_digestsize(tfm);

-        rc = crypto_shash_init(&desc.shash);
+        rc = crypto_shash_init(shash);
         if (rc != 0)
                 return rc;
@@ -426,7 +423,7 @@ static int ima_calc_file_hash_tfm(struct file *file,
                         break;
                 offset += rbuf_len;

-                rc = crypto_shash_update(&desc.shash, rbuf, rbuf_len);
+                rc = crypto_shash_update(shash, rbuf, rbuf_len);
                 if (rc)
                         break;
         }
@@ -435,7 +432,7 @@ static int ima_calc_file_hash_tfm(struct file *file,
         kfree(rbuf);
 out:
         if (!rc)
-                rc = crypto_shash_final(&desc.shash, hash->digest);
+                rc = crypto_shash_final(shash, hash->digest);
         return rc;
 }
@@ -493,18 +490,15 @@ static int ima_calc_field_array_hash_tfm(struct ima_field_data *field_data,
                                          struct ima_digest_data *hash,
                                          struct crypto_shash *tfm)
 {
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(tfm)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, tfm);
         int rc, i;

-        desc.shash.tfm = tfm;
-        desc.shash.flags = 0;
+        shash->tfm = tfm;
+        shash->flags = 0;

         hash->length = crypto_shash_digestsize(tfm);

-        rc = crypto_shash_init(&desc.shash);
+        rc = crypto_shash_init(shash);
         if (rc != 0)
                 return rc;
@@ -514,7 +508,7 @@ static int ima_calc_field_array_hash_tfm(struct ima_field_data *field_data,
                 u32 datalen = field_data[i].len;

                 if (strcmp(td->name, IMA_TEMPLATE_IMA_NAME) != 0) {
-                        rc = crypto_shash_update(&desc.shash,
+                        rc = crypto_shash_update(shash,
                                                 (const u8 *) &field_data[i].len,
                                                 sizeof(field_data[i].len));
                         if (rc)
@@ -524,13 +518,13 @@ static int ima_calc_field_array_hash_tfm(struct ima_field_data *field_data,
                         data_to_hash = buffer;
                         datalen = IMA_EVENT_NAME_LEN_MAX + 1;
                 }
-                rc = crypto_shash_update(&desc.shash, data_to_hash, datalen);
+                rc = crypto_shash_update(shash, data_to_hash, datalen);
                 if (rc)
                         break;
         }

         if (!rc)
-                rc = crypto_shash_final(&desc.shash, hash->digest);
+                rc = crypto_shash_final(shash, hash->digest);

         return rc;
 }
@@ -571,15 +565,12 @@ static int __init ima_calc_boot_aggregate_tfm(char *digest,
 {
         u8 pcr_i[TPM_DIGEST_SIZE];
         int rc, i;
-        struct {
-                struct shash_desc shash;
-                char ctx[crypto_shash_descsize(tfm)];
-        } desc;
+        SHASH_DESC_ON_STACK(shash, tfm);

-        desc.shash.tfm = tfm;
-        desc.shash.flags = 0;
+        shash->tfm = tfm;
+        shash->flags = 0;

-        rc = crypto_shash_init(&desc.shash);
+        rc = crypto_shash_init(shash);
         if (rc != 0)
                 return rc;
@@ -587,10 +578,10 @@ static int __init ima_calc_boot_aggregate_tfm(char *digest,
         for (i = TPM_PCR0; i < TPM_PCR8; i++) {
                 ima_pcrread(i, pcr_i);
                 /* now accumulate with current aggregate */
-                rc = crypto_shash_update(&desc.shash, pcr_i, TPM_DIGEST_SIZE);
+                rc = crypto_shash_update(shash, pcr_i, TPM_DIGEST_SIZE);
         }
         if (!rc)
-                crypto_shash_final(&desc.shash, digest);
+                crypto_shash_final(shash, digest);
         return rc;
 }
...