Commit e577dc15 authored by Linus Torvalds

Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

Pull crypto fixes from Herbert Xu:
 "Fix a number of bugs in the ccp driver"

* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
  crypto: ccp - Ignore tag length when decrypting GCM ciphertext
  crypto: ccp - Add support for valid authsize values less than 16
  crypto: ccp - Fix oops by properly managing allocated structures
parents b678c568 e2664ecb
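
Background sketch (not part of this commit): the fixes below let the ccp backend honour GCM authentication tag sizes smaller than 16 bytes and clean up how the tag is located and compared on decryption. A minimal, hypothetical in-kernel caller that requests a 12-byte tag through the AEAD API is sketched here; the function name, buffer layout and error handling are illustrative assumptions, only the crypto API calls themselves are real.

/*
 * Hypothetical illustration only -- not from this commit.  Requests a
 * 12-byte tag from gcm(aes); with the changes below the ccp driver
 * accepts tag sizes of 4, 8 and 12..16 bytes instead of assuming 16.
 */
#include <crypto/aead.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int gcm_short_tag_demo(const u8 *key, unsigned int keylen,
                              u8 *buf, unsigned int assoclen,
                              unsigned int ptlen, u8 *iv)
{
        struct crypto_aead *tfm;
        struct aead_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        int ret;

        tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_aead_setkey(tfm, key, keylen);
        if (ret)
                goto out_tfm;

        /* 12-byte tag; backends that cannot do this return -EINVAL */
        ret = crypto_aead_setauthsize(tfm, 12);
        if (ret)
                goto out_tfm;

        req = aead_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                ret = -ENOMEM;
                goto out_tfm;
        }

        /* buf holds AAD || plaintext and has room for the 12-byte tag */
        sg_init_one(&sg, buf, assoclen + ptlen + 12);
        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                  crypto_req_done, &wait);
        aead_request_set_ad(req, assoclen);
        aead_request_set_crypt(req, &sg, &sg, ptlen, iv);

        ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);

        aead_request_free(req);
out_tfm:
        crypto_free_aead(tfm);
        return ret;
}

On the decrypt side the tag is then the trailing 12 bytes of the input, which is exactly the length bookkeeping that the ccp_run_aes_gcm_cmd() hunks below switch from a hard-coded AES_BLOCK_SIZE to authsize.
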
@@ -58,6 +58,19 @@ static int ccp_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
 static int ccp_aes_gcm_setauthsize(struct crypto_aead *tfm,
                                    unsigned int authsize)
 {
+        switch (authsize) {
+        case 16:
+        case 15:
+        case 14:
+        case 13:
+        case 12:
+        case 8:
+        case 4:
+                break;
+        default:
+                return -EINVAL;
+        }
+
         return 0;
 }
@@ -104,6 +117,7 @@ static int ccp_aes_gcm_crypt(struct aead_request *req, bool encrypt)
         memset(&rctx->cmd, 0, sizeof(rctx->cmd));
         INIT_LIST_HEAD(&rctx->cmd.entry);
         rctx->cmd.engine = CCP_ENGINE_AES;
+        rctx->cmd.u.aes.authsize = crypto_aead_authsize(tfm);
         rctx->cmd.u.aes.type = ctx->u.aes.type;
         rctx->cmd.u.aes.mode = ctx->u.aes.mode;
         rctx->cmd.u.aes.action = encrypt;
@@ -622,6 +622,7 @@ static int ccp_run_aes_gcm_cmd(struct ccp_cmd_queue *cmd_q,
         unsigned long long *final;
         unsigned int dm_offset;
+        unsigned int authsize;
         unsigned int jobid;
         unsigned int ilen;
         bool in_place = true; /* Default value */
@@ -643,6 +644,21 @@ static int ccp_run_aes_gcm_cmd(struct ccp_cmd_queue *cmd_q,
         if (!aes->key) /* Gotta have a key SGL */
                 return -EINVAL;
 
+        /* Zero defaults to 16 bytes, the maximum size */
+        authsize = aes->authsize ? aes->authsize : AES_BLOCK_SIZE;
+        switch (authsize) {
+        case 16:
+        case 15:
+        case 14:
+        case 13:
+        case 12:
+        case 8:
+        case 4:
+                break;
+        default:
+                return -EINVAL;
+        }
+
         /* First, decompose the source buffer into AAD & PT,
          * and the destination buffer into AAD, CT & tag, or
          * the input into CT & tag.
@@ -657,7 +673,7 @@ static int ccp_run_aes_gcm_cmd(struct ccp_cmd_queue *cmd_q,
                 p_tag = scatterwalk_ffwd(sg_tag, p_outp, ilen);
         } else {
                 /* Input length for decryption includes tag */
-                ilen = aes->src_len - AES_BLOCK_SIZE;
+                ilen = aes->src_len - authsize;
                 p_tag = scatterwalk_ffwd(sg_tag, p_inp, ilen);
         }
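
A side note on the hunk above, as a hedged sketch rather than anything from the patch: on decryption aes->src_len covers the ciphertext plus the appended tag, so subtracting the real tag length (instead of a hard-coded AES_BLOCK_SIZE) gives both the number of bytes to feed the engine and the offset of the tag. The helper name below is hypothetical and purely illustrative.

/*
 * Illustrative only (hypothetical helper, not from the patch): the
 * decrypt-side split once the actual tag length is known.
 */
static inline unsigned int gcm_dec_ctext_len(unsigned int src_len,
                                             unsigned int authsize)
{
        /* bytes handed to the AES engine; the tag starts at this offset */
        return src_len - authsize;
}

With a 12-byte tag, the old 16-byte subtraction would have shortened the ciphertext by four bytes and pointed the tag comparison at the wrong offset.
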
@@ -766,8 +782,7 @@ static int ccp_run_aes_gcm_cmd(struct ccp_cmd_queue *cmd_q,
         while (src.sg_wa.bytes_left) {
                 ccp_prepare_data(&src, &dst, &op, AES_BLOCK_SIZE, true);
                 if (!src.sg_wa.bytes_left) {
-                        unsigned int nbytes = aes->src_len
-                                              % AES_BLOCK_SIZE;
+                        unsigned int nbytes = ilen % AES_BLOCK_SIZE;
 
                         if (nbytes) {
                                 op.eom = 1;
@@ -839,19 +854,19 @@ static int ccp_run_aes_gcm_cmd(struct ccp_cmd_queue *cmd_q,
         if (aes->action == CCP_AES_ACTION_ENCRYPT) {
                 /* Put the ciphered tag after the ciphertext. */
-                ccp_get_dm_area(&final_wa, 0, p_tag, 0, AES_BLOCK_SIZE);
+                ccp_get_dm_area(&final_wa, 0, p_tag, 0, authsize);
         } else {
                 /* Does this ciphered tag match the input? */
-                ret = ccp_init_dm_workarea(&tag, cmd_q, AES_BLOCK_SIZE,
+                ret = ccp_init_dm_workarea(&tag, cmd_q, authsize,
                                            DMA_BIDIRECTIONAL);
                 if (ret)
                         goto e_tag;
-                ret = ccp_set_dm_area(&tag, 0, p_tag, 0, AES_BLOCK_SIZE);
+                ret = ccp_set_dm_area(&tag, 0, p_tag, 0, authsize);
                 if (ret)
                         goto e_tag;
 
                 ret = crypto_memneq(tag.address, final_wa.address,
-                                    AES_BLOCK_SIZE) ? -EBADMSG : 0;
+                                    authsize) ? -EBADMSG : 0;
                 ccp_dm_free(&tag);
         }
@@ -859,11 +874,11 @@ static int ccp_run_aes_gcm_cmd(struct ccp_cmd_queue *cmd_q,
         ccp_dm_free(&final_wa);
 
 e_dst:
-        if (aes->src_len && !in_place)
+        if (ilen > 0 && !in_place)
                 ccp_free_data(&dst, cmd_q);
 
 e_src:
-        if (aes->src_len)
+        if (ilen > 0)
                 ccp_free_data(&src, cmd_q);
 
 e_aad:
@@ -170,6 +170,8 @@ struct ccp_aes_engine {
         enum ccp_aes_mode mode;
         enum ccp_aes_action action;
 
+        u32 authsize;
+
         struct scatterlist *key;
         u32 key_len;            /* In bytes */