Commit 49d21b00 authored by James Morris

[CRYPTO]: Cleanups based upon feedback from Rusty and jgarzik

- s/__u/u/
- s/char/u8/
- Fixed bug in cipher.c: the page remap after yielding was off by one block
parent a5c660e6
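
The cipher.c fix is easiest to see in isolation. Below is a minimal, self-contained userspace sketch of the offset bookkeeping the patch introduces: after each block the kernel code unmaps the page, may yield, and remaps it, so the working pointer has to be recomputed from the start of the mapping using the scatterlist offset, the chunking offset (coff) and the new per-fragment block offset (boff). Everything here other than those names and bsize is invented for illustration; the real loop is in crypt() in the diff below.

#include <stdio.h>
#include <string.h>

int main(void)
{
        unsigned char page[64];         /* stand-in for the mapped page */
        const size_t bsize = 8;         /* cipher block size */
        size_t offset = 4;              /* scatterlist offset into the page */
        size_t coff = 0;                /* chunking offset carried from the previous frag */
        size_t boff = 0;                /* block offset accumulated within this frag */
        size_t len = 24;                /* bytes left to process in this fragment */

        memset(page, 0, sizeof(page));

        while (len >= bsize) {
                /* recompute the pointer from the start of the mapping each
                 * time, as the kernel code must do after kunmap/yield/kmap */
                unsigned char *p = page + offset + coff + boff;

                memset(p, 0xff, bsize);  /* stand-in for the per-block cipher call */

                boff += bsize;           /* the fix: advance the block offset so the */
                len  -= bsize;           /* next remap lands on the next block */
        }

        printf("next block would start at byte %zu\n", offset + coff + boff);
        return 0;
}
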
@@ -106,7 +106,7 @@ static inline void crypto_alg_put(struct crypto_alg *alg)
 	/* XXX: dec refcount */
 }
 
-struct crypto_alg *crypto_alg_lookup(__u32 algid)
+struct crypto_alg *crypto_alg_lookup(u32 algid)
 {
 	struct list_head *p;
 	struct crypto_alg *alg = NULL;
@@ -150,7 +150,7 @@ static void crypto_init_ops(struct crypto_tfm *tfm)
 /*
  * Todo: try and load the module if the lookup fails.
  */
-struct crypto_tfm *crypto_alloc_tfm(__u32 id)
+struct crypto_tfm *crypto_alloc_tfm(u32 id)
 {
 	struct crypto_tfm *tfm = NULL;
 	struct crypto_alg *alg;
...
@@ -21,13 +21,13 @@
 #include <linux/crypto.h>
 #include "internal.h"
 
-typedef void (cryptfn_t)(void *, __u8 *, __u8 *);
-typedef void (procfn_t)(struct crypto_tfm *, __u8 *, cryptfn_t, int enc);
+typedef void (cryptfn_t)(void *, u8 *, u8 *);
+typedef void (procfn_t)(struct crypto_tfm *, u8 *, cryptfn_t, int enc);
 
-static inline void xor_64(__u8 *a, const __u8 *b)
+static inline void xor_64(u8 *a, const u8 *b)
 {
-	((__u32 *)a)[0] ^= ((__u32 *)b)[0];
-	((__u32 *)a)[1] ^= ((__u32 *)b)[1];
+	((u32 *)a)[0] ^= ((u32 *)b)[0];
+	((u32 *)a)[1] ^= ((u32 *)b)[1];
 }
 
 static inline size_t sglen(struct scatterlist *sg, size_t nsg)
@@ -45,7 +45,7 @@ static inline size_t sglen(struct scatterlist *sg, size_t nsg)
  * Do not call this unless the total length of all of the fragments
  * has been verified as multiple of the block size.
  */
-static int copy_chunks(struct crypto_tfm *tfm, __u8 *buf,
+static int copy_chunks(struct crypto_tfm *tfm, u8 *buf,
                        struct scatterlist *sg, int sgidx,
                        int rlen, int *last, int in)
 {
@@ -84,14 +84,14 @@ static int copy_chunks(struct crypto_tfm *tfm, __u8 *buf,
 	return i - sgidx - 2 + aligned;
 }
 
-static inline int gather_chunks(struct crypto_tfm *tfm, __u8 *buf,
+static inline int gather_chunks(struct crypto_tfm *tfm, u8 *buf,
                                 struct scatterlist *sg,
                                 int sgidx, int rlen, int *last)
 {
 	return copy_chunks(tfm, buf, sg, sgidx, rlen, last, 1);
 }
 
-static inline int scatter_chunks(struct crypto_tfm *tfm, __u8 *buf,
+static inline int scatter_chunks(struct crypto_tfm *tfm, u8 *buf,
                                  struct scatterlist *sg,
                                  int sgidx, int rlen, int *last)
 {
@@ -105,13 +105,18 @@ static inline int scatter_chunks(struct crypto_tfm *tfm, __u8 *buf,
  * decrypt across possibly multiple page boundaries via a temporary
  * block, then continue processing with a chunk offset until the end
  * of a frag is block aligned.
+ *
+ * The code is further complicated by having to remap a page after
+ * processing a block then yielding. The data will be offset from the
+ * start of page at the scatterlist offset, the chunking offset (coff)
+ * and the block offset (boff).
  */
 static int crypt(struct crypto_tfm *tfm, struct scatterlist *sg,
                  size_t nsg, cryptfn_t crfn, procfn_t prfn, int enc)
 {
 	int i, coff;
 	size_t bsize = crypto_tfm_blocksize(tfm);
-	__u8 tmp[CRYPTO_MAX_BLOCK_SIZE];
+	u8 tmp[CRYPTO_MAX_BLOCK_SIZE];
 
 	if (sglen(sg, nsg) % bsize) {
 		tfm->crt_flags |= CRYPTO_BAD_BLOCK_LEN;
@@ -119,7 +124,7 @@ static int crypt(struct crypto_tfm *tfm, struct scatterlist *sg,
 	}
 
 	for (i = 0, coff = 0; i < nsg; i++) {
-		int n = 0;
+		int n = 0, boff = 0;
 		int len = sg[i].length - coff;
 		char *p = crypto_kmap(tfm, sg[i].page) + sg[i].offset + coff;
@@ -135,8 +140,12 @@ static int crypt(struct crypto_tfm *tfm, struct scatterlist *sg,
 			prfn(tfm, p, crfn, enc);
 			crypto_kunmap(tfm, sg[i].page, p);
 			crypto_yield(tfm);
-			p = crypto_kmap(tfm, sg[i].page) + sg[i].offset + coff;
-			p += bsize;
+
+			/* remap and point to recalculated offset */
+			boff += bsize;
+			p = crypto_kmap(tfm, sg[i].page)
+			    + sg[i].offset + coff + boff;
+
 			len -= bsize;
 
 			/* End of frag with no remnant? */
@@ -153,7 +162,7 @@ static int crypt(struct crypto_tfm *tfm, struct scatterlist *sg,
 }
 
 static void cbc_process(struct crypto_tfm *tfm,
-                        __u8 *block, cryptfn_t fn, int enc)
+                        u8 *block, cryptfn_t fn, int enc)
 {
 	if (enc) {
 		xor_64(tfm->crt_cipher.cit_iv, block);
@@ -161,7 +170,7 @@ static void cbc_process(struct crypto_tfm *tfm,
 		memcpy(tfm->crt_cipher.cit_iv, block,
 		       crypto_tfm_blocksize(tfm));
 	} else {
-		__u8 buf[CRYPTO_MAX_BLOCK_SIZE];
+		u8 buf[CRYPTO_MAX_BLOCK_SIZE];
 
 		fn(tfm->crt_ctx, buf, block);
 		xor_64(buf, tfm->crt_cipher.cit_iv);
@@ -171,13 +180,13 @@ static void cbc_process(struct crypto_tfm *tfm,
 	}
 }
 
-static void ecb_process(struct crypto_tfm *tfm, __u8 *block,
+static void ecb_process(struct crypto_tfm *tfm, u8 *block,
                         cryptfn_t fn, int enc)
 {
 	fn(tfm->crt_ctx, block, block);
 }
 
-static int setkey(struct crypto_tfm *tfm, const __u8 *key, size_t keylen)
+static int setkey(struct crypto_tfm *tfm, const u8 *key, size_t keylen)
 {
 	return tfm->__crt_alg->cra_cipher.cia_setkey(tfm->crt_ctx, key,
 	                                             keylen, &tfm->crt_flags);
...
This diff is collapsed.
@@ -39,14 +39,14 @@ static void update(struct crypto_tfm *tfm, struct scatterlist *sg, size_t nsg)
 	return;
 }
 
-static void final(struct crypto_tfm *tfm, __u8 *out)
+static void final(struct crypto_tfm *tfm, u8 *out)
 {
 	tfm->__crt_alg->cra_digest.dia_final(tfm->crt_ctx, out);
 	return;
 }
 
 static void digest(struct crypto_tfm *tfm,
-                   struct scatterlist *sg, size_t nsg, __u8 *out)
+                   struct scatterlist *sg, size_t nsg, u8 *out)
 {
 	int i;
@@ -63,8 +63,8 @@ static void digest(struct crypto_tfm *tfm,
 	return;
 }
 
-static void hmac(struct crypto_tfm *tfm, __u8 *key, size_t keylen,
-                 struct scatterlist *sg, size_t nsg, __u8 *out)
+static void hmac(struct crypto_tfm *tfm, u8 *key, size_t keylen,
+                 struct scatterlist *sg, size_t nsg, u8 *out)
 {
 	int i;
 	struct scatterlist tmp;
...
@@ -36,14 +36,14 @@
 	(w += f(x, y, z) + in, w = (w<<s | w>>(32-s)) + x)
 
 struct md5_ctx {
-	__u32 hash[MD5_HASH_WORDS];
-	__u32 block[MD5_BLOCK_WORDS];
-	__u64 byte_count;
+	u32 hash[MD5_HASH_WORDS];
+	u32 block[MD5_BLOCK_WORDS];
+	u64 byte_count;
 };
 
-static inline void md5_transform(__u32 *hash, __u32 const *in)
+static inline void md5_transform(u32 *hash, u32 const *in)
 {
-	register __u32 a, b, c, d;
+	register u32 a, b, c, d;
 
 	a = hash[0];
 	b = hash[1];
@@ -125,7 +125,7 @@ static inline void md5_transform(__u32 *hash, __u32 const *in)
 }
 
 /* XXX: this stuff can be optimized */
-static inline void le32_to_cpu_array(__u32 *buf, unsigned words)
+static inline void le32_to_cpu_array(u32 *buf, unsigned words)
 {
 	while (words--) {
 		__le32_to_cpus(buf);
@@ -133,7 +133,7 @@ static inline void le32_to_cpu_array(__u32 *buf, unsigned words)
 	}
 }
 
-static inline void cpu_to_le32_array(__u32 *buf, unsigned words)
+static inline void cpu_to_le32_array(u32 *buf, unsigned words)
 {
 	while (words--) {
 		__cpu_to_le32s(buf);
@@ -143,7 +143,7 @@ static inline void cpu_to_le32_array(__u32 *buf, unsigned words)
 static inline void md5_transform_helper(struct md5_ctx *ctx)
 {
-	le32_to_cpu_array(ctx->block, sizeof(ctx->block) / sizeof(__u32));
+	le32_to_cpu_array(ctx->block, sizeof(ctx->block) / sizeof(u32));
 	md5_transform(ctx->hash, ctx->block);
 }
@@ -158,10 +158,10 @@ static void md5_init(void *ctx)
 	mctx->byte_count = 0;
 }
 
-static void md5_update(void *ctx, const __u8 *data, size_t len)
+static void md5_update(void *ctx, const u8 *data, size_t len)
 {
 	struct md5_ctx *mctx = ctx;
-	const __u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
+	const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
 
 	mctx->byte_count += len;
@@ -188,7 +188,7 @@ static void md5_update(void *ctx, const __u8 *data, size_t len)
 	memcpy(mctx->block, data, len);
 }
 
-static void md5_final(void *ctx, __u8 *out)
+static void md5_final(void *ctx, u8 *out)
 {
 	struct md5_ctx *mctx = ctx;
 	const int offset = mctx->byte_count & 0x3f;
@@ -197,7 +197,7 @@ static void md5_final(void *ctx, __u8 *out)
 	*p++ = 0x80;
 
 	if (padding < 0) {
-		memset(p, 0x00, padding + sizeof (__u64));
+		memset(p, 0x00, padding + sizeof (u64));
 		md5_transform_helper(mctx);
 		p = (char *)mctx->block;
 		padding = 56;
@@ -207,9 +207,9 @@ static void md5_final(void *ctx, __u8 *out)
 	mctx->block[14] = mctx->byte_count << 3;
 	mctx->block[15] = mctx->byte_count >> 29;
 	le32_to_cpu_array(mctx->block, (sizeof(mctx->block) -
-	                  sizeof(__u64)) / sizeof(__u32));
+	                  sizeof(u64)) / sizeof(u32));
 	md5_transform(mctx->hash, mctx->block);
-	cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(__u32));
+	cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32));
 	memcpy(out, mctx->hash, sizeof(mctx->hash));
 	memset(mctx, 0, sizeof(mctx));
 }
...
@@ -50,20 +50,20 @@ static inline u32 rol(u32 value, u32 bits)
 #define R4(v,w,x,y,z,i) z+=(w^x^y)+blk(i)+0xCA62C1D6+rol(v,5);w=rol(w,30);
 
 struct sha1_ctx {
-	__u64 count;
-	__u32 state[5];
-	__u8 buffer[64];
+	u64 count;
+	u32 state[5];
+	u8 buffer[64];
 };
 
 /* Hash a single 512-bit block. This is the core of the algorithm. */
-static void sha1_transform(__u32 *state, const char *in)
+static void sha1_transform(u32 *state, const u8 *in)
 {
-	__u32 a, b, c, d, e;
-	__u32 block32[16];
+	u32 a, b, c, d, e;
+	u32 block32[16];
 
 	/* convert/copy data to workspace */
-	for (a = 0; a < sizeof(block32)/sizeof(__u32); a++)
-		block32[a] = be32_to_cpu (((const __u32 *)in)[a]);
+	for (a = 0; a < sizeof(block32)/sizeof(u32); a++)
+		block32[a] = be32_to_cpu (((const u32 *)in)[a]);
 
 	/* Copy context->state[] to working vars */
 	a = state[0];
@@ -116,7 +116,7 @@ static void sha1_init(void *ctx)
 	*sctx = initstate;
 }
 
-static void sha1_update(void *ctx, const __u8 *data, size_t len)
+static void sha1_update(void *ctx, const u8 *data, size_t len)
 {
 	struct sha1_ctx *sctx = ctx;
 	unsigned i, j;
@@ -138,13 +138,13 @@ static void sha1_update(void *ctx, const __u8 *data, size_t len)
 /* Add padding and return the message digest. */
-static void sha1_final(void* ctx, __u8 *out)
+static void sha1_final(void* ctx, u8 *out)
 {
 	struct sha1_ctx *sctx = ctx;
-	__u32 i, j, index, padlen;
-	__u64 t;
-	__u8 bits[8] = { 0, };
-	const static __u8 padding[64] = { 0x80, };
+	u32 i, j, index, padlen;
+	u64 t;
+	u8 bits[8] = { 0, };
+	const static u8 padding[64] = { 0x80, };
 
 	t = sctx->count;
 	bits[7] = 0xff & t; t>>=8;
@@ -166,7 +166,7 @@ static void sha1_final(void* ctx, __u8 *out)
 	/* Store state in digest */
 	for (i = j = 0; i < 5; i++, j += 4) {
-		__u32 t2 = sctx->state[i];
+		u32 t2 = sctx->state[i];
 		out[j+3] = t2 & 0xff; t2>>=8;
 		out[j+2] = t2 & 0xff; t2>>=8;
 		out[j+1] = t2 & 0xff; t2>>=8;
...
@@ -326,7 +326,7 @@ void test_des(void)
 		printk("test %d:\n", i + 1);
 		key = des_tv[i].key;
 		tfm->crt_flags = CRYPTO_WEAK_KEY_CHECK;
 
 		ret = crypto_cipher_setkey(tfm, key, 8);
 		if (ret) {
...
@@ -67,17 +67,16 @@ struct scatterlist;
 struct cipher_alg {
 	size_t cia_keysize;
 	size_t cia_ivsize;
-	int (*cia_setkey)(void *ctx, const __u8 *key, size_t keylen,
-	                  int *flags);
-	void (*cia_encrypt)(void *ctx, __u8 *dst, __u8 *src);
-	void (*cia_decrypt)(void *ctx, __u8 *dst, __u8 *src);
+	int (*cia_setkey)(void *ctx, const u8 *key, size_t keylen, int *flags);
+	void (*cia_encrypt)(void *ctx, u8 *dst, u8 *src);
+	void (*cia_decrypt)(void *ctx, u8 *dst, u8 *src);
 };
 
 struct digest_alg {
 	size_t dia_digestsize;
 	void (*dia_init)(void *ctx);
-	void (*dia_update)(void *ctx, const __u8 *data, size_t len);
-	void (*dia_final)(void *ctx, __u8 *out);
+	void (*dia_update)(void *ctx, const u8 *data, size_t len);
+	void (*dia_final)(void *ctx, u8 *out);
 };
 
 struct compress_alg {
@@ -91,7 +90,7 @@ struct compress_alg {
 struct crypto_alg {
 	struct list_head cra_list;
-	__u32 cra_id;
+	u32 cra_id;
 	size_t cra_blocksize;
 	size_t cra_ctxsize;
 	char cra_name[CRYPTO_MAX_ALG_NAME];
@@ -116,9 +115,8 @@ struct crypto_tfm;
  */
 struct cipher_tfm {
 	void *cit_iv;
-	__u32 cit_mode;
-	int (*cit_setkey)(struct crypto_tfm *tfm, const __u8 *key,
-	                  size_t keylen);
+	u32 cit_mode;
+	int (*cit_setkey)(struct crypto_tfm *tfm, const u8 *key, size_t keylen);
 	int (*cit_encrypt)(struct crypto_tfm *tfm,
 	                   struct scatterlist *sg, size_t nsg);
 	int (*cit_decrypt)(struct crypto_tfm *tfm,
@@ -129,11 +127,11 @@ struct digest_tfm {
 	void (*dit_init)(struct crypto_tfm *tfm);
 	void (*dit_update)(struct crypto_tfm *tfm,
 	                   struct scatterlist *sg, size_t nsg);
-	void (*dit_final)(struct crypto_tfm *tfm, __u8 *out);
+	void (*dit_final)(struct crypto_tfm *tfm, u8 *out);
 	void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
-	                   size_t nsg, __u8 *out);
-	void (*dit_hmac)(struct crypto_tfm *tfm, __u8 *key, size_t keylen,
-	                 struct scatterlist *sg, size_t nsg, __u8 *out);
+	                   size_t nsg, u8 *out);
+	void (*dit_hmac)(struct crypto_tfm *tfm, u8 *key, size_t keylen,
+	                 struct scatterlist *sg, size_t nsg, u8 *out);
 };
 
 struct compress_tfm {
@@ -164,7 +162,7 @@ struct crypto_tfm {
  * Will try an load a module based on the name if not present
 * in the kernel. Increments its algorithm refcount.
 */
-struct crypto_tfm *crypto_alloc_tfm(__u32 id);
+struct crypto_tfm *crypto_alloc_tfm(u32 id);
 
 /*
 * Frees the transform and decrements its algorithm's recount.
@@ -185,28 +183,28 @@ static inline void crypto_digest_update(struct crypto_tfm *tfm,
 	tfm->crt_digest.dit_update(tfm, sg, nsg);
 }
 
-static inline void crypto_digest_final(struct crypto_tfm *tfm, __u8 *out)
+static inline void crypto_digest_final(struct crypto_tfm *tfm, u8 *out)
 {
 	tfm->crt_digest.dit_final(tfm, out);
 }
 
 static inline void crypto_digest_digest(struct crypto_tfm *tfm,
                                         struct scatterlist *sg,
-                                        size_t nsg, __u8 *out)
+                                        size_t nsg, u8 *out)
 {
 	tfm->crt_digest.dit_digest(tfm, sg, nsg, out);
 }
 
-static inline void crypto_digest_hmac(struct crypto_tfm *tfm, __u8 *key,
+static inline void crypto_digest_hmac(struct crypto_tfm *tfm, u8 *key,
                                       size_t keylen, struct scatterlist *sg,
-                                      size_t nsg, __u8 *out)
+                                      size_t nsg, u8 *out)
 {
 	tfm->crt_digest.dit_hmac(tfm, key, keylen, sg, nsg, out);
 }
 
 static inline int crypto_cipher_setkey(struct crypto_tfm *tfm,
-                                       const __u8 *key, size_t keylen)
+                                       const u8 *key, size_t keylen)
 {
 	return tfm->crt_cipher.cit_setkey(tfm, key, keylen);
 }
@@ -224,7 +222,7 @@ static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
 }
 
 static inline void crypto_cipher_copy_iv(struct crypto_tfm *tfm,
-                                         __u8 *src, size_t len)
+                                         u8 *src, size_t len)
 {
 	memcpy(tfm->crt_cipher.cit_iv, src, len);
 }
@@ -257,7 +255,7 @@ static inline char *crypto_tfm_name(struct crypto_tfm *tfm)
 	return tfm->__crt_alg->cra_name;
 }
 
-static inline __u32 crypto_tfm_type(struct crypto_tfm *tfm)
+static inline u32 crypto_tfm_type(struct crypto_tfm *tfm)
 {
 	return tfm->__crt_alg->cra_id & CRYPTO_TYPE_MASK;
 }
...
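
For completeness, here is a hedged sketch of how a consumer is expected to drive the interface declared in the header above. The wrappers crypto_alloc_tfm(), crypto_cipher_setkey(), crypto_cipher_copy_iv() and crypto_tfm_blocksize() appear in this commit; CRYPTO_DES_CBC, crypto_free_tfm() and crypto_cipher_encrypt() do not appear in the hunks shown and are assumptions for illustration (the last by analogy with crypto_cipher_decrypt() above). The code is meant to be read as a fragment, not built as a standalone module.

/*
 * Hedged usage sketch, not part of the commit.  Assumed identifiers are
 * marked in the comments below.
 */
#include <linux/errno.h>
#include <linux/crypto.h>

static int encrypt_page_cbc(struct page *pg, size_t len,
                            const u8 *key, u8 *iv)
{
        struct scatterlist sg;
        struct crypto_tfm *tfm;
        int ret;

        tfm = crypto_alloc_tfm(CRYPTO_DES_CBC);        /* assumed algorithm id */
        if (tfm == NULL)
                return -ENOMEM;

        ret = crypto_cipher_setkey(tfm, key, 8);
        if (ret)
                goto out;

        /* load the IV before encrypting in CBC mode */
        crypto_cipher_copy_iv(tfm, iv, crypto_tfm_blocksize(tfm));

        sg.page   = pg;
        sg.offset = 0;
        sg.length = len;        /* must be a multiple of the block size */

        ret = crypto_cipher_encrypt(tfm, &sg, 1);       /* assumed wrapper for cit_encrypt */
out:
        crypto_free_tfm(tfm);                           /* assumed free routine */
        return ret;
}
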