Commit 34cb5821
authored Feb 03, 2017 by Herbert Xu
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

Merge the crypto tree to pick up arm64 output IV patch.

parents 7dede913 7c2cf1c4
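The "output IV" requirement referred to above is the skcipher API contract that, once a request completes, the IV buffer passed with the request holds the IV for a follow-on request (the last ciphertext block for CBC, the incremented counter for CTR), so a long message can be split across several calls. The sketch below is illustrative only and is not part of this commit; the helper name encrypt_in_two_chunks is invented for the example, it assumes a synchronous tfm and a length that is a multiple of the block size.

/*
 * Sketch only (not from this commit): a caller that depends on the IV
 * being written back between two chained requests.
 */
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

static int encrypt_in_two_chunks(struct crypto_skcipher *tfm,
				 u8 *buf, unsigned int len, u8 *iv)
{
	unsigned int half = len / 2;	/* assume len is a multiple of 32 */
	struct skcipher_request *req;
	struct scatterlist sg;
	int err;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;
	skcipher_request_set_callback(req, 0, NULL, NULL);

	/* First half: 'iv' is consumed and, on return, updated in place. */
	sg_init_one(&sg, buf, half);
	skcipher_request_set_crypt(req, &sg, &sg, half, iv);
	err = crypto_skcipher_encrypt(req);
	if (err)
		goto out;

	/* Second half: correctness relies on the IV written back above. */
	sg_init_one(&sg, buf + half, len - half);
	skcipher_request_set_crypt(req, &sg, &sg, len - half, iv);
	err = crypto_skcipher_encrypt(req);
out:
	skcipher_request_free(req);
	return err;
}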
Showing 10 changed files with 96 additions and 89 deletions (+96 -89)
arch/arm64/crypto/aes-modes.S          +42 -46
arch/x86/crypto/aesni-intel_glue.c      +6  -5
crypto/algapi.c                         +1  -0
crypto/algif_aead.c                     +1  -1
drivers/crypto/ccp/ccp-dev-v5.c         +1  -1
drivers/crypto/ccp/ccp-dev.h            +1  -0
drivers/crypto/ccp/ccp-dmaengine.c      +5  -1
drivers/crypto/chelsio/chcr_algo.c     +28 -25
drivers/crypto/chelsio/chcr_core.c      +8 -10
drivers/crypto/chelsio/chcr_crypto.h    +3  -0
arch/arm64/crypto/aes-modes.S

@@ -193,15 +193,16 @@ AES_ENTRY(aes_cbc_encrypt)
 	cbz		w6, .Lcbcencloop

 	ld1		{v0.16b}, [x5]			/* get iv */
-	enc_prepare	w3, x2, x5
+	enc_prepare	w3, x2, x6

 .Lcbcencloop:
 	ld1		{v1.16b}, [x1], #16		/* get next pt block */
 	eor		v0.16b, v0.16b, v1.16b		/* ..and xor with iv */
-	encrypt_block	v0, w3, x2, x5, w6
+	encrypt_block	v0, w3, x2, x6, w7
 	st1		{v0.16b}, [x0], #16
 	subs		w4, w4, #1
 	bne		.Lcbcencloop
+	st1		{v0.16b}, [x5]			/* return iv */
 	ret
 AES_ENDPROC(aes_cbc_encrypt)

@@ -211,7 +212,7 @@ AES_ENTRY(aes_cbc_decrypt)
 	cbz		w6, .LcbcdecloopNx

 	ld1		{v7.16b}, [x5]			/* get iv */
-	dec_prepare	w3, x2, x5
+	dec_prepare	w3, x2, x6

 .LcbcdecloopNx:
 #if INTERLEAVE >= 2

@@ -248,7 +249,7 @@ AES_ENTRY(aes_cbc_decrypt)
 .Lcbcdecloop:
 	ld1		{v1.16b}, [x1], #16		/* get next ct block */
 	mov		v0.16b, v1.16b			/* ...and copy to v0 */
-	decrypt_block	v0, w3, x2, x5, w6
+	decrypt_block	v0, w3, x2, x6, w7
 	eor		v0.16b, v0.16b, v7.16b		/* xor with iv => pt */
 	mov		v7.16b, v1.16b			/* ct is next iv */
 	st1		{v0.16b}, [x0], #16

@@ -256,6 +257,7 @@ AES_ENTRY(aes_cbc_decrypt)
 	bne		.Lcbcdecloop
 .Lcbcdecout:
 	FRAME_POP
+	st1		{v7.16b}, [x5]			/* return iv */
 	ret
 AES_ENDPROC(aes_cbc_decrypt)

@@ -267,24 +269,15 @@ AES_ENDPROC(aes_cbc_decrypt)
 AES_ENTRY(aes_ctr_encrypt)
 	FRAME_PUSH
-	cbnz		w6, .Lctrfirst		/* 1st time around? */
-	umov		x5, v4.d[1]		/* keep swabbed ctr in reg */
-	rev		x5, x5
-#if INTERLEAVE >= 2
-	cmn		w5, w4			/* 32 bit overflow? */
-	bcs		.Lctrinc
-	add		x5, x5, #1		/* increment BE ctr */
-	b		.LctrincNx
-#else
-	b		.Lctrinc
-#endif
-.Lctrfirst:
+	cbz		w6, .Lctrnotfirst	/* 1st time around? */
 	enc_prepare	w3, x2, x6
 	ld1		{v4.16b}, [x5]
-	umov		x5, v4.d[1]		/* keep swabbed ctr in reg */
-	rev		x5, x5
+
+.Lctrnotfirst:
+	umov		x8, v4.d[1]		/* keep swabbed ctr in reg */
+	rev		x8, x8
 #if INTERLEAVE >= 2
-	cmn		w5, w4			/* 32 bit overflow? */
+	cmn		w8, w4			/* 32 bit overflow? */
 	bcs		.Lctrloop
 .LctrloopNx:
 	subs		w4, w4, #INTERLEAVE

@@ -292,11 +285,11 @@ AES_ENTRY(aes_ctr_encrypt)
 #if INTERLEAVE == 2
 	mov		v0.8b, v4.8b
 	mov		v1.8b, v4.8b
-	rev		x7, x5
-	add		x5, x5, #1
+	rev		x7, x8
+	add		x8, x8, #1
 	ins		v0.d[1], x7
-	rev		x7, x5
-	add		x5, x5, #1
+	rev		x7, x8
+	add		x8, x8, #1
 	ins		v1.d[1], x7
 	ld1		{v2.16b-v3.16b}, [x1], #32	/* get 2 input blocks */
 	do_encrypt_block2x

@@ -305,7 +298,7 @@ AES_ENTRY(aes_ctr_encrypt)
 	st1		{v0.16b-v1.16b}, [x0], #32
 #else
 	ldr		q8, =0x30000000200000001	/* addends 1,2,3[,0] */
-	dup		v7.4s, w5
+	dup		v7.4s, w8
 	mov		v0.16b, v4.16b
 	add		v7.4s, v7.4s, v8.4s
 	mov		v1.16b, v4.16b

@@ -323,18 +316,12 @@ AES_ENTRY(aes_ctr_encrypt)
 	eor		v2.16b, v7.16b, v2.16b
 	eor		v3.16b, v5.16b, v3.16b
 	st1		{v0.16b-v3.16b}, [x0], #64
-	add		x5, x5, #INTERLEAVE
+	add		x8, x8, #INTERLEAVE
 #endif
-	cbz		w4, .LctroutNx
-.LctrincNx:
-	rev		x7, x5
+	rev		x7, x8
 	ins		v4.d[1], x7
+	cbz		w4, .Lctrout
 	b		.LctrloopNx
-.LctroutNx:
-	sub		x5, x5, #1
-	rev		x7, x5
-	ins		v4.d[1], x7
-	b		.Lctrout
 .Lctr1x:
 	adds		w4, w4, #INTERLEAVE
 	beq		.Lctrout

@@ -342,30 +329,39 @@ AES_ENTRY(aes_ctr_encrypt)
 .Lctrloop:
 	mov		v0.16b, v4.16b
 	encrypt_block	v0, w3, x2, x6, w7
+
+	adds		x8, x8, #1		/* increment BE ctr */
+	rev		x7, x8
+	ins		v4.d[1], x7
+	bcs		.Lctrcarry		/* overflow? */
+
+.Lctrcarrydone:
 	subs		w4, w4, #1
 	bmi		.Lctrhalfblock		/* blocks < 0 means 1/2 block */
 	ld1		{v3.16b}, [x1], #16
 	eor		v3.16b, v0.16b, v3.16b
 	st1		{v3.16b}, [x0], #16
-	beq		.Lctrout
-.Lctrinc:
-	adds		x5, x5, #1		/* increment BE ctr */
-	rev		x7, x5
-	ins		v4.d[1], x7
-	bcc		.Lctrloop		/* no overflow? */
-	umov		x7, v4.d[0]		/* load upper word of ctr */
-	rev		x7, x7			/* ... to handle the carry */
-	add		x7, x7, #1
-	rev		x7, x7
-	ins		v4.d[0], x7
-	b		.Lctrloop
+	bne		.Lctrloop
+
+.Lctrout:
+	st1		{v4.16b}, [x5]		/* return next CTR value */
+	FRAME_POP
+	ret
+
 .Lctrhalfblock:
 	ld1		{v3.8b}, [x1]
 	eor		v3.8b, v0.8b, v3.8b
 	st1		{v3.8b}, [x0]
-.Lctrout:
 	FRAME_POP
 	ret
+
+.Lctrcarry:
+	umov		x7, v4.d[0]		/* load upper word of ctr */
+	rev		x7, x7			/* ... to handle the carry */
+	add		x7, x7, #1
+	rev		x7, x7
+	ins		v4.d[0], x7
+	b		.Lctrcarrydone
 AES_ENDPROC(aes_ctr_encrypt)
 	.ltorg
arch/x86/crypto/aesni-intel_glue.c

@@ -1024,7 +1024,8 @@ struct {
 	const char *basename;
 	struct simd_skcipher_alg *simd;
 } aesni_simd_skciphers2[] = {
-#if IS_ENABLED(CONFIG_CRYPTO_PCBC)
+#if (defined(MODULE) && IS_ENABLED(CONFIG_CRYPTO_PCBC)) || \
+    IS_BUILTIN(CONFIG_CRYPTO_PCBC)
 	{
 		.algname	= "pcbc(aes)",
 		.drvname	= "pcbc-aes-aesni",

@@ -1088,8 +1089,8 @@ static void aesni_free_simds(void)
 		    aesni_simd_skciphers[i]; i++)
 		simd_skcipher_free(aesni_simd_skciphers[i]);

-	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2) &&
-		    aesni_simd_skciphers2[i].simd; i++)
-		simd_skcipher_free(aesni_simd_skciphers2[i].simd);
+	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++)
+		if (aesni_simd_skciphers2[i].simd)
+			simd_skcipher_free(aesni_simd_skciphers2[i].simd);
 }

@@ -1171,7 +1172,7 @@ static int __init aesni_init(void)
 		simd = simd_skcipher_create_compat(algname, drvname, basename);
 		err = PTR_ERR(simd);
 		if (IS_ERR(simd))
-			goto unregister_simds;
+			continue;

 		aesni_simd_skciphers2[i].simd = simd;
 	}
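For context on the PCBC guard change in the first hunk above: IS_ENABLED() is true for a tristate symbol whether it is =y or =m, so an aesni-intel built into the kernel would try to register pcbc(aes) even when the pcbc template only exists as a module that is not loadable at that point of boot. The snippet below is a sketch of the <linux/kconfig.h> macro semantics, not code from this commit.

/*
 * Sketch only (not from this commit): how the kconfig helpers evaluate
 * for the tristate symbol CONFIG_CRYPTO_PCBC.
 *
 *   CONFIG_CRYPTO_PCBC=y      IS_BUILTIN()=1  IS_MODULE()=0  IS_ENABLED()=1
 *   CONFIG_CRYPTO_PCBC=m      IS_BUILTIN()=0  IS_MODULE()=1  IS_ENABLED()=1
 *   CONFIG_CRYPTO_PCBC unset  IS_BUILTIN()=0  IS_MODULE()=0  IS_ENABLED()=0
 *
 * defined(MODULE) is true only while aesni-intel itself is compiled as a
 * module.
 */
#include <linux/kconfig.h>

#if (defined(MODULE) && IS_ENABLED(CONFIG_CRYPTO_PCBC)) || \
    IS_BUILTIN(CONFIG_CRYPTO_PCBC)
/* pcbc(aes) entry kept: either pcbc is built in, or aesni is itself a
 * module, so the pcbc template can be loaded on demand. */
#endif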
crypto/algapi.c

@@ -356,6 +356,7 @@ int crypto_register_alg(struct crypto_alg *alg)
 	struct crypto_larval *larval;
 	int err;

+	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
 	err = crypto_check_alg(alg);
 	if (err)
 		return err;
crypto/algif_aead.c

@@ -661,9 +661,9 @@ static int aead_recvmsg_sync(struct socket *sock, struct msghdr *msg, int flags)
 unlock:
 	list_for_each_entry_safe(rsgl, tmp, &ctx->list, list) {
 		af_alg_free_sg(&rsgl->sgl);
+		list_del(&rsgl->list);
 		if (rsgl != &ctx->first_rsgl)
 			sock_kfree_s(sk, rsgl, sizeof(*rsgl));
-		list_del(&rsgl->list);
 	}
 	INIT_LIST_HEAD(&ctx->list);
 	aead_wmem_wakeup(sk);
drivers/crypto/ccp/ccp-dev-v5.c

@@ -959,7 +959,7 @@ static irqreturn_t ccp5_irq_handler(int irq, void *data)
 static void ccp5_config(struct ccp_device *ccp)
 {
 	/* Public side */
-	iowrite32(0x00001249, ccp->io_regs + CMD5_REQID_CONFIG_OFFSET);
+	iowrite32(0x0, ccp->io_regs + CMD5_REQID_CONFIG_OFFSET);
 }

 static void ccp5other_config(struct ccp_device *ccp)
drivers/crypto/ccp/ccp-dev.h

@@ -238,6 +238,7 @@ struct ccp_dma_chan {
 	struct ccp_device *ccp;

 	spinlock_t lock;
+	struct list_head created;
 	struct list_head pending;
 	struct list_head active;
 	struct list_head complete;
drivers/crypto/ccp/ccp-dmaengine.c

@@ -63,6 +63,7 @@ static void ccp_free_chan_resources(struct dma_chan *dma_chan)
 	ccp_free_desc_resources(chan->ccp, &chan->complete);
 	ccp_free_desc_resources(chan->ccp, &chan->active);
 	ccp_free_desc_resources(chan->ccp, &chan->pending);
+	ccp_free_desc_resources(chan->ccp, &chan->created);

 	spin_unlock_irqrestore(&chan->lock, flags);
 }

@@ -273,6 +274,7 @@ static dma_cookie_t ccp_tx_submit(struct dma_async_tx_descriptor *tx_desc)
 	spin_lock_irqsave(&chan->lock, flags);
 	cookie = dma_cookie_assign(tx_desc);
+	list_del(&desc->entry);
 	list_add_tail(&desc->entry, &chan->pending);
 	spin_unlock_irqrestore(&chan->lock, flags);

@@ -426,7 +428,7 @@ static struct ccp_dma_desc *ccp_create_desc(struct dma_chan *dma_chan,
 	spin_lock_irqsave(&chan->lock, sflags);

-	list_add_tail(&desc->entry, &chan->pending);
+	list_add_tail(&desc->entry, &chan->created);

 	spin_unlock_irqrestore(&chan->lock, sflags);

@@ -610,6 +612,7 @@ static int ccp_terminate_all(struct dma_chan *dma_chan)
 	/*TODO: Purge the complete list? */
 	ccp_free_desc_resources(chan->ccp, &chan->active);
 	ccp_free_desc_resources(chan->ccp, &chan->pending);
+	ccp_free_desc_resources(chan->ccp, &chan->created);

 	spin_unlock_irqrestore(&chan->lock, flags);

@@ -679,6 +682,7 @@ int ccp_dmaengine_register(struct ccp_device *ccp)
 		chan->ccp = ccp;

 		spin_lock_init(&chan->lock);
+		INIT_LIST_HEAD(&chan->created);
 		INIT_LIST_HEAD(&chan->pending);
 		INIT_LIST_HEAD(&chan->active);
 		INIT_LIST_HEAD(&chan->complete);
drivers/crypto/chelsio/chcr_algo.c

@@ -158,7 +158,7 @@ int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
 	case CRYPTO_ALG_TYPE_AEAD:
 		ctx_req.req.aead_req = (struct aead_request *)req;
 		ctx_req.ctx.reqctx = aead_request_ctx(ctx_req.req.aead_req);
-		dma_unmap_sg(&u_ctx->lldi.pdev->dev, ctx_req.req.aead_req->dst,
+		dma_unmap_sg(&u_ctx->lldi.pdev->dev, ctx_req.ctx.reqctx->dst,
 			     ctx_req.ctx.reqctx->dst_nents, DMA_FROM_DEVICE);
 		if (ctx_req.ctx.reqctx->skb) {
 			kfree_skb(ctx_req.ctx.reqctx->skb);

@@ -1362,8 +1362,7 @@ static struct sk_buff *create_authenc_wr(struct aead_request *req,
 	struct chcr_wr *chcr_req;
 	struct cpl_rx_phys_dsgl *phys_cpl;
 	struct phys_sge_parm sg_param;
-	struct scatterlist *src, *dst;
-	struct scatterlist src_sg[2], dst_sg[2];
+	struct scatterlist *src;
 	unsigned int frags = 0, transhdr_len;
 	unsigned int ivsize = crypto_aead_ivsize(tfm), dst_size = 0;
 	unsigned int   kctx_len = 0;

@@ -1383,19 +1382,21 @@ static struct sk_buff *create_authenc_wr(struct aead_request *req,
 	if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
 		goto err;
-	src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
-	dst = src;
+	src = scatterwalk_ffwd(reqctx->srcffwd, req->src, req->assoclen);
+	reqctx->dst = src;
 	if (req->src != req->dst) {
 		err = chcr_copy_assoc(req, aeadctx);
 		if (err)
 			return ERR_PTR(err);
-		dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
+		reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd, req->dst,
+					       req->assoclen);
 	}
 	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_NULL) {
 		null = 1;
 		assoclen = 0;
 	}
-	reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
+	reqctx->dst_nents = sg_nents_for_len(reqctx->dst, req->cryptlen +
 					     (op_type ? -authsize : authsize));
 	if (reqctx->dst_nents <= 0) {
 		pr_err("AUTHENC:Invalid Destination sg entries\n");

@@ -1460,7 +1461,7 @@ static struct sk_buff *create_authenc_wr(struct aead_request *req,
 	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
 	sg_param.qid = qid;
 	sg_param.align = 0;
-	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
+	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, reqctx->dst,
 				 &sg_param))
 		goto dstmap_fail;

@@ -1711,8 +1712,7 @@ static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
 	struct chcr_wr *chcr_req;
 	struct cpl_rx_phys_dsgl *phys_cpl;
 	struct phys_sge_parm sg_param;
-	struct scatterlist *src, *dst;
-	struct scatterlist src_sg[2], dst_sg[2];
+	struct scatterlist *src;
 	unsigned int frags = 0, transhdr_len, ivsize = AES_BLOCK_SIZE;
 	unsigned int dst_size = 0, kctx_len;
 	unsigned int sub_type;

@@ -1728,17 +1728,19 @@ static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
 	if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
 		goto err;
 	sub_type = get_aead_subtype(tfm);
-	src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
-	dst = src;
+	src = scatterwalk_ffwd(reqctx->srcffwd, req->src, req->assoclen);
+	reqctx->dst = src;
 	if (req->src != req->dst) {
 		err = chcr_copy_assoc(req, aeadctx);
 		if (err) {
 			pr_err("AAD copy to destination buffer fails\n");
 			return ERR_PTR(err);
 		}
-		dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
+		reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd, req->dst,
+					       req->assoclen);
 	}
-	reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
+	reqctx->dst_nents = sg_nents_for_len(reqctx->dst, req->cryptlen +
 					     (op_type ? -authsize : authsize));
 	if (reqctx->dst_nents <= 0) {
 		pr_err("CCM:Invalid Destination sg entries\n");

@@ -1777,7 +1779,7 @@ static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
 	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
 	sg_param.qid = qid;
 	sg_param.align = 0;
-	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
+	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, reqctx->dst,
 				 &sg_param))
 		goto dstmap_fail;

@@ -1809,8 +1811,7 @@ static struct sk_buff *create_gcm_wr(struct aead_request *req,
 	struct chcr_wr *chcr_req;
 	struct cpl_rx_phys_dsgl *phys_cpl;
 	struct phys_sge_parm sg_param;
-	struct scatterlist *src, *dst;
-	struct scatterlist src_sg[2], dst_sg[2];
+	struct scatterlist *src;
 	unsigned int frags = 0, transhdr_len;
 	unsigned int ivsize = AES_BLOCK_SIZE;
 	unsigned int dst_size = 0, kctx_len;

@@ -1832,13 +1833,14 @@ static struct sk_buff *create_gcm_wr(struct aead_request *req,
 	if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
 		goto err;
-	src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
-	dst = src;
+	src = scatterwalk_ffwd(reqctx->srcffwd, req->src, req->assoclen);
+	reqctx->dst = src;
 	if (req->src != req->dst) {
 		err = chcr_copy_assoc(req, aeadctx);
 		if (err)
 			return ERR_PTR(err);
-		dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
+		reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd, req->dst,
+					       req->assoclen);
 	}

 	if (!req->cryptlen)

@@ -1848,7 +1850,7 @@ static struct sk_buff *create_gcm_wr(struct aead_request *req,
 		crypt_len = AES_BLOCK_SIZE;
 	else
 		crypt_len = req->cryptlen;
-	reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
+	reqctx->dst_nents = sg_nents_for_len(reqctx->dst, req->cryptlen +
 					     (op_type ? -authsize : authsize));
 	if (reqctx->dst_nents <= 0) {
 		pr_err("GCM:Invalid Destination sg entries\n");

@@ -1923,7 +1925,7 @@ static struct sk_buff *create_gcm_wr(struct aead_request *req,
 	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
 	sg_param.qid = qid;
 	sg_param.align = 0;
-	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
+	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, reqctx->dst,
 				 &sg_param))
 		goto dstmap_fail;

@@ -1937,7 +1939,8 @@ static struct sk_buff *create_gcm_wr(struct aead_request *req,
 		write_sg_to_skb(skb, &frags, src, req->cryptlen);
 	} else {
 		aes_gcm_empty_pld_pad(req->dst, authsize - 1);
-		write_sg_to_skb(skb, &frags, dst, crypt_len);
+		write_sg_to_skb(skb, &frags, reqctx->dst, crypt_len);
+
 	}

 	create_wreq(ctx, chcr_req, req, skb, kctx_len, size, 1,

@@ -2189,8 +2192,8 @@ static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
 	unsigned int ck_size;
 	int ret = 0, key_ctx_size = 0;

-	if (get_aead_subtype(aead) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
+	if (get_aead_subtype(aead) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106 &&
+	    keylen > 3) {
 		keylen -= 4;  /* nonce/salt is present in the last 4 bytes */
 		memcpy(aeadctx->salt, key + keylen, 4);
 	}
drivers/crypto/chelsio/chcr_core.c

@@ -52,6 +52,7 @@ static struct cxgb4_uld_info chcr_uld_info = {
 int assign_chcr_device(struct chcr_dev **dev)
 {
 	struct uld_ctx *u_ctx;
+	int ret = -ENXIO;

 	/*
 	 * Which device to use if multiple devices are available TODO

@@ -59,15 +60,14 @@ int assign_chcr_device(struct chcr_dev **dev)
 	 * must go to the same device to maintain the ordering.
 	 */
 	mutex_lock(&dev_mutex); /* TODO ? */
-	u_ctx = list_first_entry(&uld_ctx_list, struct uld_ctx, entry);
-	if (!u_ctx) {
-		mutex_unlock(&dev_mutex);
-		return -ENXIO;
-	}
-
-	*dev = u_ctx->dev;
+	list_for_each_entry(u_ctx, &uld_ctx_list, entry)
+		if (u_ctx && u_ctx->dev) {
+			*dev = u_ctx->dev;
+			ret = 0;
+			break;
+		}
 	mutex_unlock(&dev_mutex);
-	return 0;
+	return ret;
 }

 static int chcr_dev_add(struct uld_ctx *u_ctx)

@@ -202,10 +202,8 @@ static int chcr_uld_state_change(void *handle, enum cxgb4_state state)
 static int __init chcr_crypto_init(void)
 {
-	if (cxgb4_register_uld(CXGB4_ULD_CRYPTO, &chcr_uld_info)) {
+	if (cxgb4_register_uld(CXGB4_ULD_CRYPTO, &chcr_uld_info))
 		pr_err("ULD register fail: No chcr crypto support in cxgb4");
-		return -1;
-	}

 	return 0;
 }
drivers/crypto/chelsio/chcr_crypto.h

@@ -158,6 +158,9 @@ struct ablk_ctx {
 };
 struct chcr_aead_reqctx {
 	struct	sk_buff	*skb;
+	struct scatterlist *dst;
+	struct scatterlist srcffwd[2];
+	struct scatterlist dstffwd[2];
 	short int dst_nents;
 	u16 verify;
 	u8 iv[CHCR_MAX_CRYPTO_IV_LEN];