Commit 698592e2 authored by Gilad Ben-Yossef, committed by Greg Kroah-Hartman

staging: ccree: CamelCase to snake_case in aead struct

Rename struct aead_req_ctx fields from CamelCase to snake_case.
Signed-off-by: Gilad Ben-Yossef <gilad@benyossef.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
parent 42886fab
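
For quick reference, a minimal sketch of the renamed members of struct aead_req_ctx as they read after this patch (abridged; only the fields touched by the hunks below are shown, other members are elided):

struct aead_req_ctx {
	/* ... */
	struct {
		u8 len_a[GCM_BLOCK_LEN_SIZE] ____cacheline_aligned;	/* was: lenA */
		u8 len_c[GCM_BLOCK_LEN_SIZE];				/* was: lenC */
	} gcm_len_block;
	/* ... */
	struct scatterlist *src_sgl;	/* was: srcSgl */
	struct scatterlist *dst_sgl;	/* was: dstSgl */
	unsigned int src_offset;	/* was: srcOffset */
	unsigned int dst_offset;	/* was: dstOffset */
	/* ... */
};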
@@ -252,8 +252,8 @@ static void ssi_aead_complete(struct device *dev, void *ssi_req, void __iomem *c
 	} else { /*ENCRYPT*/
 		if (unlikely(areq_ctx->is_icv_fragmented))
 			ssi_buffer_mgr_copy_scatterlist_portion(
-				areq_ctx->mac_buf, areq_ctx->dstSgl, areq->cryptlen + areq_ctx->dstOffset,
-				areq->cryptlen + areq_ctx->dstOffset + ctx->authsize, SSI_SG_FROM_BUF);
+				areq_ctx->mac_buf, areq_ctx->dst_sgl, areq->cryptlen + areq_ctx->dst_offset,
+				areq->cryptlen + areq_ctx->dst_offset + ctx->authsize, SSI_SG_FROM_BUF);

 		/* If an IV was generated, copy it back to the user provided buffer. */
 		if (areq_ctx->backup_giv) {
@@ -777,11 +777,11 @@ ssi_aead_process_authenc_data_desc(
 {
 	struct scatterlist *cipher =
 		(direct == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
-		areq_ctx->dstSgl : areq_ctx->srcSgl;
+		areq_ctx->dst_sgl : areq_ctx->src_sgl;
 	unsigned int offset =
 		(direct == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
-		areq_ctx->dstOffset : areq_ctx->srcOffset;
+		areq_ctx->dst_offset : areq_ctx->src_offset;

 	SSI_LOG_DEBUG("AUTHENC: SRC/DST buffer type DLLI\n");
 	hw_desc_init(&desc[idx]);
 	set_din_type(&desc[idx], DMA_DLLI,
@@ -843,11 +843,11 @@ ssi_aead_process_cipher_data_desc(
 		SSI_LOG_DEBUG("CIPHER: SRC/DST buffer type DLLI\n");
 		hw_desc_init(&desc[idx]);
 		set_din_type(&desc[idx], DMA_DLLI,
-			     (sg_dma_address(areq_ctx->srcSgl) +
-			      areq_ctx->srcOffset), areq_ctx->cryptlen, NS_BIT);
+			     (sg_dma_address(areq_ctx->src_sgl) +
+			      areq_ctx->src_offset), areq_ctx->cryptlen, NS_BIT);
 		set_dout_dlli(&desc[idx],
-			      (sg_dma_address(areq_ctx->dstSgl) +
-			       areq_ctx->dstOffset),
+			      (sg_dma_address(areq_ctx->dst_sgl) +
+			       areq_ctx->dst_offset),
 			      areq_ctx->cryptlen, NS_BIT, 0);
 		set_flow_mode(&desc[idx], flow_mode);
 		break;
@@ -1879,7 +1879,7 @@ static inline void ssi_aead_dump_gcm(
 	dump_byte_array("mac_buf", req_ctx->mac_buf, AES_BLOCK_SIZE);
-	dump_byte_array("gcm_len_block", req_ctx->gcm_len_block.lenA, AES_BLOCK_SIZE);
+	dump_byte_array("gcm_len_block", req_ctx->gcm_len_block.len_a, AES_BLOCK_SIZE);

 	if (req->src && req->cryptlen)
 		dump_byte_array("req->src", sg_virt(req->src), req->cryptlen + req->assoclen);
@@ -1918,16 +1918,16 @@ static int config_gcm_context(struct aead_request *req)
 		__be64 temp64;

 		temp64 = cpu_to_be64(req->assoclen * 8);
-		memcpy(&req_ctx->gcm_len_block.lenA, &temp64, sizeof(temp64));
+		memcpy(&req_ctx->gcm_len_block.len_a, &temp64, sizeof(temp64));
 		temp64 = cpu_to_be64(cryptlen * 8);
-		memcpy(&req_ctx->gcm_len_block.lenC, &temp64, 8);
+		memcpy(&req_ctx->gcm_len_block.len_c, &temp64, 8);
 	} else { //rfc4543=> all data(AAD,IV,Plain) are considered additional data that is nothing is encrypted.
 		__be64 temp64;

 		temp64 = cpu_to_be64((req->assoclen + GCM_BLOCK_RFC4_IV_SIZE + cryptlen) * 8);
-		memcpy(&req_ctx->gcm_len_block.lenA, &temp64, sizeof(temp64));
+		memcpy(&req_ctx->gcm_len_block.len_a, &temp64, sizeof(temp64));
 		temp64 = 0;
-		memcpy(&req_ctx->gcm_len_block.lenC, &temp64, 8);
+		memcpy(&req_ctx->gcm_len_block.len_c, &temp64, 8);
 	}

 	return 0;
...
@@ -69,8 +69,8 @@ struct aead_req_ctx {
 	u8 gcm_iv_inc2[AES_BLOCK_SIZE] ____cacheline_aligned;
 	u8 hkey[AES_BLOCK_SIZE] ____cacheline_aligned;
 	struct {
-		u8 lenA[GCM_BLOCK_LEN_SIZE] ____cacheline_aligned;
-		u8 lenC[GCM_BLOCK_LEN_SIZE];
+		u8 len_a[GCM_BLOCK_LEN_SIZE] ____cacheline_aligned;
+		u8 len_c[GCM_BLOCK_LEN_SIZE];
 	} gcm_len_block;

 	u8 ccm_config[CCM_CONFIG_BUF_SIZE] ____cacheline_aligned;
@@ -94,10 +94,10 @@ struct aead_req_ctx {
 	struct ssi_mlli assoc;
 	struct ssi_mlli src;
 	struct ssi_mlli dst;
-	struct scatterlist *srcSgl;
-	struct scatterlist *dstSgl;
-	unsigned int srcOffset;
-	unsigned int dstOffset;
+	struct scatterlist *src_sgl;
+	struct scatterlist *dst_sgl;
+	unsigned int src_offset;
+	unsigned int dst_offset;
 	enum ssi_req_dma_buf_type assoc_buff_type;
 	enum ssi_req_dma_buf_type data_buff_type;
 	struct mlli_params mlli_params;
...
@@ -915,26 +915,26 @@ static inline void ssi_buffer_mgr_prepare_aead_data_dlli(
 	if (likely(req->src == req->dst)) {
 		/*INPLACE*/
 		areq_ctx->icv_dma_addr = sg_dma_address(
-			areq_ctx->srcSgl) +
+			areq_ctx->src_sgl) +
 			(*src_last_bytes - authsize);
 		areq_ctx->icv_virt_addr = sg_virt(
-			areq_ctx->srcSgl) +
+			areq_ctx->src_sgl) +
 			(*src_last_bytes - authsize);
 	} else if (direct == DRV_CRYPTO_DIRECTION_DECRYPT) {
 		/*NON-INPLACE and DECRYPT*/
 		areq_ctx->icv_dma_addr = sg_dma_address(
-			areq_ctx->srcSgl) +
+			areq_ctx->src_sgl) +
 			(*src_last_bytes - authsize);
 		areq_ctx->icv_virt_addr = sg_virt(
-			areq_ctx->srcSgl) +
+			areq_ctx->src_sgl) +
 			(*src_last_bytes - authsize);
 	} else {
 		/*NON-INPLACE and ENCRYPT*/
 		areq_ctx->icv_dma_addr = sg_dma_address(
-			areq_ctx->dstSgl) +
+			areq_ctx->dst_sgl) +
 			(*dst_last_bytes - authsize);
 		areq_ctx->icv_virt_addr = sg_virt(
-			areq_ctx->dstSgl) +
+			areq_ctx->dst_sgl) +
 			(*dst_last_bytes - authsize);
 	}
 }
@@ -956,13 +956,13 @@ static inline int ssi_buffer_mgr_prepare_aead_data_mlli(
 		/*INPLACE*/
 		ssi_buffer_mgr_add_scatterlist_entry(sg_data,
 						     areq_ctx->src.nents,
-						     areq_ctx->srcSgl,
+						     areq_ctx->src_sgl,
 						     areq_ctx->cryptlen,
-						     areq_ctx->srcOffset,
+						     areq_ctx->src_offset,
 						     is_last_table,
 						     &areq_ctx->src.mlli_nents);
-		icv_nents = ssi_buffer_mgr_get_aead_icv_nents(areq_ctx->srcSgl,
+		icv_nents = ssi_buffer_mgr_get_aead_icv_nents(areq_ctx->src_sgl,
 							      areq_ctx->src.nents,
 							      authsize,
 							      *src_last_bytes,
@@ -1004,10 +1004,10 @@ static inline int ssi_buffer_mgr_prepare_aead_data_mlli(
 		} else { /* Contig. ICV */
 			/*Should hanlde if the sg is not contig.*/
 			areq_ctx->icv_dma_addr = sg_dma_address(
-				&areq_ctx->srcSgl[areq_ctx->src.nents - 1]) +
+				&areq_ctx->src_sgl[areq_ctx->src.nents - 1]) +
 				(*src_last_bytes - authsize);
 			areq_ctx->icv_virt_addr = sg_virt(
-				&areq_ctx->srcSgl[areq_ctx->src.nents - 1]) +
+				&areq_ctx->src_sgl[areq_ctx->src.nents - 1]) +
 				(*src_last_bytes - authsize);
 		}
@@ -1015,20 +1015,20 @@ static inline int ssi_buffer_mgr_prepare_aead_data_mlli(
 		/*NON-INPLACE and DECRYPT*/
 		ssi_buffer_mgr_add_scatterlist_entry(sg_data,
 						     areq_ctx->src.nents,
-						     areq_ctx->srcSgl,
+						     areq_ctx->src_sgl,
 						     areq_ctx->cryptlen,
-						     areq_ctx->srcOffset,
+						     areq_ctx->src_offset,
 						     is_last_table,
 						     &areq_ctx->src.mlli_nents);
 		ssi_buffer_mgr_add_scatterlist_entry(sg_data,
 						     areq_ctx->dst.nents,
-						     areq_ctx->dstSgl,
+						     areq_ctx->dst_sgl,
 						     areq_ctx->cryptlen,
-						     areq_ctx->dstOffset,
+						     areq_ctx->dst_offset,
 						     is_last_table,
 						     &areq_ctx->dst.mlli_nents);
-		icv_nents = ssi_buffer_mgr_get_aead_icv_nents(areq_ctx->srcSgl,
+		icv_nents = ssi_buffer_mgr_get_aead_icv_nents(areq_ctx->src_sgl,
 							      areq_ctx->src.nents,
 							      authsize,
 							      *src_last_bytes,
@@ -1056,10 +1056,10 @@ static inline int ssi_buffer_mgr_prepare_aead_data_mlli(
 		} else { /* Contig. ICV */
 			/*Should hanlde if the sg is not contig.*/
 			areq_ctx->icv_dma_addr = sg_dma_address(
-				&areq_ctx->srcSgl[areq_ctx->src.nents - 1]) +
+				&areq_ctx->src_sgl[areq_ctx->src.nents - 1]) +
 				(*src_last_bytes - authsize);
 			areq_ctx->icv_virt_addr = sg_virt(
-				&areq_ctx->srcSgl[areq_ctx->src.nents - 1]) +
+				&areq_ctx->src_sgl[areq_ctx->src.nents - 1]) +
 				(*src_last_bytes - authsize);
 		}
@@ -1067,20 +1067,20 @@ static inline int ssi_buffer_mgr_prepare_aead_data_mlli(
 		/*NON-INPLACE and ENCRYPT*/
 		ssi_buffer_mgr_add_scatterlist_entry(sg_data,
 						     areq_ctx->dst.nents,
-						     areq_ctx->dstSgl,
+						     areq_ctx->dst_sgl,
 						     areq_ctx->cryptlen,
-						     areq_ctx->dstOffset,
+						     areq_ctx->dst_offset,
 						     is_last_table,
 						     &areq_ctx->dst.mlli_nents);
 		ssi_buffer_mgr_add_scatterlist_entry(sg_data,
 						     areq_ctx->src.nents,
-						     areq_ctx->srcSgl,
+						     areq_ctx->src_sgl,
 						     areq_ctx->cryptlen,
-						     areq_ctx->srcOffset,
+						     areq_ctx->src_offset,
 						     is_last_table,
 						     &areq_ctx->src.mlli_nents);
-		icv_nents = ssi_buffer_mgr_get_aead_icv_nents(areq_ctx->dstSgl,
+		icv_nents = ssi_buffer_mgr_get_aead_icv_nents(areq_ctx->dst_sgl,
 							      areq_ctx->dst.nents,
 							      authsize,
 							      *dst_last_bytes,
@@ -1093,10 +1093,10 @@ static inline int ssi_buffer_mgr_prepare_aead_data_mlli(
 	if (likely(!areq_ctx->is_icv_fragmented)) {
 		/* Contig. ICV */
 		areq_ctx->icv_dma_addr = sg_dma_address(
-			&areq_ctx->dstSgl[areq_ctx->dst.nents - 1]) +
+			&areq_ctx->dst_sgl[areq_ctx->dst.nents - 1]) +
 			(*dst_last_bytes - authsize);
 		areq_ctx->icv_virt_addr = sg_virt(
-			&areq_ctx->dstSgl[areq_ctx->dst.nents - 1]) +
+			&areq_ctx->dst_sgl[areq_ctx->dst.nents - 1]) +
 			(*dst_last_bytes - authsize);
 	} else {
 		areq_ctx->icv_dma_addr = areq_ctx->mac_buf_dma_addr;
@@ -1138,25 +1138,25 @@ static inline int ssi_buffer_mgr_aead_chain_data(
 		rc = -EINVAL;
 		goto chain_data_exit;
 	}
-	areq_ctx->srcSgl = req->src;
-	areq_ctx->dstSgl = req->dst;
+	areq_ctx->src_sgl = req->src;
+	areq_ctx->dst_sgl = req->dst;

 	if (is_gcm4543)
 		size_for_map += crypto_aead_ivsize(tfm);

 	size_for_map += (direct == DRV_CRYPTO_DIRECTION_ENCRYPT) ? authsize : 0;
 	src_mapped_nents = ssi_buffer_mgr_get_sgl_nents(req->src, size_for_map, &src_last_bytes, &chained);
-	sg_index = areq_ctx->srcSgl->length;
+	sg_index = areq_ctx->src_sgl->length;
 	//check where the data starts
 	while (sg_index <= size_to_skip) {
-		offset -= areq_ctx->srcSgl->length;
-		areq_ctx->srcSgl = sg_next(areq_ctx->srcSgl);
+		offset -= areq_ctx->src_sgl->length;
+		areq_ctx->src_sgl = sg_next(areq_ctx->src_sgl);
 		//if have reached the end of the sgl, then this is unexpected
-		if (!areq_ctx->srcSgl) {
+		if (!areq_ctx->src_sgl) {
 			SSI_LOG_ERR("reached end of sg list. unexpected\n");
 			BUG();
 		}
-		sg_index += areq_ctx->srcSgl->length;
+		sg_index += areq_ctx->src_sgl->length;
 		src_mapped_nents--;
 	}
 	if (unlikely(src_mapped_nents > LLI_MAX_NUM_OF_DATA_ENTRIES)) {
@@ -1167,7 +1167,7 @@ static inline int ssi_buffer_mgr_aead_chain_data(
 	areq_ctx->src.nents = src_mapped_nents;
-	areq_ctx->srcOffset = offset;
+	areq_ctx->src_offset = offset;

 	if (req->src != req->dst) {
 		size_for_map = req->assoclen + req->cryptlen;
@@ -1188,19 +1188,19 @@ static inline int ssi_buffer_mgr_aead_chain_data(
 	}
 	dst_mapped_nents = ssi_buffer_mgr_get_sgl_nents(req->dst, size_for_map, &dst_last_bytes, &chained);
-	sg_index = areq_ctx->dstSgl->length;
+	sg_index = areq_ctx->dst_sgl->length;
 	offset = size_to_skip;

 	//check where the data starts
 	while (sg_index <= size_to_skip) {
-		offset -= areq_ctx->dstSgl->length;
-		areq_ctx->dstSgl = sg_next(areq_ctx->dstSgl);
+		offset -= areq_ctx->dst_sgl->length;
+		areq_ctx->dst_sgl = sg_next(areq_ctx->dst_sgl);
 		//if have reached the end of the sgl, then this is unexpected
-		if (!areq_ctx->dstSgl) {
+		if (!areq_ctx->dst_sgl) {
 			SSI_LOG_ERR("reached end of sg list. unexpected\n");
 			BUG();
 		}
-		sg_index += areq_ctx->dstSgl->length;
+		sg_index += areq_ctx->dst_sgl->length;
 		dst_mapped_nents--;
 	}
 	if (unlikely(dst_mapped_nents > LLI_MAX_NUM_OF_DATA_ENTRIES)) {
@@ -1209,7 +1209,7 @@
 		return -ENOMEM;
 	}
 	areq_ctx->dst.nents = dst_mapped_nents;
-	areq_ctx->dstOffset = offset;
+	areq_ctx->dst_offset = offset;
 	if ((src_mapped_nents > 1) ||
 	    (dst_mapped_nents > 1) ||
 	    do_chain) {
...