Commit 35344cf3 authored by Taehee Yoo's avatar Taehee Yoo Committed by Herbert Xu

crypto: x86/aria - do not use magic number offsets of aria_ctx

The aria-avx assembly code accesses members of struct aria_ctx using
magic-number offsets. If the layout of struct aria_ctx is changed
carelessly, aria-avx will stop working.
So, we need to ensure that members of aria_ctx are accessed with
correct offset values, not with magic numbers.

This patch adds ARIA_CTX_enc_key, ARIA_CTX_dec_key, and ARIA_CTX_rounds
to asm-offsets.c, so that correct offset definitions will be generated.
The aria-avx assembly code can then safely access members of aria_ctx
through these definitions.
Signed-off-by: default avatarTaehee Yoo <ap420073@gmail.com>
Signed-off-by: default avatarHerbert Xu <herbert@gondor.apana.org.au>
parent 8e7d7ce2
...@@ -8,13 +8,9 @@ ...@@ -8,13 +8,9 @@
#include <linux/linkage.h> #include <linux/linkage.h>
#include <linux/cfi_types.h> #include <linux/cfi_types.h>
#include <asm/asm-offsets.h>
#include <asm/frame.h> #include <asm/frame.h>
/* struct aria_ctx: */
#define enc_key 0
#define dec_key 272
#define rounds 544
/* register macros */ /* register macros */
#define CTX %rdi #define CTX %rdi
...@@ -874,7 +870,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_crypt_16way) ...@@ -874,7 +870,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_crypt_16way)
aria_fo(%xmm9, %xmm8, %xmm11, %xmm10, %xmm12, %xmm13, %xmm14, %xmm15, aria_fo(%xmm9, %xmm8, %xmm11, %xmm10, %xmm12, %xmm13, %xmm14, %xmm15,
%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
%rax, %r9, 10); %rax, %r9, 10);
cmpl $12, rounds(CTX); cmpl $12, ARIA_CTX_rounds(CTX);
jne .Laria_192; jne .Laria_192;
aria_ff(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7, aria_ff(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
%xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14, %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
...@@ -887,7 +883,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_crypt_16way) ...@@ -887,7 +883,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_crypt_16way)
aria_fo(%xmm9, %xmm8, %xmm11, %xmm10, %xmm12, %xmm13, %xmm14, %xmm15, aria_fo(%xmm9, %xmm8, %xmm11, %xmm10, %xmm12, %xmm13, %xmm14, %xmm15,
%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
%rax, %r9, 12); %rax, %r9, 12);
cmpl $14, rounds(CTX); cmpl $14, ARIA_CTX_rounds(CTX);
jne .Laria_256; jne .Laria_256;
aria_ff(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7, aria_ff(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
%xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14, %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
...@@ -923,7 +919,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_encrypt_16way) ...@@ -923,7 +919,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_encrypt_16way)
FRAME_BEGIN FRAME_BEGIN
leaq enc_key(CTX), %r9; leaq ARIA_CTX_enc_key(CTX), %r9;
inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7, inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
%xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14, %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
...@@ -948,7 +944,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_decrypt_16way) ...@@ -948,7 +944,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_decrypt_16way)
FRAME_BEGIN FRAME_BEGIN
leaq dec_key(CTX), %r9; leaq ARIA_CTX_dec_key(CTX), %r9;
inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7, inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
%xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14, %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
...@@ -1056,7 +1052,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_ctr_crypt_16way) ...@@ -1056,7 +1052,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_ctr_crypt_16way)
leaq (%rdx), %r11; leaq (%rdx), %r11;
leaq (%rcx), %rsi; leaq (%rcx), %rsi;
leaq (%rcx), %rdx; leaq (%rcx), %rdx;
leaq enc_key(CTX), %r9; leaq ARIA_CTX_enc_key(CTX), %r9;
call __aria_aesni_avx_crypt_16way; call __aria_aesni_avx_crypt_16way;
...@@ -1157,7 +1153,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_gfni_crypt_16way) ...@@ -1157,7 +1153,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_gfni_crypt_16way)
%xmm0, %xmm1, %xmm2, %xmm3, %xmm0, %xmm1, %xmm2, %xmm3,
%xmm4, %xmm5, %xmm6, %xmm7, %xmm4, %xmm5, %xmm6, %xmm7,
%rax, %r9, 10); %rax, %r9, 10);
cmpl $12, rounds(CTX); cmpl $12, ARIA_CTX_rounds(CTX);
jne .Laria_gfni_192; jne .Laria_gfni_192;
aria_ff_gfni(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7, aria_ff_gfni(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
%xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14, %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
...@@ -1174,7 +1170,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_gfni_crypt_16way) ...@@ -1174,7 +1170,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_gfni_crypt_16way)
%xmm0, %xmm1, %xmm2, %xmm3, %xmm0, %xmm1, %xmm2, %xmm3,
%xmm4, %xmm5, %xmm6, %xmm7, %xmm4, %xmm5, %xmm6, %xmm7,
%rax, %r9, 12); %rax, %r9, 12);
cmpl $14, rounds(CTX); cmpl $14, ARIA_CTX_rounds(CTX);
jne .Laria_gfni_256; jne .Laria_gfni_256;
aria_ff_gfni(%xmm1, %xmm0, %xmm3, %xmm2, aria_ff_gfni(%xmm1, %xmm0, %xmm3, %xmm2,
%xmm4, %xmm5, %xmm6, %xmm7, %xmm4, %xmm5, %xmm6, %xmm7,
...@@ -1218,7 +1214,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_gfni_encrypt_16way) ...@@ -1218,7 +1214,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_gfni_encrypt_16way)
FRAME_BEGIN FRAME_BEGIN
leaq enc_key(CTX), %r9; leaq ARIA_CTX_enc_key(CTX), %r9;
inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7, inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
%xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14, %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
...@@ -1243,7 +1239,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_gfni_decrypt_16way) ...@@ -1243,7 +1239,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_gfni_decrypt_16way)
FRAME_BEGIN FRAME_BEGIN
leaq dec_key(CTX), %r9; leaq ARIA_CTX_dec_key(CTX), %r9;
inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7, inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
%xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14, %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
...@@ -1275,7 +1271,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_gfni_ctr_crypt_16way) ...@@ -1275,7 +1271,7 @@ SYM_TYPED_FUNC_START(aria_aesni_avx_gfni_ctr_crypt_16way)
leaq (%rdx), %r11; leaq (%rdx), %r11;
leaq (%rcx), %rsi; leaq (%rcx), %rsi;
leaq (%rcx), %rdx; leaq (%rcx), %rdx;
leaq enc_key(CTX), %r9; leaq ARIA_CTX_enc_key(CTX), %r9;
call __aria_aesni_avx_gfni_crypt_16way; call __aria_aesni_avx_gfni_crypt_16way;
......
...@@ -7,6 +7,7 @@ ...@@ -7,6 +7,7 @@
#define COMPILE_OFFSETS #define COMPILE_OFFSETS
#include <linux/crypto.h> #include <linux/crypto.h>
#include <crypto/aria.h>
#include <linux/sched.h> #include <linux/sched.h>
#include <linux/stddef.h> #include <linux/stddef.h>
#include <linux/hardirq.h> #include <linux/hardirq.h>
...@@ -111,5 +112,12 @@ static void __used common(void) ...@@ -111,5 +112,12 @@ static void __used common(void)
#ifdef CONFIG_CALL_DEPTH_TRACKING #ifdef CONFIG_CALL_DEPTH_TRACKING
OFFSET(X86_call_depth, pcpu_hot, call_depth); OFFSET(X86_call_depth, pcpu_hot, call_depth);
#endif #endif
#if IS_ENABLED(CONFIG_CRYPTO_ARIA_AESNI_AVX_X86_64)
/* Offset for fields in aria_ctx */
BLANK();
OFFSET(ARIA_CTX_enc_key, aria_ctx, enc_key);
OFFSET(ARIA_CTX_dec_key, aria_ctx, dec_key);
OFFSET(ARIA_CTX_rounds, aria_ctx, rounds);
#endif
} }
...@@ -178,6 +178,10 @@ int aria_set_key(struct crypto_tfm *tfm, const u8 *in_key, unsigned int key_len) ...@@ -178,6 +178,10 @@ int aria_set_key(struct crypto_tfm *tfm, const u8 *in_key, unsigned int key_len)
if (key_len != 16 && key_len != 24 && key_len != 32) if (key_len != 16 && key_len != 24 && key_len != 32)
return -EINVAL; return -EINVAL;
BUILD_BUG_ON(sizeof(ctx->enc_key) != 272);
BUILD_BUG_ON(sizeof(ctx->dec_key) != 272);
BUILD_BUG_ON(sizeof(int) != sizeof(ctx->rounds));
ctx->key_length = key_len; ctx->key_length = key_len;
ctx->rounds = (key_len + 32) / 4; ctx->rounds = (key_len + 32) / 4;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment