/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */
8 #include <crypto/algapi.h>
9 #include <crypto/internal/simd.h>
10 #include <crypto/aria.h>
11 #include <linux/crypto.h>
12 #include <linux/err.h>
13 #include <linux/module.h>
14 #include <linux/types.h>
16 #include "ecb_cbc_helpers.h"
19 asmlinkage
void aria_aesni_avx_encrypt_16way(const void *ctx
, u8
*dst
,
21 EXPORT_SYMBOL_GPL(aria_aesni_avx_encrypt_16way
);
22 asmlinkage
void aria_aesni_avx_decrypt_16way(const void *ctx
, u8
*dst
,
24 EXPORT_SYMBOL_GPL(aria_aesni_avx_decrypt_16way
);
25 asmlinkage
void aria_aesni_avx_ctr_crypt_16way(const void *ctx
, u8
*dst
,
27 u8
*keystream
, u8
*iv
);
28 EXPORT_SYMBOL_GPL(aria_aesni_avx_ctr_crypt_16way
);
30 asmlinkage
void aria_aesni_avx_gfni_encrypt_16way(const void *ctx
, u8
*dst
,
32 EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_encrypt_16way
);
33 asmlinkage
void aria_aesni_avx_gfni_decrypt_16way(const void *ctx
, u8
*dst
,
35 EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_decrypt_16way
);
36 asmlinkage
void aria_aesni_avx_gfni_ctr_crypt_16way(const void *ctx
, u8
*dst
,
38 u8
*keystream
, u8
*iv
);
39 EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_ctr_crypt_16way
);
40 #endif /* CONFIG_AS_GFNI */
42 static struct aria_avx_ops aria_ops
;
44 struct aria_avx_request_ctx
{
45 u8 keystream
[ARIA_AESNI_PARALLEL_BLOCK_SIZE
];
48 static int ecb_do_encrypt(struct skcipher_request
*req
, const u32
*rkey
)
50 ECB_WALK_START(req
, ARIA_BLOCK_SIZE
, ARIA_AESNI_PARALLEL_BLOCKS
);
51 ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS
, aria_ops
.aria_encrypt_16way
);
52 ECB_BLOCK(1, aria_encrypt
);
56 static int ecb_do_decrypt(struct skcipher_request
*req
, const u32
*rkey
)
58 ECB_WALK_START(req
, ARIA_BLOCK_SIZE
, ARIA_AESNI_PARALLEL_BLOCKS
);
59 ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS
, aria_ops
.aria_decrypt_16way
);
60 ECB_BLOCK(1, aria_decrypt
);
64 static int aria_avx_ecb_encrypt(struct skcipher_request
*req
)
66 struct crypto_skcipher
*tfm
= crypto_skcipher_reqtfm(req
);
67 struct aria_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
69 return ecb_do_encrypt(req
, ctx
->enc_key
[0]);
72 static int aria_avx_ecb_decrypt(struct skcipher_request
*req
)
74 struct crypto_skcipher
*tfm
= crypto_skcipher_reqtfm(req
);
75 struct aria_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
77 return ecb_do_decrypt(req
, ctx
->dec_key
[0]);
80 static int aria_avx_set_key(struct crypto_skcipher
*tfm
, const u8
*key
,
83 return aria_set_key(&tfm
->base
, key
, keylen
);
86 static int aria_avx_ctr_encrypt(struct skcipher_request
*req
)
88 struct aria_avx_request_ctx
*req_ctx
= skcipher_request_ctx(req
);
89 struct crypto_skcipher
*tfm
= crypto_skcipher_reqtfm(req
);
90 struct aria_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
91 struct skcipher_walk walk
;
95 err
= skcipher_walk_virt(&walk
, req
, false);
97 while ((nbytes
= walk
.nbytes
) > 0) {
98 const u8
*src
= walk
.src
.virt
.addr
;
99 u8
*dst
= walk
.dst
.virt
.addr
;
101 while (nbytes
>= ARIA_AESNI_PARALLEL_BLOCK_SIZE
) {
103 aria_ops
.aria_ctr_crypt_16way(ctx
, dst
, src
,
104 &req_ctx
->keystream
[0],
107 dst
+= ARIA_AESNI_PARALLEL_BLOCK_SIZE
;
108 src
+= ARIA_AESNI_PARALLEL_BLOCK_SIZE
;
109 nbytes
-= ARIA_AESNI_PARALLEL_BLOCK_SIZE
;
112 while (nbytes
>= ARIA_BLOCK_SIZE
) {
113 memcpy(&req_ctx
->keystream
[0], walk
.iv
, ARIA_BLOCK_SIZE
);
114 crypto_inc(walk
.iv
, ARIA_BLOCK_SIZE
);
116 aria_encrypt(ctx
, &req_ctx
->keystream
[0],
117 &req_ctx
->keystream
[0]);
119 crypto_xor_cpy(dst
, src
, &req_ctx
->keystream
[0],
121 dst
+= ARIA_BLOCK_SIZE
;
122 src
+= ARIA_BLOCK_SIZE
;
123 nbytes
-= ARIA_BLOCK_SIZE
;
126 if (walk
.nbytes
== walk
.total
&& nbytes
> 0) {
127 memcpy(&req_ctx
->keystream
[0], walk
.iv
,
129 crypto_inc(walk
.iv
, ARIA_BLOCK_SIZE
);
131 aria_encrypt(ctx
, &req_ctx
->keystream
[0],
132 &req_ctx
->keystream
[0]);
134 crypto_xor_cpy(dst
, src
, &req_ctx
->keystream
[0],
140 err
= skcipher_walk_done(&walk
, nbytes
);
146 static int aria_avx_init_tfm(struct crypto_skcipher
*tfm
)
148 crypto_skcipher_set_reqsize(tfm
, sizeof(struct aria_avx_request_ctx
));
153 static struct skcipher_alg aria_algs
[] = {
155 .base
.cra_name
= "__ecb(aria)",
156 .base
.cra_driver_name
= "__ecb-aria-avx",
157 .base
.cra_priority
= 400,
158 .base
.cra_flags
= CRYPTO_ALG_INTERNAL
,
159 .base
.cra_blocksize
= ARIA_BLOCK_SIZE
,
160 .base
.cra_ctxsize
= sizeof(struct aria_ctx
),
161 .base
.cra_module
= THIS_MODULE
,
162 .min_keysize
= ARIA_MIN_KEY_SIZE
,
163 .max_keysize
= ARIA_MAX_KEY_SIZE
,
164 .setkey
= aria_avx_set_key
,
165 .encrypt
= aria_avx_ecb_encrypt
,
166 .decrypt
= aria_avx_ecb_decrypt
,
168 .base
.cra_name
= "__ctr(aria)",
169 .base
.cra_driver_name
= "__ctr-aria-avx",
170 .base
.cra_priority
= 400,
171 .base
.cra_flags
= CRYPTO_ALG_INTERNAL
,
172 .base
.cra_blocksize
= 1,
173 .base
.cra_ctxsize
= sizeof(struct aria_ctx
),
174 .base
.cra_module
= THIS_MODULE
,
175 .min_keysize
= ARIA_MIN_KEY_SIZE
,
176 .max_keysize
= ARIA_MAX_KEY_SIZE
,
177 .ivsize
= ARIA_BLOCK_SIZE
,
178 .chunksize
= ARIA_BLOCK_SIZE
,
179 .walksize
= 16 * ARIA_BLOCK_SIZE
,
180 .setkey
= aria_avx_set_key
,
181 .encrypt
= aria_avx_ctr_encrypt
,
182 .decrypt
= aria_avx_ctr_encrypt
,
183 .init
= aria_avx_init_tfm
,
187 static struct simd_skcipher_alg
*aria_simd_algs
[ARRAY_SIZE(aria_algs
)];
189 static int __init
aria_avx_init(void)
191 const char *feature_name
;
193 if (!boot_cpu_has(X86_FEATURE_AVX
) ||
194 !boot_cpu_has(X86_FEATURE_AES
) ||
195 !boot_cpu_has(X86_FEATURE_OSXSAVE
)) {
196 pr_info("AVX or AES-NI instructions are not detected.\n");
200 if (!cpu_has_xfeatures(XFEATURE_MASK_SSE
| XFEATURE_MASK_YMM
,
202 pr_info("CPU feature '%s' is not supported.\n", feature_name
);
206 if (boot_cpu_has(X86_FEATURE_GFNI
) && IS_ENABLED(CONFIG_AS_GFNI
)) {
207 aria_ops
.aria_encrypt_16way
= aria_aesni_avx_gfni_encrypt_16way
;
208 aria_ops
.aria_decrypt_16way
= aria_aesni_avx_gfni_decrypt_16way
;
209 aria_ops
.aria_ctr_crypt_16way
= aria_aesni_avx_gfni_ctr_crypt_16way
;
211 aria_ops
.aria_encrypt_16way
= aria_aesni_avx_encrypt_16way
;
212 aria_ops
.aria_decrypt_16way
= aria_aesni_avx_decrypt_16way
;
213 aria_ops
.aria_ctr_crypt_16way
= aria_aesni_avx_ctr_crypt_16way
;
216 return simd_register_skciphers_compat(aria_algs
,
217 ARRAY_SIZE(aria_algs
),
221 static void __exit
aria_avx_exit(void)
223 simd_unregister_skciphers(aria_algs
, ARRAY_SIZE(aria_algs
),
227 module_init(aria_avx_init
);
228 module_exit(aria_avx_exit
);
230 MODULE_LICENSE("GPL");
231 MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
232 MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX/AES-NI/GFNI optimized");
233 MODULE_ALIAS_CRYPTO("aria");
234 MODULE_ALIAS_CRYPTO("aria-aesni-avx");