/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX2/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

asmlinkage void aria_aesni_avx2_encrypt_32way(const void *ctx, u8 *dst,
					      const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_encrypt_32way);
asmlinkage void aria_aesni_avx2_decrypt_32way(const void *ctx, u8 *dst,
					      const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_decrypt_32way);
asmlinkage void aria_aesni_avx2_ctr_crypt_32way(const void *ctx, u8 *dst,
						const u8 *src,
						u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_ctr_crypt_32way);

#ifdef CONFIG_AS_GFNI
asmlinkage void aria_aesni_avx2_gfni_encrypt_32way(const void *ctx, u8 *dst,
						   const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_encrypt_32way);
asmlinkage void aria_aesni_avx2_gfni_decrypt_32way(const void *ctx, u8 *dst,
						   const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_decrypt_32way);
asmlinkage void aria_aesni_avx2_gfni_ctr_crypt_32way(const void *ctx, u8 *dst,
						     const u8 *src,
						     u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_ctr_crypt_32way);
#endif /* CONFIG_AS_GFNI */
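
/*
 * Function-pointer table filled in at module init: either the plain
 * AES-NI/AVX(2) routines or the GFNI-accelerated variants, depending on
 * what the CPU and assembler support.
 */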
static struct aria_avx_ops aria_ops;
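
/*
 * Per-request scratch space used by the CTR path to hold the generated
 * keystream, up to 32 blocks at a time.
 */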
struct aria_avx2_request_ctx {
	u8 keystream[ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE];
};
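
/*
 * The ECB helpers walk the request and hand off the largest chunks first:
 * 32 blocks to the AVX2 code, then 16 blocks to the AVX code, and finally
 * single blocks to the generic C implementation.
 */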
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_AVX2_PARALLEL_BLOCKS, aria_ops.aria_encrypt_32way);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
	ECB_BLOCK(1, aria_encrypt);
	ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_AVX2_PARALLEL_BLOCKS, aria_ops.aria_decrypt_32way);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
	ECB_BLOCK(1, aria_decrypt);
	ECB_WALK_END();
}

static int aria_avx2_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx2_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_decrypt(req, ctx->dec_key[0]);
}

static int aria_avx2_set_key(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	return aria_set_key(&tfm->base, key, keylen);
}
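
/*
 * CTR mode: generate keystream 32 blocks, then 16 blocks at a time with the
 * SIMD helpers, fall back to the generic single-block cipher for what is
 * left, and handle a final partial block by XORing only the remaining bytes.
 */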
static int aria_avx2_ctr_encrypt(struct skcipher_request *req)
{
	struct aria_avx2_request_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

		while (nbytes >= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_32way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
		}

		while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_16way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
		}

		while (nbytes >= ARIA_BLOCK_SIZE) {
			memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       ARIA_BLOCK_SIZE);
			dst += ARIA_BLOCK_SIZE;
			src += ARIA_BLOCK_SIZE;
			nbytes -= ARIA_BLOCK_SIZE;
		}

		if (walk.nbytes == walk.total && nbytes > 0) {
			memcpy(&req_ctx->keystream[0], walk.iv,
			       ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       nbytes);
			dst += nbytes;
			src += nbytes;
			nbytes = 0;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
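
/* Reserve per-request space for the keystream scratch buffer. */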
static int aria_avx2_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct aria_avx2_request_ctx));

	return 0;
}
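
/*
 * Both algorithms are registered as internal ("__" prefixed) ciphers and are
 * only reachable through the simd_skcipher wrappers registered at init time,
 * which defer to a cryptd worker when the FPU is not usable.
 */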
static struct skcipher_alg aria_algs[] = {
	{
		.base.cra_name		= "__ecb(aria)",
		.base.cra_driver_name	= "__ecb-aria-avx2",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.setkey			= aria_avx2_set_key,
		.encrypt		= aria_avx2_ecb_encrypt,
		.decrypt		= aria_avx2_ecb_decrypt,
	}, {
		.base.cra_name		= "__ctr(aria)",
		.base.cra_driver_name	= "__ctr-aria-avx2",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL |
					  CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.ivsize			= ARIA_BLOCK_SIZE,
		.chunksize		= ARIA_BLOCK_SIZE,
		.setkey			= aria_avx2_set_key,
		.encrypt		= aria_avx2_ctr_encrypt,
		.decrypt		= aria_avx2_ctr_encrypt,
		.init			= aria_avx2_init_tfm,
	}
};

static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];

static int __init aria_avx2_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AVX2) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX2 or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}
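
	/*
	 * Prefer the GFNI-accelerated routines when both the CPU and the
	 * assembler support them; otherwise use the plain AES-NI/AVX ones.
	 */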
	if (boot_cpu_has(X86_FEATURE_GFNI) && IS_ENABLED(CONFIG_AS_GFNI)) {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
		aria_ops.aria_encrypt_32way = aria_aesni_avx2_gfni_encrypt_32way;
		aria_ops.aria_decrypt_32way = aria_aesni_avx2_gfni_decrypt_32way;
		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_gfni_ctr_crypt_32way;
	} else {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
		aria_ops.aria_encrypt_32way = aria_aesni_avx2_encrypt_32way;
		aria_ops.aria_decrypt_32way = aria_aesni_avx2_decrypt_32way;
		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_ctr_crypt_32way;
	}

	return simd_register_skciphers_compat(aria_algs,
					      ARRAY_SIZE(aria_algs),
					      aria_simd_algs);
}

static void __exit aria_avx2_exit(void)
{
	simd_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs),
				  aria_simd_algs);
}

module_init(aria_avx2_init);
module_exit(aria_avx2_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX2/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx2");