/*
 * Glue Code for x86_64/AVX2/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/fpu/api.h>
#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>
#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
#define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32
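/*
 * Only the 32-way primitives live in this module.  The 16-way AVX, 2-way
 * and 1-way functions referenced below (camellia_ecb_enc_16way(),
 * camellia_enc_blk_2way(), camellia_enc_blk(), ...) are declared in
 * <asm/crypto/camellia.h> and exported by the camellia-aesni-avx and
 * camellia-x86_64 modules.
 */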
/* 32-way AVX2/AES-NI parallel cipher functions */
asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);

asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);

asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
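/*
 * The glue_helper framework walks each funcs[] table from the top and uses
 * the widest batch that still fits in the remaining data, so entries must
 * be listed in descending num_blocks order.  The FPU/SIMD state is only
 * claimed once at least fpu_blocks_limit blocks are queued.
 */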
static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
	} }
};
static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
	} }
};
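/*
 * The XTS tables have only three levels (32-way, 16-way, 1-way): there is
 * no 2-way XTS primitive for Camellia.
 */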
static const struct common_glue_ctx camellia_enc_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
	} }
};
static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
	} }
};
static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
	} }
};
static const struct common_glue_ctx camellia_dec_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
	} }
};
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);
}
static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);
}
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(camellia_enc_blk), desc,
				       dst, src, nbytes);
}
static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&camellia_dec_cbc, desc, dst, src,
				       nbytes);
}
static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);
}
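/*
 * kernel_fpu_begin()/kernel_fpu_end() are costly, so FPU context is entered
 * lazily: glue_fpu_begin() only claims the FPU once at least
 * CAMELLIA_AESNI_PARALLEL_BLOCKS blocks of data are pending, and the state
 * is carried around in a bool so repeated calls are cheap no-ops.
 */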
static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAMELLIA_BLOCK_SIZE,
			      CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,
			      nbytes);
}
static inline void camellia_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}
static int camellia_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	return __camellia_setkey(crypto_tfm_ctx(tfm), in_key, key_len,
				 &tfm->crt_flags);
}
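/*
 * Per-request state passed through lrw_crypt() to the encrypt/decrypt
 * callbacks below; fpu_enabled tracks whether we currently own the FPU.
 */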
struct crypt_priv {
	struct camellia_ctx *ctx;
	bool fpu_enabled;
};
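/*
 * Process one 32-block batch if possible, then one 16-block batch, then
 * 2-block batches, and finally single blocks.  Since lrw_crypt() never
 * hands us more than the 32-block tweak buffer at once, the 32-way and
 * 16-way steps each run at most once per invocation.
 */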
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_enc_32way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
	}

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_enc_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_enc_blk(ctx->ctx, srcdst, srcdst);
}
static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_dec_32way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
	}

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_dec_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_dec_blk(ctx->ctx, srcdst, srcdst);
}
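/*
 * LRW runs through the generic lrw_crypt() helper with the callbacks above.
 * CRYPTO_TFM_REQ_MAY_SLEEP is cleared because the callbacks may hold the
 * FPU (kernel_fpu_begin() disables preemption), so lrw_crypt() must not
 * sleep while walking the scatterlists.
 */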
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
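/*
 * XTS is handled by glue_xts_crypt_128bit().  The initial tweak is always
 * computed with the one-block *encryption* function, which is why both the
 * encrypt and decrypt paths pass camellia_enc_blk as the tweak function.
 */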
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
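/*
 * Ten algorithm instances: the first five are the synchronous "__"-prefixed
 * blkcipher implementations, marked CRYPTO_ALG_INTERNAL so they can only be
 * reached through the ablkcipher wrappers that follow, which use ablk_helper
 * to defer to cryptd whenever the FPU is not usable in the caller's context.
 */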
static struct crypto_alg cmll_algs[10] = { {
	.cra_name		= "__ecb-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-ecb-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-cbc-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-ctr-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-lrw-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_camellia_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= lrw_camellia_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-xts-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= xts_camellia_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(camellia)",
	.cra_driver_name	= "ecb-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(camellia)",
	.cra_driver_name	= "cbc-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(camellia)",
	.cra_driver_name	= "ctr-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(camellia)",
	.cra_driver_name	= "lrw-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(camellia)",
	.cra_driver_name	= "xts-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
static int __init camellia_aesni_init(void)
{
	const char *feature_name;

	if (!cpu_has_avx2 || !cpu_has_avx || !cpu_has_aes || !cpu_has_osxsave) {
		pr_info("AVX2 or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return crypto_register_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}
static void __exit camellia_aesni_fini(void)
{
	crypto_unregister_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}
module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX2 optimized");
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");
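/*
 * Illustrative sketch (not part of the original file): a kernel user would
 * reach this driver through the generic crypto API, letting priority-based
 * selection pick the AVX2 implementation when the module is loaded, e.g.:
 *
 *	struct crypto_ablkcipher *tfm;
 *
 *	tfm = crypto_alloc_ablkcipher("ctr(camellia)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_ablkcipher_setkey(tfm, key, CAMELLIA_MAX_KEY_SIZE);
 *	...
 *	crypto_free_ablkcipher(tfm);
 */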