// SPDX-License-Identifier: GPL-2.0-only
/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"
#include "aes-ctr-fallback.h"

#ifdef USE_V8_CRYPTO_EXTENSIONS
#define aes_setkey		ce_aes_setkey
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	ce_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	ce_aes_cbc_cts_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
#define aes_mac_update		ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define aes_setkey		crypto_aes_set_key
#define aes_expandkey		crypto_aes_expand_key
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	neon_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	neon_aes_cbc_cts_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
55 MODULE_ALIAS_CRYPTO("ecb(aes)");
56 MODULE_ALIAS_CRYPTO("cbc(aes)");
57 MODULE_ALIAS_CRYPTO("ctr(aes)");
58 MODULE_ALIAS_CRYPTO("xts(aes)");
59 MODULE_ALIAS_CRYPTO("cmac(aes)");
60 MODULE_ALIAS_CRYPTO("xcbc(aes)");
61 MODULE_ALIAS_CRYPTO("cbcmac(aes)");
64 MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
65 MODULE_LICENSE("GPL v2");
/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);

asmlinkage void aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);
asmlinkage void aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 ctr[]);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int blocks, u32 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int blocks, u32 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			       int blocks, u8 dg[], int enc_before,
			       int enc_after);
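
/*
 * The generic names above are remapped by the #defines at the top of this
 * file, so the same glue code links against either the ce_aes_* (Crypto
 * Extensions) or the neon_aes_* implementations of the mode routines.
 */
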
struct cts_cbc_req_ctx {
	struct scatterlist sg_src[2];
	struct scatterlist sg_dst[2];
	struct skcipher_request subreq;
};

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

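/*
 * MAC transform context: the expanded AES key plus a flexible array of
 * constants derived at setkey time (two AES blocks for cmac/xcbc; the
 * plain cbcmac algorithm allocates no extra space for it).
 */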
struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};

static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	return aes_setkey(crypto_skcipher_tfm(tfm), in_key, key_len);
}

static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

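/*
 * All mode handlers below derive the round count from the expanded key
 * length: 6 + key_length / 4 gives 10, 12 or 14 rounds for AES-128,
 * AES-192 and AES-256 respectively.
 */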
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_dec, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_dec, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cts_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct cts_cbc_req_ctx));
	return 0;
}

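/*
 * CBC with ciphertext stealing (CTS): everything up to the last two
 * blocks is handled as regular CBC through a subrequest, then the
 * remaining one-to-two block tail is handed to the aes_cbc_cts_* asm
 * helpers, which perform the stealing for the final partial block.
 */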
static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cts_cbc_req_ctx *rctx = skcipher_request_ctx(req);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&rctx->subreq, tfm);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		unsigned int blocks;

		skcipher_request_set_crypt(&rctx->subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &rctx->subreq, false);

		while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
			kernel_neon_begin();
			aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
					ctx->key_enc, rounds, blocks, walk.iv);
			kernel_neon_end();
			err = skcipher_walk_done(&walk,
						 walk.nbytes % AES_BLOCK_SIZE);
		}
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(rctx->sg_src, req->src,
					     rctx->subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
					       rctx->subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&rctx->subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &rctx->subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cts_cbc_req_ctx *rctx = skcipher_request_ctx(req);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&rctx->subreq, tfm);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		unsigned int blocks;

		skcipher_request_set_crypt(&rctx->subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &rctx->subreq, false);

		while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
			kernel_neon_begin();
			aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
					ctx->key_dec, rounds, blocks, walk.iv);
			kernel_neon_end();
			err = skcipher_walk_done(&walk,
						 walk.nbytes % AES_BLOCK_SIZE);
		}
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(rctx->sg_src, req->src,
					     rctx->subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
					       rctx->subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&rctx->subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &rctx->subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_dec, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

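/*
 * CTR mode only ever encrypts the counter stream, so the same routine
 * serves as both .encrypt and .decrypt in the algorithm definitions
 * below.
 */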
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		kernel_neon_begin();
		aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds,
				blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

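/*
 * Synchronous entry point for the non-internal "ctr(aes)" algorithm:
 * when the NEON unit cannot be used (e.g. in hard IRQ context), fall
 * back to the scalar CTR implementation instead.
 */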
static int ctr_encrypt_sync(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (!crypto_simd_usable())
		return aes_ctr_encrypt_fallback(ctx, req);

	return ctr_encrypt(req);
}

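/*
 * XTS uses two expanded keys: key1 encrypts or decrypts the data blocks,
 * while key2 is only ever used to encrypt the IV into the tweak, which is
 * why both directions pass key2.key_enc. The 'first' flag is nonzero only
 * on the first chunk of the walk, when the tweak still has to be derived
 * from the IV.
 */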
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		kernel_neon_begin();
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_enc, rounds, blocks,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}

	return err;
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		kernel_neon_begin();
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_dec, rounds, blocks,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}

	return err;
}

static struct skcipher_alg aes_algs[] = { {
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__cts(cbc(aes))",
		.cra_driver_name	= "__cts-cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.walksize	= 2 * AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cts_cbc_encrypt,
	.decrypt	= cts_cbc_decrypt,
	.init		= cts_cbc_init_tfm,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO - 1,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt_sync,
	.decrypt	= ctr_encrypt_sync,
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
} };

static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int err;

	err = aes_expandkey(&ctx->key, in_key, key_len);
	if (err)
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

	return err;
}

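/*
 * Double a 128-bit value in GF(2^128): shift left by one bit and, if the
 * top bit was set, reduce by XORing in 0x87, the low part of the field
 * polynomial x^128 + x^7 + x^2 + x + 1. This is the subkey derivation
 * step defined by the CMAC specification.
 */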
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}

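/*
 * CMAC subkey derivation: K1 = double(AES-ENC(K, 0^128)) and
 * K2 = double(K1). Both subkeys live in ctx->consts; cmac_final() XORs
 * the appropriate one into the last block before the final encryption.
 */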
static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc,
			rounds, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}

static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}

static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}

static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
			  u8 dg[], int enc_before, int enc_after)
{
	int rounds = 6 + ctx->key_length / 4;

	if (crypto_simd_usable()) {
		kernel_neon_begin();
		aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
			       enc_after);
		kernel_neon_end();
	} else {
		if (enc_before)
			__aes_arm64_encrypt(ctx->key_enc, dg, dg, rounds);

		while (blocks--) {
			crypto_xor(dg, in, AES_BLOCK_SIZE);
			in += AES_BLOCK_SIZE;

			if (blocks || enc_after)
				__aes_arm64_encrypt(ctx->key_enc, dg, dg,
						    rounds);
		}
	}
}

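/*
 * Feed message data into the CBC-MAC state: whole blocks go straight to
 * mac_do_update(), while any partial block is accumulated by XORing it
 * into the digest buffer until a full block is available.
 */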
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			mac_do_update(&tctx->key, p, blocks, ctx->dg,
				      (ctx->len != 0), (len != 0));

			p += blocks * AES_BLOCK_SIZE;

			if (len) {
				ctx->len = AES_BLOCK_SIZE;
				continue;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}

static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

static struct shash_alg mac_algs[] = { {
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}

#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
#endif
module_exit(aes_exit);