// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include <asm/simd.h>

#include "aegis.h"
#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8
#define AEGIS128_MAX_AUTH_SIZE 16
struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);
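/*
 * The two fixed 16-byte constants defined by the AEGIS specification
 * (const0 and const1), stored here as pairs of little-endian 64-bit words.
 * They are used to seed the state blocks in crypto_aegis128_init().
 */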
static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};
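/*
 * Decide at request time whether the SIMD fast path may be used: the static
 * key is enabled once at module init if the architecture glue reports SIMD
 * support, and crypto_simd_usable() is still checked per call because
 * kernel-mode SIMD is not usable in every context.
 */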
static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (static_branch_likely(&have_simd))
		return crypto_simd_usable();
#endif
	return false;
}
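/*
 * Prototypes for the optional architecture-specific SIMD implementation
 * (built only when CONFIG_CRYPTO_AEGIS128_SIMD is enabled); the scalar code
 * below is always available as a fallback.
 */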
bool crypto_aegis128_have_simd(void);
void crypto_aegis128_update_simd(struct aegis_state *state, const void *msg);
void crypto_aegis128_init_simd(struct aegis_state *state,
			       const union aegis_block *key,
			       const u8 *iv);
void crypto_aegis128_encrypt_chunk_simd(struct aegis_state *state, u8 *dst,
					const u8 *src, unsigned int size);
void crypto_aegis128_decrypt_chunk_simd(struct aegis_state *state, u8 *dst,
					const u8 *src, unsigned int size);
int crypto_aegis128_final_simd(struct aegis_state *state,
			       union aegis_block *tag_xor,
			       unsigned int assoclen,
			       unsigned int cryptlen,
			       unsigned int authsize);
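/*
 * StateUpdate128: pass each state block through one AES round, using the
 * previous block as the round input and the old value of the block itself
 * as the round key (S[0] is fed from the saved old S[4]).  The message
 * block is XORed into S[0] afterwards by the _a/_u wrappers below.
 */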
static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}
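/*
 * Absorb one 16-byte message block into the state.  The _a variant takes an
 * aligned aegis_block; the _u variant below accepts an unaligned buffer and
 * uses crypto_xor() instead.  Both defer to the SIMD implementation when the
 * caller has established that it is usable.
 */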
static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}
static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}
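/*
 * Initialize the five-block state from the key and the 16-byte nonce as
 * specified for AEGIS-128: load key ^ iv, the two constants and two copies
 * of the key (masked with the constants), then run ten update rounds
 * alternately absorbing the key and key ^ iv.
 */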
static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key, false);
		crypto_aegis128_update_a(state, &key_iv, false);
	}
}
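/*
 * Absorb full blocks of associated data, taking the aligned path when the
 * source buffer is 16-byte aligned.  Any trailing partial block is left for
 * the caller (crypto_aegis128_process_ad()) to buffer.
 */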
static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size,
			       bool do_simd)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}
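/*
 * "crypt" callback used on authentication failure: it simply zeroes the
 * output so that no unauthenticated plaintext is handed back to the caller.
 */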
static void crypto_aegis128_wipe_chunk(struct aegis_state *state, u8 *dst,
				       const u8 *src, unsigned int size)
{
	memzero_explicit(dst, size);
}
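/*
 * Encrypt a contiguous chunk: for each block, the keystream is
 * S[1] ^ S[4] ^ (S[2] & S[3]), the ciphertext is plaintext ^ keystream, and
 * the state is updated with the plaintext block.  A trailing partial block
 * is handled through a zero-padded temporary block.
 */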
static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg, false);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}
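/*
 * Decrypt a contiguous chunk: the same keystream is XORed with the
 * ciphertext and the state is updated with the recovered plaintext.  For a
 * trailing partial block, the bytes beyond the message length are zeroed
 * before the state update so the absorbed block matches the encrypt side.
 */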
static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg, false);

		memcpy(dst, msg.bytes, size);
	}
}
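/*
 * Feed the associated data from the request's scatterlist into the state.
 * Partial blocks are collected in a local buffer so that only whole 16-byte
 * blocks are ever absorbed; the final partial block, if any, is zero-padded.
 */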
static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen,
				       bool do_simd)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf, do_simd);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left, do_simd);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf, do_simd);
	}
}
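/*
 * Walk the plaintext/ciphertext and hand each full-stride chunk to the given
 * chunk handler.  Marked __always_inline so each caller can be specialized
 * for its particular handler rather than going through an indirect call.
 */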
static __always_inline
int crypto_aegis128_process_crypt(struct aegis_state *state,
				  struct skcipher_walk *walk,
				  void (*crypt)(struct aegis_state *state,
						u8 *dst, const u8 *src,
						unsigned int size))
{
	int err = 0;

	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
	return err;
}
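/*
 * Finalization: encode the associated-data and message lengths in bits as a
 * block, XOR it with S[3], run seven update rounds absorbing that block, and
 * XOR all five state blocks into *tag_xor.  Callers pass either an all-zero
 * block (encrypt) or the received tag (decrypt), so on decrypt a matching
 * tag leaves *tag_xor equal to zero.
 */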
static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp, false);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}
static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}
static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}
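/*
 * Scalar encrypt path: run init, AD absorption, encryption and finalization,
 * then append the authentication tag after the ciphertext in req->dst.
 */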
static int crypto_aegis128_encrypt_generic(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}
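/*
 * Scalar decrypt path: the received tag is read into 'tag' and cancelled
 * against the computed tag during finalization, so verification succeeds
 * only if the result compares equal to all zeroes.
 */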
static int crypto_aegis128_decrypt_generic(struct aead_request *req)
{
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	if (unlikely(crypto_memneq(tag.bytes, zeros, authsize))) {
		/*
		 * From Chapter 4. 'Security Analysis' of the AEGIS spec [0]
		 *
		 * "3. If verification fails, the decrypted plaintext and the
		 *     wrong authentication tag should not be given as output."
		 *
		 * [0] https://competitions.cr.yp.to/round3/aegisv11.pdf
		 */
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		memzero_explicit(&tag, sizeof(tag));
		return -EBADMSG;
	}
	return 0;
}
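/*
 * SIMD request paths: identical in structure to the generic ones, but they
 * use the architecture-specific helpers and fall back to the generic code
 * when SIMD cannot be used in the current context.
 */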
static int crypto_aegis128_encrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_encrypt_generic(req);

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk_simd);
	crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}
static int crypto_aegis128_decrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_decrypt_generic(req);

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk_simd);

	if (unlikely(crypto_aegis128_final_simd(&state, &tag, req->assoclen,
						cryptlen, authsize))) {
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		return -EBADMSG;
	}
	return 0;
}
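/*
 * Two algorithm instances share the "aegis128" name; the SIMD variant is
 * registered with a higher cra_priority so it is preferred when available.
 */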
static struct aead_alg crypto_aegis128_alg_generic = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_generic,
	.decrypt		= crypto_aegis128_decrypt_generic,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 100,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-generic",
	.base.cra_module	= THIS_MODULE,
};
static struct aead_alg crypto_aegis128_alg_simd = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_simd,
	.decrypt		= crypto_aegis128_decrypt_simd,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 200,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-simd",
	.base.cra_module	= THIS_MODULE,
};
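/*
 * Always register the generic implementation; additionally register the SIMD
 * variant and flip the have_simd static key when the architecture glue
 * reports SIMD support.
 */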
static int __init crypto_aegis128_module_init(void)
{
	int ret;

	ret = crypto_register_aead(&crypto_aegis128_alg_generic);
	if (ret)
		return ret;

	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd()) {
		ret = crypto_register_aead(&crypto_aegis128_alg_simd);
		if (ret) {
			crypto_unregister_aead(&crypto_aegis128_alg_generic);
			return ret;
		}
		static_branch_enable(&have_simd);
	}
	return 0;
}
static void __exit crypto_aegis128_module_exit(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd())
		crypto_unregister_aead(&crypto_aegis128_alg_simd);

	crypto_unregister_aead(&crypto_aegis128_alg_generic);
}
subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");
MODULE_ALIAS_CRYPTO("aegis128-simd");