// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <asm/simd.h>

#include "aegis.h"
#define AEGIS128_NONCE_SIZE	16
#define AEGIS128_STATE_BLOCKS	5
#define AEGIS128_KEY_SIZE	16
#define AEGIS128_MIN_AUTH_SIZE	8
#define AEGIS128_MAX_AUTH_SIZE	16

struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);
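/*
 * The two AEGIS initialization constants from the spec: the first 32
 * bytes of the Fibonacci sequence modulo 256, stored as little-endian
 * 64-bit words.
 */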
static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};
static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (static_branch_likely(&have_simd))
		return crypto_simd_usable();
#endif
	return false;
}
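/*
 * StateUpdate128 from the AEGIS spec: every state block is replaced by
 * the AES round of its predecessor, keyed with the block's own previous
 * value. The message word is folded into blocks[0] by the callers.
 */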
static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}
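/*
 * The _a variant expects a message block that satisfies AEGIS_ALIGNED();
 * the _u variant tolerates unaligned input by going through crypto_xor().
 * Both defer to the SIMD state update when it is compiled in and the
 * caller requested it.
 */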
static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}
static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}
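/*
 * Initialize the state from the key and nonce as defined in the spec:
 *
 *	S0 = key ^ IV, S1 = const1, S2 = const0,
 *	S3 = key ^ const0, S4 = key ^ const1
 *
 * followed by ten update rounds that alternately absorb the key and
 * key ^ IV.
 */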
static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key, false);
		crypto_aegis128_update_a(state, &key_iv, false);
	}
}
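/*
 * Absorb a linear run of associated data into the state one block at a
 * time, taking the aligned fast path when the source pointer allows it.
 * Any partial trailing block is left for the caller to buffer.
 */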
static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size,
			       bool do_simd)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}
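/*
 * Deliberately matches the chunk-crypt prototype so that
 * crypto_aegis128_process_crypt() can reuse the skcipher walk to wipe
 * the output buffer after a failed tag check; the state argument is
 * unused here.
 */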
static void crypto_aegis128_wipe_chunk(struct aegis_state *state, u8 *dst,
				       const u8 *src, unsigned int size)
{
	memzero_explicit(dst, size);
}
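/*
 * Encrypt a chunk: the keystream for each block is
 *
 *	z = S1 ^ S4 ^ (S2 & S3)
 *
 * and the state absorbs the plaintext. A partial final block is padded
 * with zeroes before being absorbed.
 */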
static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};

		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg, false);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}
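/*
 * Decrypt a chunk: same keystream as encryption, but the state absorbs
 * the recovered plaintext. For a partial final block, the bytes beyond
 * the message length are zeroed before the state update so that the
 * padding matches the encryption side.
 */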
static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};

		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg, false);

		memcpy(dst, msg.bytes, size);
	}
}
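/*
 * Feed the associated data from a scatterlist into the state, buffering
 * partial blocks in 'buf' across scatterlist entries and zero-padding
 * whatever remains once the AD is exhausted.
 */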
static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen,
				       bool do_simd)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;

				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf, do_simd);

				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left, do_simd);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf, do_simd);
	}
}
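/*
 * Drive the skcipher walk over the src/dst scatterlists, handing each
 * mapped span to the chunk routine. All but the final chunk are rounded
 * down to a multiple of the walk stride, so the chunk routines only ever
 * see a partial block at the very end of the message.
 */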
static __always_inline
int crypto_aegis128_process_crypt(struct aegis_state *state,
				  struct skcipher_walk *walk,
				  void (*crypt)(struct aegis_state *state,
						u8 *dst, const u8 *src,
						unsigned int size))
{
	int err = 0;

	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
	return err;
}
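/*
 * Finalization: absorb the bit lengths of the associated data and the
 * message for seven update rounds, then XOR all five state blocks into
 * the tag.
 */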
static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp, false);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}
static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}
static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}
static int crypto_aegis128_encrypt_generic(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}
static int crypto_aegis128_decrypt_generic(struct aead_request *req)
{
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	if (unlikely(crypto_memneq(tag.bytes, zeros, authsize))) {
		/*
		 * From Chapter 4. 'Security Analysis' of the AEGIS spec [0]
		 *
		 * "3. If verification fails, the decrypted plaintext and the
		 *     wrong authentication tag should not be given as output."
		 *
		 * [0] https://competitions.cr.yp.to/round3/aegisv11.pdf
		 */
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		memzero_explicit(&tag, sizeof(tag));
		return -EBADMSG;
	}
	return 0;
}
static int crypto_aegis128_encrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_encrypt_generic(req);

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk_simd);
	crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}
static int crypto_aegis128_decrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_decrypt_generic(req);

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk_simd);

	if (unlikely(crypto_aegis128_final_simd(&state, &tag, req->assoclen,
						cryptlen, authsize))) {
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		return -EBADMSG;
	}
	return 0;
}
static struct aead_alg crypto_aegis128_alg_generic = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_generic,
	.decrypt		= crypto_aegis128_decrypt_generic,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 100,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-generic",
	.base.cra_module	= THIS_MODULE,
};
static struct aead_alg crypto_aegis128_alg_simd = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_simd,
	.decrypt		= crypto_aegis128_decrypt_simd,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 200,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-simd",
	.base.cra_module	= THIS_MODULE,
};
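/*
 * The generic implementation is always registered; the SIMD variant is
 * registered in addition when the hardware supports it, at a higher
 * cra_priority (200 vs 100) so that it wins algorithm selection.
 * Enabling the static key lets aegis128_do_simd() take the fast path.
 */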
static int __init crypto_aegis128_module_init(void)
{
	int ret;

	ret = crypto_register_aead(&crypto_aegis128_alg_generic);
	if (ret)
		return ret;

	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd()) {
		ret = crypto_register_aead(&crypto_aegis128_alg_simd);
		if (ret) {
			crypto_unregister_aead(&crypto_aegis128_alg_generic);
			return ret;
		}
		static_branch_enable(&have_simd);
	}
	return 0;
}
static void __exit crypto_aegis128_module_exit(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd())
		crypto_unregister_aead(&crypto_aegis128_alg_simd);

	crypto_unregister_aead(&crypto_aegis128_alg_generic);
}
subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");
MODULE_ALIAS_CRYPTO("aegis128-simd");
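/*
 * Illustrative usage sketch (not part of the original file): a kernel
 * client would drive this transform through the generic AEAD API along
 * these lines. Error handling and scatterlist setup are elided, and
 * key, iv, assoclen, cryptlen, src_sg and dst_sg are assumed to be
 * provided by the caller.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("aegis128", 0, 0);
 *	struct aead_request *req;
 *
 *	crypto_aead_setkey(tfm, key, AEGIS128_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, AEGIS128_MAX_AUTH_SIZE);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_callback(req, 0, NULL, NULL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	crypto_aead_encrypt(req);	(or crypto_aead_decrypt(req))
 *
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */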