// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128L Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include "aegis.h"
21 #define AEGIS128L_CHUNK_BLOCKS 2
22 #define AEGIS128L_CHUNK_SIZE (AEGIS128L_CHUNK_BLOCKS * AEGIS_BLOCK_SIZE)
23 #define AEGIS128L_NONCE_SIZE 16
24 #define AEGIS128L_STATE_BLOCKS 8
25 #define AEGIS128L_KEY_SIZE 16
26 #define AEGIS128L_MIN_AUTH_SIZE 8
27 #define AEGIS128L_MAX_AUTH_SIZE 16
30 union aegis_block blocks
[AEGIS128L_CHUNK_BLOCKS
];
31 u8 bytes
[AEGIS128L_CHUNK_SIZE
];
35 union aegis_block blocks
[AEGIS128L_STATE_BLOCKS
];
39 union aegis_block key
;
42 struct aegis128l_ops
{
43 int (*skcipher_walk_init
)(struct skcipher_walk
*walk
,
44 struct aead_request
*req
, bool atomic
);
46 void (*crypt_chunk
)(struct aegis_state
*state
, u8
*dst
,
47 const u8
*src
, unsigned int size
);
50 static void crypto_aegis128l_update(struct aegis_state
*state
)
52 union aegis_block tmp
;
55 tmp
= state
->blocks
[AEGIS128L_STATE_BLOCKS
- 1];
56 for (i
= AEGIS128L_STATE_BLOCKS
- 1; i
> 0; i
--)
57 crypto_aegis_aesenc(&state
->blocks
[i
], &state
->blocks
[i
- 1],
59 crypto_aegis_aesenc(&state
->blocks
[0], &tmp
, &state
->blocks
[0]);
62 static void crypto_aegis128l_update_a(struct aegis_state
*state
,
63 const union aegis_chunk
*msg
)
65 crypto_aegis128l_update(state
);
66 crypto_aegis_block_xor(&state
->blocks
[0], &msg
->blocks
[0]);
67 crypto_aegis_block_xor(&state
->blocks
[4], &msg
->blocks
[1]);
70 static void crypto_aegis128l_update_u(struct aegis_state
*state
,
73 crypto_aegis128l_update(state
);
74 crypto_xor(state
->blocks
[0].bytes
, msg
+ 0 * AEGIS_BLOCK_SIZE
,
76 crypto_xor(state
->blocks
[4].bytes
, msg
+ 1 * AEGIS_BLOCK_SIZE
,
80 static void crypto_aegis128l_init(struct aegis_state
*state
,
81 const union aegis_block
*key
,
84 union aegis_block key_iv
;
85 union aegis_chunk chunk
;
88 memcpy(chunk
.blocks
[0].bytes
, iv
, AEGIS_BLOCK_SIZE
);
89 chunk
.blocks
[1] = *key
;
92 crypto_aegis_block_xor(&key_iv
, &chunk
.blocks
[0]);
94 state
->blocks
[0] = key_iv
;
95 state
->blocks
[1] = crypto_aegis_const
[1];
96 state
->blocks
[2] = crypto_aegis_const
[0];
97 state
->blocks
[3] = crypto_aegis_const
[1];
98 state
->blocks
[4] = key_iv
;
99 state
->blocks
[5] = *key
;
100 state
->blocks
[6] = *key
;
101 state
->blocks
[7] = *key
;
103 crypto_aegis_block_xor(&state
->blocks
[5], &crypto_aegis_const
[0]);
104 crypto_aegis_block_xor(&state
->blocks
[6], &crypto_aegis_const
[1]);
105 crypto_aegis_block_xor(&state
->blocks
[7], &crypto_aegis_const
[0]);
107 for (i
= 0; i
< 10; i
++) {
108 crypto_aegis128l_update_a(state
, &chunk
);
112 static void crypto_aegis128l_ad(struct aegis_state
*state
,
113 const u8
*src
, unsigned int size
)
115 if (AEGIS_ALIGNED(src
)) {
116 const union aegis_chunk
*src_chunk
=
117 (const union aegis_chunk
*)src
;
119 while (size
>= AEGIS128L_CHUNK_SIZE
) {
120 crypto_aegis128l_update_a(state
, src_chunk
);
122 size
-= AEGIS128L_CHUNK_SIZE
;
126 while (size
>= AEGIS128L_CHUNK_SIZE
) {
127 crypto_aegis128l_update_u(state
, src
);
129 size
-= AEGIS128L_CHUNK_SIZE
;
130 src
+= AEGIS128L_CHUNK_SIZE
;
135 static void crypto_aegis128l_encrypt_chunk(struct aegis_state
*state
, u8
*dst
,
136 const u8
*src
, unsigned int size
)
138 union aegis_chunk tmp
;
139 union aegis_block
*tmp0
= &tmp
.blocks
[0];
140 union aegis_block
*tmp1
= &tmp
.blocks
[1];
142 if (AEGIS_ALIGNED(src
) && AEGIS_ALIGNED(dst
)) {
143 while (size
>= AEGIS128L_CHUNK_SIZE
) {
144 union aegis_chunk
*dst_blk
=
145 (union aegis_chunk
*)dst
;
146 const union aegis_chunk
*src_blk
=
147 (const union aegis_chunk
*)src
;
149 *tmp0
= state
->blocks
[2];
150 crypto_aegis_block_and(tmp0
, &state
->blocks
[3]);
151 crypto_aegis_block_xor(tmp0
, &state
->blocks
[6]);
152 crypto_aegis_block_xor(tmp0
, &state
->blocks
[1]);
153 crypto_aegis_block_xor(tmp0
, &src_blk
->blocks
[0]);
155 *tmp1
= state
->blocks
[6];
156 crypto_aegis_block_and(tmp1
, &state
->blocks
[7]);
157 crypto_aegis_block_xor(tmp1
, &state
->blocks
[5]);
158 crypto_aegis_block_xor(tmp1
, &state
->blocks
[2]);
159 crypto_aegis_block_xor(tmp1
, &src_blk
->blocks
[1]);
161 crypto_aegis128l_update_a(state
, src_blk
);
165 size
-= AEGIS128L_CHUNK_SIZE
;
166 src
+= AEGIS128L_CHUNK_SIZE
;
167 dst
+= AEGIS128L_CHUNK_SIZE
;
170 while (size
>= AEGIS128L_CHUNK_SIZE
) {
171 *tmp0
= state
->blocks
[2];
172 crypto_aegis_block_and(tmp0
, &state
->blocks
[3]);
173 crypto_aegis_block_xor(tmp0
, &state
->blocks
[6]);
174 crypto_aegis_block_xor(tmp0
, &state
->blocks
[1]);
175 crypto_xor(tmp0
->bytes
, src
+ 0 * AEGIS_BLOCK_SIZE
,
178 *tmp1
= state
->blocks
[6];
179 crypto_aegis_block_and(tmp1
, &state
->blocks
[7]);
180 crypto_aegis_block_xor(tmp1
, &state
->blocks
[5]);
181 crypto_aegis_block_xor(tmp1
, &state
->blocks
[2]);
182 crypto_xor(tmp1
->bytes
, src
+ 1 * AEGIS_BLOCK_SIZE
,
185 crypto_aegis128l_update_u(state
, src
);
187 memcpy(dst
, tmp
.bytes
, AEGIS128L_CHUNK_SIZE
);
189 size
-= AEGIS128L_CHUNK_SIZE
;
190 src
+= AEGIS128L_CHUNK_SIZE
;
191 dst
+= AEGIS128L_CHUNK_SIZE
;
196 union aegis_chunk msg
= {};
197 memcpy(msg
.bytes
, src
, size
);
199 *tmp0
= state
->blocks
[2];
200 crypto_aegis_block_and(tmp0
, &state
->blocks
[3]);
201 crypto_aegis_block_xor(tmp0
, &state
->blocks
[6]);
202 crypto_aegis_block_xor(tmp0
, &state
->blocks
[1]);
204 *tmp1
= state
->blocks
[6];
205 crypto_aegis_block_and(tmp1
, &state
->blocks
[7]);
206 crypto_aegis_block_xor(tmp1
, &state
->blocks
[5]);
207 crypto_aegis_block_xor(tmp1
, &state
->blocks
[2]);
209 crypto_aegis128l_update_a(state
, &msg
);
211 crypto_aegis_block_xor(&msg
.blocks
[0], tmp0
);
212 crypto_aegis_block_xor(&msg
.blocks
[1], tmp1
);
214 memcpy(dst
, msg
.bytes
, size
);
218 static void crypto_aegis128l_decrypt_chunk(struct aegis_state
*state
, u8
*dst
,
219 const u8
*src
, unsigned int size
)
221 union aegis_chunk tmp
;
222 union aegis_block
*tmp0
= &tmp
.blocks
[0];
223 union aegis_block
*tmp1
= &tmp
.blocks
[1];
225 if (AEGIS_ALIGNED(src
) && AEGIS_ALIGNED(dst
)) {
226 while (size
>= AEGIS128L_CHUNK_SIZE
) {
227 union aegis_chunk
*dst_blk
=
228 (union aegis_chunk
*)dst
;
229 const union aegis_chunk
*src_blk
=
230 (const union aegis_chunk
*)src
;
232 *tmp0
= state
->blocks
[2];
233 crypto_aegis_block_and(tmp0
, &state
->blocks
[3]);
234 crypto_aegis_block_xor(tmp0
, &state
->blocks
[6]);
235 crypto_aegis_block_xor(tmp0
, &state
->blocks
[1]);
236 crypto_aegis_block_xor(tmp0
, &src_blk
->blocks
[0]);
238 *tmp1
= state
->blocks
[6];
239 crypto_aegis_block_and(tmp1
, &state
->blocks
[7]);
240 crypto_aegis_block_xor(tmp1
, &state
->blocks
[5]);
241 crypto_aegis_block_xor(tmp1
, &state
->blocks
[2]);
242 crypto_aegis_block_xor(tmp1
, &src_blk
->blocks
[1]);
244 crypto_aegis128l_update_a(state
, &tmp
);
248 size
-= AEGIS128L_CHUNK_SIZE
;
249 src
+= AEGIS128L_CHUNK_SIZE
;
250 dst
+= AEGIS128L_CHUNK_SIZE
;
253 while (size
>= AEGIS128L_CHUNK_SIZE
) {
254 *tmp0
= state
->blocks
[2];
255 crypto_aegis_block_and(tmp0
, &state
->blocks
[3]);
256 crypto_aegis_block_xor(tmp0
, &state
->blocks
[6]);
257 crypto_aegis_block_xor(tmp0
, &state
->blocks
[1]);
258 crypto_xor(tmp0
->bytes
, src
+ 0 * AEGIS_BLOCK_SIZE
,
261 *tmp1
= state
->blocks
[6];
262 crypto_aegis_block_and(tmp1
, &state
->blocks
[7]);
263 crypto_aegis_block_xor(tmp1
, &state
->blocks
[5]);
264 crypto_aegis_block_xor(tmp1
, &state
->blocks
[2]);
265 crypto_xor(tmp1
->bytes
, src
+ 1 * AEGIS_BLOCK_SIZE
,
268 crypto_aegis128l_update_a(state
, &tmp
);
270 memcpy(dst
, tmp
.bytes
, AEGIS128L_CHUNK_SIZE
);
272 size
-= AEGIS128L_CHUNK_SIZE
;
273 src
+= AEGIS128L_CHUNK_SIZE
;
274 dst
+= AEGIS128L_CHUNK_SIZE
;
279 union aegis_chunk msg
= {};
280 memcpy(msg
.bytes
, src
, size
);
282 *tmp0
= state
->blocks
[2];
283 crypto_aegis_block_and(tmp0
, &state
->blocks
[3]);
284 crypto_aegis_block_xor(tmp0
, &state
->blocks
[6]);
285 crypto_aegis_block_xor(tmp0
, &state
->blocks
[1]);
286 crypto_aegis_block_xor(&msg
.blocks
[0], tmp0
);
288 *tmp1
= state
->blocks
[6];
289 crypto_aegis_block_and(tmp1
, &state
->blocks
[7]);
290 crypto_aegis_block_xor(tmp1
, &state
->blocks
[5]);
291 crypto_aegis_block_xor(tmp1
, &state
->blocks
[2]);
292 crypto_aegis_block_xor(&msg
.blocks
[1], tmp1
);
294 memset(msg
.bytes
+ size
, 0, AEGIS128L_CHUNK_SIZE
- size
);
296 crypto_aegis128l_update_a(state
, &msg
);
298 memcpy(dst
, msg
.bytes
, size
);
302 static void crypto_aegis128l_process_ad(struct aegis_state
*state
,
303 struct scatterlist
*sg_src
,
304 unsigned int assoclen
)
306 struct scatter_walk walk
;
307 union aegis_chunk buf
;
308 unsigned int pos
= 0;
310 scatterwalk_start(&walk
, sg_src
);
311 while (assoclen
!= 0) {
312 unsigned int size
= scatterwalk_clamp(&walk
, assoclen
);
313 unsigned int left
= size
;
314 void *mapped
= scatterwalk_map(&walk
);
315 const u8
*src
= (const u8
*)mapped
;
317 if (pos
+ size
>= AEGIS128L_CHUNK_SIZE
) {
319 unsigned int fill
= AEGIS128L_CHUNK_SIZE
- pos
;
320 memcpy(buf
.bytes
+ pos
, src
, fill
);
321 crypto_aegis128l_update_a(state
, &buf
);
327 crypto_aegis128l_ad(state
, src
, left
);
328 src
+= left
& ~(AEGIS128L_CHUNK_SIZE
- 1);
329 left
&= AEGIS128L_CHUNK_SIZE
- 1;
332 memcpy(buf
.bytes
+ pos
, src
, left
);
336 scatterwalk_unmap(mapped
);
337 scatterwalk_advance(&walk
, size
);
338 scatterwalk_done(&walk
, 0, assoclen
);
342 memset(buf
.bytes
+ pos
, 0, AEGIS128L_CHUNK_SIZE
- pos
);
343 crypto_aegis128l_update_a(state
, &buf
);
347 static void crypto_aegis128l_process_crypt(struct aegis_state
*state
,
348 struct aead_request
*req
,
349 const struct aegis128l_ops
*ops
)
351 struct skcipher_walk walk
;
353 ops
->skcipher_walk_init(&walk
, req
, false);
355 while (walk
.nbytes
) {
356 unsigned int nbytes
= walk
.nbytes
;
358 if (nbytes
< walk
.total
)
359 nbytes
= round_down(nbytes
, walk
.stride
);
361 ops
->crypt_chunk(state
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
364 skcipher_walk_done(&walk
, walk
.nbytes
- nbytes
);
368 static void crypto_aegis128l_final(struct aegis_state
*state
,
369 union aegis_block
*tag_xor
,
370 u64 assoclen
, u64 cryptlen
)
372 u64 assocbits
= assoclen
* 8;
373 u64 cryptbits
= cryptlen
* 8;
375 union aegis_chunk tmp
;
378 tmp
.blocks
[0].words64
[0] = cpu_to_le64(assocbits
);
379 tmp
.blocks
[0].words64
[1] = cpu_to_le64(cryptbits
);
381 crypto_aegis_block_xor(&tmp
.blocks
[0], &state
->blocks
[2]);
383 tmp
.blocks
[1] = tmp
.blocks
[0];
384 for (i
= 0; i
< 7; i
++)
385 crypto_aegis128l_update_a(state
, &tmp
);
387 for (i
= 0; i
< 7; i
++)
388 crypto_aegis_block_xor(tag_xor
, &state
->blocks
[i
]);
391 static int crypto_aegis128l_setkey(struct crypto_aead
*aead
, const u8
*key
,
394 struct aegis_ctx
*ctx
= crypto_aead_ctx(aead
);
396 if (keylen
!= AEGIS128L_KEY_SIZE
) {
397 crypto_aead_set_flags(aead
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
401 memcpy(ctx
->key
.bytes
, key
, AEGIS128L_KEY_SIZE
);
405 static int crypto_aegis128l_setauthsize(struct crypto_aead
*tfm
,
406 unsigned int authsize
)
408 if (authsize
> AEGIS128L_MAX_AUTH_SIZE
)
410 if (authsize
< AEGIS128L_MIN_AUTH_SIZE
)
415 static void crypto_aegis128l_crypt(struct aead_request
*req
,
416 union aegis_block
*tag_xor
,
417 unsigned int cryptlen
,
418 const struct aegis128l_ops
*ops
)
420 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
421 struct aegis_ctx
*ctx
= crypto_aead_ctx(tfm
);
422 struct aegis_state state
;
424 crypto_aegis128l_init(&state
, &ctx
->key
, req
->iv
);
425 crypto_aegis128l_process_ad(&state
, req
->src
, req
->assoclen
);
426 crypto_aegis128l_process_crypt(&state
, req
, ops
);
427 crypto_aegis128l_final(&state
, tag_xor
, req
->assoclen
, cryptlen
);
430 static int crypto_aegis128l_encrypt(struct aead_request
*req
)
432 static const struct aegis128l_ops ops
= {
433 .skcipher_walk_init
= skcipher_walk_aead_encrypt
,
434 .crypt_chunk
= crypto_aegis128l_encrypt_chunk
,
437 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
438 union aegis_block tag
= {};
439 unsigned int authsize
= crypto_aead_authsize(tfm
);
440 unsigned int cryptlen
= req
->cryptlen
;
442 crypto_aegis128l_crypt(req
, &tag
, cryptlen
, &ops
);
444 scatterwalk_map_and_copy(tag
.bytes
, req
->dst
, req
->assoclen
+ cryptlen
,
449 static int crypto_aegis128l_decrypt(struct aead_request
*req
)
451 static const struct aegis128l_ops ops
= {
452 .skcipher_walk_init
= skcipher_walk_aead_decrypt
,
453 .crypt_chunk
= crypto_aegis128l_decrypt_chunk
,
455 static const u8 zeros
[AEGIS128L_MAX_AUTH_SIZE
] = {};
457 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
458 union aegis_block tag
;
459 unsigned int authsize
= crypto_aead_authsize(tfm
);
460 unsigned int cryptlen
= req
->cryptlen
- authsize
;
462 scatterwalk_map_and_copy(tag
.bytes
, req
->src
, req
->assoclen
+ cryptlen
,
465 crypto_aegis128l_crypt(req
, &tag
, cryptlen
, &ops
);
467 return crypto_memneq(tag
.bytes
, zeros
, authsize
) ? -EBADMSG
: 0;
470 static int crypto_aegis128l_init_tfm(struct crypto_aead
*tfm
)
475 static void crypto_aegis128l_exit_tfm(struct crypto_aead
*tfm
)
479 static struct aead_alg crypto_aegis128l_alg
= {
480 .setkey
= crypto_aegis128l_setkey
,
481 .setauthsize
= crypto_aegis128l_setauthsize
,
482 .encrypt
= crypto_aegis128l_encrypt
,
483 .decrypt
= crypto_aegis128l_decrypt
,
484 .init
= crypto_aegis128l_init_tfm
,
485 .exit
= crypto_aegis128l_exit_tfm
,
487 .ivsize
= AEGIS128L_NONCE_SIZE
,
488 .maxauthsize
= AEGIS128L_MAX_AUTH_SIZE
,
489 .chunksize
= AEGIS128L_CHUNK_SIZE
,
493 .cra_ctxsize
= sizeof(struct aegis_ctx
),
498 .cra_name
= "aegis128l",
499 .cra_driver_name
= "aegis128l-generic",
501 .cra_module
= THIS_MODULE
,
505 static int __init
crypto_aegis128l_module_init(void)
507 return crypto_register_aead(&crypto_aegis128l_alg
);
510 static void __exit
crypto_aegis128l_module_exit(void)
512 crypto_unregister_aead(&crypto_aegis128l_alg
);
515 module_init(crypto_aegis128l_module_init
);
516 module_exit(crypto_aegis128l_module_exit
);
518 MODULE_LICENSE("GPL");
519 MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
520 MODULE_DESCRIPTION("AEGIS-128L AEAD algorithm");
521 MODULE_ALIAS_CRYPTO("aegis128l");
522 MODULE_ALIAS_CRYPTO("aegis128l-generic");