// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 * Glue for AES-NI + SSE2 implementation
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */
10 #include <crypto/internal/aead.h>
11 #include <crypto/internal/simd.h>
12 #include <crypto/internal/skcipher.h>
13 #include <crypto/scatterwalk.h>
14 #include <linux/module.h>
15 #include <asm/fpu/api.h>
16 #include <asm/cpu_device_id.h>
18 #define AEGIS128_BLOCK_ALIGN 16
19 #define AEGIS128_BLOCK_SIZE 16
20 #define AEGIS128_NONCE_SIZE 16
21 #define AEGIS128_STATE_BLOCKS 5
22 #define AEGIS128_KEY_SIZE 16
23 #define AEGIS128_MIN_AUTH_SIZE 8
24 #define AEGIS128_MAX_AUTH_SIZE 16
26 asmlinkage
void crypto_aegis128_aesni_init(void *state
, void *key
, void *iv
);
28 asmlinkage
void crypto_aegis128_aesni_ad(
29 void *state
, unsigned int length
, const void *data
);
31 asmlinkage
void crypto_aegis128_aesni_enc(
32 void *state
, unsigned int length
, const void *src
, void *dst
);
34 asmlinkage
void crypto_aegis128_aesni_dec(
35 void *state
, unsigned int length
, const void *src
, void *dst
);
37 asmlinkage
void crypto_aegis128_aesni_enc_tail(
38 void *state
, unsigned int length
, const void *src
, void *dst
);
40 asmlinkage
void crypto_aegis128_aesni_dec_tail(
41 void *state
, unsigned int length
, const void *src
, void *dst
);
43 asmlinkage
void crypto_aegis128_aesni_final(
44 void *state
, void *tag_xor
, unsigned int cryptlen
,
45 unsigned int assoclen
);
48 u8 bytes
[AEGIS128_BLOCK_SIZE
] __aligned(AEGIS128_BLOCK_ALIGN
);
52 struct aegis_block blocks
[AEGIS128_STATE_BLOCKS
];
56 struct aegis_block key
;
59 struct aegis_crypt_ops
{
60 int (*skcipher_walk_init
)(struct skcipher_walk
*walk
,
61 struct aead_request
*req
, bool atomic
);
63 void (*crypt_blocks
)(void *state
, unsigned int length
, const void *src
,
65 void (*crypt_tail
)(void *state
, unsigned int length
, const void *src
,
69 static void crypto_aegis128_aesni_process_ad(
70 struct aegis_state
*state
, struct scatterlist
*sg_src
,
71 unsigned int assoclen
)
73 struct scatter_walk walk
;
74 struct aegis_block buf
;
77 scatterwalk_start(&walk
, sg_src
);
78 while (assoclen
!= 0) {
79 unsigned int size
= scatterwalk_clamp(&walk
, assoclen
);
80 unsigned int left
= size
;
81 void *mapped
= scatterwalk_map(&walk
);
82 const u8
*src
= (const u8
*)mapped
;
84 if (pos
+ size
>= AEGIS128_BLOCK_SIZE
) {
86 unsigned int fill
= AEGIS128_BLOCK_SIZE
- pos
;
87 memcpy(buf
.bytes
+ pos
, src
, fill
);
88 crypto_aegis128_aesni_ad(state
,
96 crypto_aegis128_aesni_ad(state
, left
, src
);
98 src
+= left
& ~(AEGIS128_BLOCK_SIZE
- 1);
99 left
&= AEGIS128_BLOCK_SIZE
- 1;
102 memcpy(buf
.bytes
+ pos
, src
, left
);
106 scatterwalk_unmap(mapped
);
107 scatterwalk_advance(&walk
, size
);
108 scatterwalk_done(&walk
, 0, assoclen
);
112 memset(buf
.bytes
+ pos
, 0, AEGIS128_BLOCK_SIZE
- pos
);
113 crypto_aegis128_aesni_ad(state
, AEGIS128_BLOCK_SIZE
, buf
.bytes
);
117 static void crypto_aegis128_aesni_process_crypt(
118 struct aegis_state
*state
, struct skcipher_walk
*walk
,
119 const struct aegis_crypt_ops
*ops
)
121 while (walk
->nbytes
>= AEGIS128_BLOCK_SIZE
) {
122 ops
->crypt_blocks(state
,
123 round_down(walk
->nbytes
, AEGIS128_BLOCK_SIZE
),
124 walk
->src
.virt
.addr
, walk
->dst
.virt
.addr
);
125 skcipher_walk_done(walk
, walk
->nbytes
% AEGIS128_BLOCK_SIZE
);
129 ops
->crypt_tail(state
, walk
->nbytes
, walk
->src
.virt
.addr
,
130 walk
->dst
.virt
.addr
);
131 skcipher_walk_done(walk
, 0);
135 static struct aegis_ctx
*crypto_aegis128_aesni_ctx(struct crypto_aead
*aead
)
137 u8
*ctx
= crypto_aead_ctx(aead
);
138 ctx
= PTR_ALIGN(ctx
, __alignof__(struct aegis_ctx
));
142 static int crypto_aegis128_aesni_setkey(struct crypto_aead
*aead
, const u8
*key
,
145 struct aegis_ctx
*ctx
= crypto_aegis128_aesni_ctx(aead
);
147 if (keylen
!= AEGIS128_KEY_SIZE
) {
148 crypto_aead_set_flags(aead
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
152 memcpy(ctx
->key
.bytes
, key
, AEGIS128_KEY_SIZE
);
157 static int crypto_aegis128_aesni_setauthsize(struct crypto_aead
*tfm
,
158 unsigned int authsize
)
160 if (authsize
> AEGIS128_MAX_AUTH_SIZE
)
162 if (authsize
< AEGIS128_MIN_AUTH_SIZE
)
167 static void crypto_aegis128_aesni_crypt(struct aead_request
*req
,
168 struct aegis_block
*tag_xor
,
169 unsigned int cryptlen
,
170 const struct aegis_crypt_ops
*ops
)
172 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
173 struct aegis_ctx
*ctx
= crypto_aegis128_aesni_ctx(tfm
);
174 struct skcipher_walk walk
;
175 struct aegis_state state
;
177 ops
->skcipher_walk_init(&walk
, req
, true);
181 crypto_aegis128_aesni_init(&state
, ctx
->key
.bytes
, req
->iv
);
182 crypto_aegis128_aesni_process_ad(&state
, req
->src
, req
->assoclen
);
183 crypto_aegis128_aesni_process_crypt(&state
, &walk
, ops
);
184 crypto_aegis128_aesni_final(&state
, tag_xor
, req
->assoclen
, cryptlen
);
189 static int crypto_aegis128_aesni_encrypt(struct aead_request
*req
)
191 static const struct aegis_crypt_ops OPS
= {
192 .skcipher_walk_init
= skcipher_walk_aead_encrypt
,
193 .crypt_blocks
= crypto_aegis128_aesni_enc
,
194 .crypt_tail
= crypto_aegis128_aesni_enc_tail
,
197 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
198 struct aegis_block tag
= {};
199 unsigned int authsize
= crypto_aead_authsize(tfm
);
200 unsigned int cryptlen
= req
->cryptlen
;
202 crypto_aegis128_aesni_crypt(req
, &tag
, cryptlen
, &OPS
);
204 scatterwalk_map_and_copy(tag
.bytes
, req
->dst
,
205 req
->assoclen
+ cryptlen
, authsize
, 1);
209 static int crypto_aegis128_aesni_decrypt(struct aead_request
*req
)
211 static const struct aegis_block zeros
= {};
213 static const struct aegis_crypt_ops OPS
= {
214 .skcipher_walk_init
= skcipher_walk_aead_decrypt
,
215 .crypt_blocks
= crypto_aegis128_aesni_dec
,
216 .crypt_tail
= crypto_aegis128_aesni_dec_tail
,
219 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
220 struct aegis_block tag
;
221 unsigned int authsize
= crypto_aead_authsize(tfm
);
222 unsigned int cryptlen
= req
->cryptlen
- authsize
;
224 scatterwalk_map_and_copy(tag
.bytes
, req
->src
,
225 req
->assoclen
+ cryptlen
, authsize
, 0);
227 crypto_aegis128_aesni_crypt(req
, &tag
, cryptlen
, &OPS
);
229 return crypto_memneq(tag
.bytes
, zeros
.bytes
, authsize
) ? -EBADMSG
: 0;
/* No per-instance setup needed; the context holds only the key. */
static int crypto_aegis128_aesni_init_tfm(struct crypto_aead *aead)
{
	return 0;
}
/* Nothing to release; context owns no resources beyond the key bytes. */
static void crypto_aegis128_aesni_exit_tfm(struct crypto_aead *aead)
{
}
241 static struct aead_alg crypto_aegis128_aesni_alg
= {
242 .setkey
= crypto_aegis128_aesni_setkey
,
243 .setauthsize
= crypto_aegis128_aesni_setauthsize
,
244 .encrypt
= crypto_aegis128_aesni_encrypt
,
245 .decrypt
= crypto_aegis128_aesni_decrypt
,
246 .init
= crypto_aegis128_aesni_init_tfm
,
247 .exit
= crypto_aegis128_aesni_exit_tfm
,
249 .ivsize
= AEGIS128_NONCE_SIZE
,
250 .maxauthsize
= AEGIS128_MAX_AUTH_SIZE
,
251 .chunksize
= AEGIS128_BLOCK_SIZE
,
254 .cra_flags
= CRYPTO_ALG_INTERNAL
,
256 .cra_ctxsize
= sizeof(struct aegis_ctx
) +
257 __alignof__(struct aegis_ctx
),
261 .cra_name
= "__aegis128",
262 .cra_driver_name
= "__aegis128-aesni",
264 .cra_module
= THIS_MODULE
,
/* Handle for the SIMD wrapper created by simd_register_aeads_compat(). */
static struct simd_aead_alg *simd_alg;
270 static int __init
crypto_aegis128_aesni_module_init(void)
272 if (!boot_cpu_has(X86_FEATURE_XMM2
) ||
273 !boot_cpu_has(X86_FEATURE_AES
) ||
274 !cpu_has_xfeatures(XFEATURE_MASK_SSE
, NULL
))
277 return simd_register_aeads_compat(&crypto_aegis128_aesni_alg
, 1,
281 static void __exit
crypto_aegis128_aesni_module_exit(void)
283 simd_unregister_aeads(&crypto_aegis128_aesni_alg
, 1, &simd_alg
);
286 module_init(crypto_aegis128_aesni_module_init
);
287 module_exit(crypto_aegis128_aesni_module_exit
);
289 MODULE_LICENSE("GPL");
290 MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
291 MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm -- AESNI+SSE2 implementation");
292 MODULE_ALIAS_CRYPTO("aegis128");
293 MODULE_ALIAS_CRYPTO("aegis128-aesni");