// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with ARMv8 vmull.p64 instructions.
 *
 * Copyright (C) 2015 - 2018 Linaro Ltd.
 * Copyright (C) 2023 Google LLC.
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <linux/unaligned.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/b128ops.h>
#include <crypto/cryptd.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/gf128mul.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/jump_label.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH hash function using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ardb@kernel.org>");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("ghash");
MODULE_ALIAS_CRYPTO("gcm(aes)");
MODULE_ALIAS_CRYPTO("rfc4106(gcm(aes))");

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

#define RFC4106_NONCE_SIZE	4
struct ghash_key {
	be128	k;
	u64	h[][2];
};

struct gcm_key {
	u64	h[4][2];
	u32	rk[AES_MAX_KEYLENGTH_U32];
	int	rounds;
	u8	nonce[];	// for RFC4106 nonce
};

struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       u64 const h[][2], const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      u64 const h[][2], const char *head);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_p64);
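
/*
 * use_p64 selects between the two GHASH cores declared above: the p64 variant
 * relies on the 64x64->128 bit vmull.p64 instruction from the ARMv8 Crypto
 * Extensions, while the p8 variant is a slower fallback built from NEON
 * vmull.p8 polynomial multiplies. The key is enabled from ghash_ce_mod_init()
 * when HWCAP2_PMULL is advertised.
 */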
static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
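
/*
 * Process 'blocks' full GHASH blocks from 'src' into the digest dg[], with an
 * optional extra block in 'head' consumed first. When NEON is usable the
 * accelerated pmull routines run under kernel_neon_begin()/end(); otherwise
 * the generic gf128mul_lle() fallback operates on key->k.
 */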
static void ghash_do_update(int blocks, u64 dg[], const char *src,
			    struct ghash_key *key, const char *head)
{
	if (likely(crypto_simd_usable())) {
		kernel_neon_begin();
		if (static_branch_likely(&use_p64))
			pmull_ghash_update_p64(blocks, dg, src, key->h, head);
		else
			pmull_ghash_update_p8(blocks, dg, src, key->h, head);
		kernel_neon_end();
	} else {
		be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

		do {
			const u8 *in = src;

			if (head) {
				in = head;
				blocks++;
				head = NULL;
			} else {
				src += GHASH_BLOCK_SIZE;
			}

			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
			gf128mul_lle(&dst, &key->k);
		} while (--blocks);

		dg[0] = be64_to_cpu(dst.b);
		dg[1] = be64_to_cpu(dst.a);
	}
}
static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		ghash_do_update(blocks, ctx->digest, src, key,
				partial ? ctx->buf : NULL);

		src += blocks * GHASH_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}
static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL);
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
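
/*
 * ghash_reflect() converts a hash key (or a power of it) from the canonical
 * be128 form used by gf128mul_lle() into the pre-shifted two-word layout
 * consumed by the pmull assembly: the value is shifted left by one bit and,
 * when a bit is shifted out, the GHASH reduction constant 0xc2 << 56 is
 * folded into the high word. This matches the bit-reflected representation
 * commonly used by carry-less multiplication implementations of GHASH.
 */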
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) >> 63;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/* needed for the fallback */
	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);
	ghash_reflect(key->h[0], &key->k);

	if (static_branch_likely(&use_p64)) {
		be128 h = key->k;

		gf128mul_lle(&h, &key->k);
		ghash_reflect(key->h[1], &h);

		gf128mul_lle(&h, &key->k);
		ghash_reflect(key->h[2], &h);

		gf128mul_lle(&h, &key->k);
		ghash_reflect(key->h[3], &h);
	}
	return 0;
}
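
/*
 * With vmull.p64 available, key->h[] ends up holding the reflected powers
 * H^1..H^4 of the hash key, which the p64 assembly can use to process
 * multiple blocks per reduction. Without it only H^1 is stored, which is why
 * cra_ctxsize below covers a single u64[2] entry and ghash_ce_mod_init()
 * grows it by three more entries when HWCAP2_PMULL is set.
 */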
static struct shash_alg ghash_alg = {
	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update,
	.final			= ghash_final,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),

	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-ce-sync",
	.base.cra_priority	= 300 - 1,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key) + sizeof(u64[2]),
	.base.cra_module	= THIS_MODULE,
};
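
/*
 * The "ghash-ce" ahash below is an async wrapper around the "ghash-ce-sync"
 * shash: when NEON cannot be used in the current context (or cryptd already
 * has requests queued while we are in atomic context), the request is bounced
 * to cryptd; otherwise the synchronous code is called directly.
 */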
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	return crypto_shash_init(desc);
}
static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return shash_ahash_update(req, desc);
	}
}
static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return crypto_shash_final(desc, req->result);
	}
}
static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		return shash_ahash_digest(req, desc);
	}
}
static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);

	return crypto_shash_import(desc, in);
}
static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	return crypto_shash_export(desc, out);
}
static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	return crypto_ahash_setkey(child, key, keylen);
}
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("ghash-ce-sync", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}
static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}
static struct ahash_alg ghash_async_alg = {
	.init			= ghash_async_init,
	.update			= ghash_async_update,
	.final			= ghash_async_final,
	.setkey			= ghash_async_setkey,
	.digest			= ghash_async_digest,
	.import			= ghash_async_import,
	.export			= ghash_async_export,
	.halg.digestsize	= GHASH_DIGEST_SIZE,
	.halg.statesize		= sizeof(struct ghash_desc_ctx),
	.halg.base		= {
		.cra_name	= "ghash",
		.cra_driver_name = "ghash-ce",
		.cra_priority	= 300,
		.cra_flags	= CRYPTO_ALG_ASYNC,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_async_ctx),
		.cra_module	= THIS_MODULE,
		.cra_init	= ghash_async_init_tfm,
		.cra_exit	= ghash_async_exit_tfm,
	},
};
void pmull_gcm_encrypt(int blocks, u64 dg[], const char *src,
		       struct gcm_key const *k, char *dst,
		       const char *iv, int rounds, u32 counter);

void pmull_gcm_enc_final(int blocks, u64 dg[], char *tag,
			 struct gcm_key const *k, char *head,
			 const char *iv, int rounds, u32 counter);

void pmull_gcm_decrypt(int bytes, u64 dg[], const char *src,
		       struct gcm_key const *k, char *dst,
		       const char *iv, int rounds, u32 counter);

int pmull_gcm_dec_final(int bytes, u64 dg[], char *tag,
			struct gcm_key const *k, char *head,
			const char *iv, int rounds, u32 counter,
			const char *otag, int authsize);
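
/*
 * gcm_aes_setkey() expands the AES key, derives the GHASH key H by encrypting
 * an all-zero block, and precomputes the reflected powers of H in ctx->h[],
 * mirroring what ghash_setkey() does for the standalone hash.
 */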
static int gcm_aes_setkey(struct crypto_aead *tfm, const u8 *inkey,
			  unsigned int keylen)
{
	struct gcm_key *ctx = crypto_aead_ctx(tfm);
	struct crypto_aes_ctx aes_ctx;
	be128 h, k;
	int ret;

	ret = aes_expandkey(&aes_ctx, inkey, keylen);
	if (ret)
		return -EINVAL;

	aes_encrypt(&aes_ctx, (u8 *)&k, (u8[AES_BLOCK_SIZE]){});

	memcpy(ctx->rk, aes_ctx.key_enc, sizeof(ctx->rk));
	ctx->rounds = 6 + keylen / 4;

	memzero_explicit(&aes_ctx, sizeof(aes_ctx));

	ghash_reflect(ctx->h[0], &k);

	h = k;
	gf128mul_lle(&h, &k);
	ghash_reflect(ctx->h[1], &h);

	gf128mul_lle(&h, &k);
	ghash_reflect(ctx->h[2], &h);

	gf128mul_lle(&h, &k);
	ghash_reflect(ctx->h[3], &h);

	return 0;
}
static int gcm_aes_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
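
/*
 * gcm_update_mac() folds 'count' bytes of associated data into the GHASH
 * state, buffering any partial block in buf[]/buf_count so that only whole
 * 16-byte blocks are ever passed to the pmull routine.
 */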
static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
			   int *buf_count, struct gcm_key *ctx)
{
	if (*buf_count > 0) {
		int buf_added = min(count, GHASH_BLOCK_SIZE - *buf_count);

		memcpy(&buf[*buf_count], src, buf_added);

		*buf_count += buf_added;
		src += buf_added;
		count -= buf_added;
	}

	if (count >= GHASH_BLOCK_SIZE || *buf_count == GHASH_BLOCK_SIZE) {
		int blocks = count / GHASH_BLOCK_SIZE;

		pmull_ghash_update_p64(blocks, dg, src, ctx->h,
				       *buf_count ? buf : NULL);

		src += blocks * GHASH_BLOCK_SIZE;
		count %= GHASH_BLOCK_SIZE;
		*buf_count = 0;
	}

	if (count > 0) {
		memcpy(buf, src, count);
		*buf_count = count;
	}
}
static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[], u32 len)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_key *ctx = crypto_aead_ctx(aead);
	u8 buf[GHASH_BLOCK_SIZE];
	struct scatter_walk walk;
	int buf_count = 0;

	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}

		p = scatterwalk_map(&walk);
		gcm_update_mac(dg, p, n, buf, &buf_count, ctx);
		scatterwalk_unmap(p);

		if (unlikely(len / SZ_4K > (len - n) / SZ_4K)) {
			kernel_neon_end();
			kernel_neon_begin();
		}

		len -= n;
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);

	if (buf_count) {
		memset(&buf[buf_count], 0, GHASH_BLOCK_SIZE - buf_count);
		pmull_ghash_update_p64(1, dg, buf, ctx->h, NULL);
	}
}
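
/*
 * GCM encryption: full AES blocks are handled by the fused AES/GHASH assembly
 * while walking the scatterlists, and the final partial block plus the
 * authentication tag are produced by pmull_gcm_enc_final(). The CTR counter
 * starts at 2 because counter block 1 is reserved for encrypting the tag, as
 * the GCM specification requires. NEON is released around the intermediate
 * skcipher_walk_done() calls so the walk code runs outside the NEON region.
 */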
static int gcm_encrypt(struct aead_request *req, const u8 *iv, u32 assoclen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_key *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	u32 counter = 2;
	u64 dg[2] = {};
	be128 lengths;
	const u8 *src;
	u8 *tag, *dst;
	int tail, err;

	if (WARN_ON_ONCE(!may_use_simd()))
		return -EBUSY;

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	kernel_neon_begin();

	if (assoclen)
		gcm_calculate_auth_mac(req, dg, assoclen);

	src = walk.src.virt.addr;
	dst = walk.dst.virt.addr;

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		int nblocks = walk.nbytes / AES_BLOCK_SIZE;

		pmull_gcm_encrypt(nblocks, dg, src, ctx, dst, iv,
				  ctx->rounds, counter);
		counter += nblocks;

		if (walk.nbytes == walk.total) {
			src += nblocks * AES_BLOCK_SIZE;
			dst += nblocks * AES_BLOCK_SIZE;
			break;
		}

		kernel_neon_end();

		err = skcipher_walk_done(&walk,
					 walk.nbytes % AES_BLOCK_SIZE);
		if (err)
			return err;

		src = walk.src.virt.addr;
		dst = walk.dst.virt.addr;

		kernel_neon_begin();
	}

	lengths.a = cpu_to_be64(assoclen * 8);
	lengths.b = cpu_to_be64(req->cryptlen * 8);

	tag = (u8 *)&lengths;
	tail = walk.nbytes % AES_BLOCK_SIZE;

	/*
	 * Bounce via a buffer unless we are encrypting in place and src/dst
	 * are not pointing to the start of the walk buffer. In that case, we
	 * can do a NEON load/xor/store sequence in place as long as we move
	 * the plain/ciphertext and keystream to the start of the register. If
	 * not, do a memcpy() to the end of the buffer so we can reuse the same
	 * logic.
	 */
	if (unlikely(tail && (tail == walk.nbytes || src != dst)))
		src = memcpy(buf + sizeof(buf) - tail, src, tail);

	pmull_gcm_enc_final(tail, dg, tag, ctx, (u8 *)src, iv,
			    ctx->rounds, counter);
	kernel_neon_end();

	if (unlikely(tail && src != dst))
		memcpy(dst, src, tail);

	if (walk.nbytes) {
		err = skcipher_walk_done(&walk, 0);
		if (err)
			return err;
	}

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}
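
/*
 * Decryption mirrors gcm_encrypt(), except that the expected tag is copied
 * out of the source scatterlist up front and passed to pmull_gcm_dec_final(),
 * which performs the tag comparison; a nonzero result is reported as
 * -EBADMSG.
 */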
static int gcm_decrypt(struct aead_request *req, const u8 *iv, u32 assoclen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_key *ctx = crypto_aead_ctx(aead);
	int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 otag[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 counter = 2;
	u64 dg[2] = {};
	be128 lengths;
	const u8 *src;
	u8 *tag, *dst;
	int tail, err, ret;

	if (WARN_ON_ONCE(!may_use_simd()))
		return -EBUSY;

	scatterwalk_map_and_copy(otag, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	kernel_neon_begin();

	if (assoclen)
		gcm_calculate_auth_mac(req, dg, assoclen);

	src = walk.src.virt.addr;
	dst = walk.dst.virt.addr;

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		int nblocks = walk.nbytes / AES_BLOCK_SIZE;

		pmull_gcm_decrypt(nblocks, dg, src, ctx, dst, iv,
				  ctx->rounds, counter);
		counter += nblocks;

		if (walk.nbytes == walk.total) {
			src += nblocks * AES_BLOCK_SIZE;
			dst += nblocks * AES_BLOCK_SIZE;
			break;
		}

		kernel_neon_end();

		err = skcipher_walk_done(&walk,
					 walk.nbytes % AES_BLOCK_SIZE);
		if (err)
			return err;

		src = walk.src.virt.addr;
		dst = walk.dst.virt.addr;

		kernel_neon_begin();
	}

	lengths.a = cpu_to_be64(assoclen * 8);
	lengths.b = cpu_to_be64((req->cryptlen - authsize) * 8);

	tag = (u8 *)&lengths;
	tail = walk.nbytes % AES_BLOCK_SIZE;

	if (unlikely(tail && (tail == walk.nbytes || src != dst)))
		src = memcpy(buf + sizeof(buf) - tail, src, tail);

	ret = pmull_gcm_dec_final(tail, dg, tag, ctx, (u8 *)src, iv,
				  ctx->rounds, counter, otag, authsize);
	kernel_neon_end();

	if (unlikely(tail && src != dst))
		memcpy(dst, src, tail);

	if (walk.nbytes) {
		err = skcipher_walk_done(&walk, 0);
		if (err)
			return err;
	}

	return ret ? -EBADMSG : 0;
}
static int gcm_aes_encrypt(struct aead_request *req)
{
	return gcm_encrypt(req, req->iv, req->assoclen);
}

static int gcm_aes_decrypt(struct aead_request *req)
{
	return gcm_decrypt(req, req->iv, req->assoclen);
}
static int rfc4106_setkey(struct crypto_aead *tfm, const u8 *inkey,
			  unsigned int keylen)
{
	struct gcm_key *ctx = crypto_aead_ctx(tfm);
	int err;

	keylen -= RFC4106_NONCE_SIZE;
	err = gcm_aes_setkey(tfm, inkey, keylen);
	if (err)
		return err;

	memcpy(ctx->nonce, inkey + keylen, RFC4106_NONCE_SIZE);
	return 0;
}
static int rfc4106_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
static int rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_key *ctx = crypto_aead_ctx(aead);
	u8 iv[GCM_AES_IV_SIZE];

	memcpy(iv, ctx->nonce, RFC4106_NONCE_SIZE);
	memcpy(iv + RFC4106_NONCE_SIZE, req->iv, GCM_RFC4106_IV_SIZE);

	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       gcm_encrypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE);
}

static int rfc4106_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_key *ctx = crypto_aead_ctx(aead);
	u8 iv[GCM_AES_IV_SIZE];

	memcpy(iv, ctx->nonce, RFC4106_NONCE_SIZE);
	memcpy(iv + RFC4106_NONCE_SIZE, req->iv, GCM_RFC4106_IV_SIZE);

	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       gcm_decrypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE);
}
static struct aead_alg gcm_aes_algs[] = {{
	.ivsize			= GCM_AES_IV_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.maxauthsize		= AES_BLOCK_SIZE,
	.setkey			= gcm_aes_setkey,
	.setauthsize		= gcm_aes_setauthsize,
	.encrypt		= gcm_aes_encrypt,
	.decrypt		= gcm_aes_decrypt,

	.base.cra_name		= "gcm(aes)",
	.base.cra_driver_name	= "gcm-aes-ce",
	.base.cra_priority	= 400,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct gcm_key),
	.base.cra_module	= THIS_MODULE,
}, {
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.maxauthsize		= AES_BLOCK_SIZE,
	.setkey			= rfc4106_setkey,
	.setauthsize		= rfc4106_setauthsize,
	.encrypt		= rfc4106_encrypt,
	.decrypt		= rfc4106_decrypt,

	.base.cra_name		= "rfc4106(gcm(aes))",
	.base.cra_driver_name	= "rfc4106-gcm-aes-ce",
	.base.cra_priority	= 400,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct gcm_key) + RFC4106_NONCE_SIZE,
	.base.cra_module	= THIS_MODULE,
}};
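
/*
 * Illustrative usage: these algorithms are reached through the crypto API by
 * name, e.g. crypto_alloc_aead("gcm(aes)", 0, 0). Among all registered
 * gcm(aes) providers the one with the highest cra_priority is selected, which
 * is what the value of 400 above is for.
 */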
static int __init ghash_ce_mod_init(void)
{
	int err;

	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	if (elf_hwcap2 & HWCAP2_PMULL) {
		err = crypto_register_aeads(gcm_aes_algs,
					    ARRAY_SIZE(gcm_aes_algs));
		if (err)
			return err;
		ghash_alg.base.cra_ctxsize += 3 * sizeof(u64[2]);
		static_branch_enable(&use_p64);
	}

	err = crypto_register_shash(&ghash_alg);
	if (err)
		goto err_aead;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
err_aead:
	if (elf_hwcap2 & HWCAP2_PMULL)
		crypto_unregister_aeads(gcm_aes_algs,
					ARRAY_SIZE(gcm_aes_algs));
	return err;
}
static void __exit ghash_ce_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
	if (elf_hwcap2 & HWCAP2_PMULL)
		crypto_unregister_aeads(gcm_aes_algs,
					ARRAY_SIZE(gcm_aes_algs));
}
module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);