// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with ARMv8 PMULL instructions.
 *
 * Copyright (C) 2014 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>
MODULE_DESCRIPTION("GHASH and AES-GCM using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");
#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16
#define GCM_IV_SIZE		12
struct ghash_key {
	u64			h[2];
	u64			h2[2];
	u64			h3[2];
	u64			h4[2];

	be128			k;
};

struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

struct gcm_aes_ctx {
	struct crypto_aes_ctx	aes_key;
	struct ghash_key	ghash_key;
};
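/* The primitives below are implemented in the accompanying assembly code. */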
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       struct ghash_key const *k,
				       const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      struct ghash_key const *k,
				      const char *head);
asmlinkage void pmull_gcm_encrypt(int blocks, u64 dg[], u8 dst[],
				  const u8 src[], struct ghash_key const *k,
				  u8 ctr[], u32 const rk[], int rounds,
				  u8 ks[]);
asmlinkage void pmull_gcm_decrypt(int blocks, u64 dg[], u8 dst[],
				  const u8 src[], struct ghash_key const *k,
				  u8 ctr[], u32 const rk[], int rounds);

asmlinkage void pmull_gcm_encrypt_block(u8 dst[], u8 const src[],
					u32 const rk[], int rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
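/*
 * Process 'blocks' full GHASH blocks: use the SIMD helper when the NEON
 * unit may be used, otherwise fall back to the generic gf128mul routines.
 */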
static void ghash_do_update(int blocks, u64 dg[], const char *src,
			    struct ghash_key *key, const char *head,
			    void (*simd_update)(int blocks, u64 dg[],
						const char *src,
						struct ghash_key const *k,
						const char *head))
{
	if (likely(crypto_simd_usable())) {
		kernel_neon_begin();
		simd_update(blocks, dg, src, key, head);
		kernel_neon_end();
	} else {
		be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

		do {
			const u8 *in = src;

			if (head) {
				in = head;
				blocks++;
				head = NULL;
			} else {
				src += GHASH_BLOCK_SIZE;
			}

			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
			gf128mul_lle(&dst, &key->k);
		} while (--blocks);

		dg[0] = be64_to_cpu(dst.b);
		dg[1] = be64_to_cpu(dst.a);
	}
}
/* avoid hogging the CPU for too long */
#define MAX_BLOCKS	(SZ_64K / GHASH_BLOCK_SIZE)
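/*
 * Buffer sub-block input and hash full blocks in chunks of at most
 * MAX_BLOCKS, so a single update call does not hog the CPU for too long.
 */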
static int __ghash_update(struct shash_desc *desc, const u8 *src,
			  unsigned int len,
			  void (*simd_update)(int blocks, u64 dg[],
					      const char *src,
					      struct ghash_key const *k,
					      const char *head))
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		do {
			int chunk = min(blocks, MAX_BLOCKS);

			ghash_do_update(chunk, ctx->digest, src, key,
					partial ? ctx->buf : NULL,
					simd_update);

			blocks -= chunk;
			src += chunk * GHASH_BLOCK_SIZE;
			partial = 0;
		} while (unlikely(blocks > 0));
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}
static int ghash_update_p8(struct shash_desc *desc, const u8 *src,
			   unsigned int len)
{
	return __ghash_update(desc, src, len, pmull_ghash_update_p8);
}
static int ghash_update_p64(struct shash_desc *desc, const u8 *src,
			    unsigned int len)
{
	return __ghash_update(desc, src, len, pmull_ghash_update_p64);
}
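/* Pad and hash any buffered partial block, then emit the 16-byte digest. */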
static int ghash_final_p8(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);

		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL,
				pmull_ghash_update_p8);
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
static int ghash_final_p64(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);

		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL,
				pmull_ghash_update_p64);
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
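/*
 * Shift the key left by one bit and fold the carry back in using the GHASH
 * reduction constant, producing the layout the PMULL routines expect.
 */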
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) & BIT(63) ? 1 : 0;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}
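/*
 * Store the raw key for the fallback path and precompute H, H^2, H^3 and
 * H^4 via ghash_reflect() for use by the assembly routines.
 */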
static int __ghash_setkey(struct ghash_key *key,
			  const u8 *inkey, unsigned int keylen)
{
	be128 h;

	/* needed for the fallback */
	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);

	ghash_reflect(key->h, &key->k);

	h = key->k;
	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h2, &h);

	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h3, &h);

	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h4, &h);

	return 0;
}
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);

	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	return __ghash_setkey(key, inkey, keylen);
}
static struct shash_alg ghash_alg[] = {{
	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-neon",
	.base.cra_priority	= 100,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update_p8,
	.final			= ghash_final_p8,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),
}, {
	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-ce",
	.base.cra_priority	= 200,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update_p64,
	.final			= ghash_final_p64,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),
}};
static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
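/* Expand the AES key and derive the GHASH key H by encrypting an all-zero block. */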
static int gcm_setkey(struct crypto_aead *tfm, const u8 *inkey,
		      unsigned int keylen)
{
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
	u8 key[GHASH_BLOCK_SIZE];
	int ret;

	ret = crypto_aes_expand_key(&ctx->aes_key, inkey, keylen);
	if (ret) {
		tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	__aes_arm64_encrypt(ctx->aes_key.key_enc, key, (u8[AES_BLOCK_SIZE]){},
			    num_rounds(&ctx->aes_key));

	return __ghash_setkey(&ctx->ghash_key, key, sizeof(be128));
}
static int gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12 ... 16:
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
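/*
 * Fold associated data into the GHASH state, buffering any trailing
 * partial block in 'buf' between calls.
 */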
static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
			   int *buf_count, struct gcm_aes_ctx *ctx)
{
	if (*buf_count > 0) {
		int buf_added = min(count, GHASH_BLOCK_SIZE - *buf_count);

		memcpy(&buf[*buf_count], src, buf_added);

		*buf_count += buf_added;
		src += buf_added;
		count -= buf_added;
	}

	if (count >= GHASH_BLOCK_SIZE || *buf_count == GHASH_BLOCK_SIZE) {
		int blocks = count / GHASH_BLOCK_SIZE;

		ghash_do_update(blocks, dg, src, &ctx->ghash_key,
				*buf_count ? buf : NULL,
				pmull_ghash_update_p64);

		src += blocks * GHASH_BLOCK_SIZE;
		count %= GHASH_BLOCK_SIZE;
		*buf_count = 0;
	}

	if (count > 0) {
		memcpy(buf, src, count);
		*buf_count = count;
	}
}
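/* Walk the associated data scatterlist and hash it into dg[]. */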
static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	u8 buf[GHASH_BLOCK_SIZE];
	struct scatter_walk walk;
	u32 len = req->assoclen;
	int buf_count = 0;

	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);

		gcm_update_mac(dg, p, n, buf, &buf_count, ctx);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);

	if (buf_count) {
		memset(&buf[buf_count], 0, GHASH_BLOCK_SIZE - buf_count);
		ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL,
				pmull_ghash_update_p64);
	}
}
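/*
 * Hash in the bit lengths of the associated data and the ciphertext, then
 * fold the result into the encrypted initial counter block to form the tag.
 */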
static void gcm_final(struct aead_request *req, struct gcm_aes_ctx *ctx,
		      u64 dg[], u8 tag[], int cryptlen)
{
	u8 mac[AES_BLOCK_SIZE];
	be128 lengths;

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(cryptlen * 8);

	ghash_do_update(1, dg, (void *)&lengths, &ctx->ghash_key, NULL,
			pmull_ghash_update_p64);

	put_unaligned_be64(dg[1], mac);
	put_unaligned_be64(dg[0], mac + 8);

	crypto_xor(tag, mac, AES_BLOCK_SIZE);
}
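/*
 * GCM encryption: the bulk of the data is handled two AES blocks at a time
 * by the PMULL/NEON code when available; otherwise a scalar CTR + GHASH
 * fallback is used. The tail and the tag are handled at the end.
 */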
static int gcm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 iv[AES_BLOCK_SIZE];
	u8 ks[2 * AES_BLOCK_SIZE];
	u8 tag[AES_BLOCK_SIZE];
	u64 dg[2] = {};
	int nrounds = num_rounds(&ctx->aes_key);
	int err;

	if (req->assoclen)
		gcm_calculate_auth_mac(req, dg);

	memcpy(iv, req->iv, GCM_IV_SIZE);
	put_unaligned_be32(1, iv + GCM_IV_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	if (likely(crypto_simd_usable() && walk.total >= 2 * AES_BLOCK_SIZE)) {
		u32 const *rk = NULL;

		kernel_neon_begin();
		pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc, nrounds);
		put_unaligned_be32(2, iv + GCM_IV_SIZE);
		pmull_gcm_encrypt_block(ks, iv, NULL, nrounds);
		put_unaligned_be32(3, iv + GCM_IV_SIZE);
		pmull_gcm_encrypt_block(ks + AES_BLOCK_SIZE, iv, NULL, nrounds);
		put_unaligned_be32(4, iv + GCM_IV_SIZE);

		do {
			int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;

			if (rk)
				kernel_neon_begin();

			pmull_gcm_encrypt(blocks, dg, walk.dst.virt.addr,
					  walk.src.virt.addr, &ctx->ghash_key,
					  iv, rk, nrounds, ks);
			kernel_neon_end();

			err = skcipher_walk_done(&walk,
					walk.nbytes % (2 * AES_BLOCK_SIZE));

			rk = ctx->aes_key.key_enc;
		} while (walk.nbytes >= 2 * AES_BLOCK_SIZE);
	} else {
		__aes_arm64_encrypt(ctx->aes_key.key_enc, tag, iv, nrounds);
		put_unaligned_be32(2, iv + GCM_IV_SIZE);

		while (walk.nbytes >= (2 * AES_BLOCK_SIZE)) {
			const int blocks =
				walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
			u8 *dst = walk.dst.virt.addr;
			u8 *src = walk.src.virt.addr;
			int remaining = blocks;

			do {
				__aes_arm64_encrypt(ctx->aes_key.key_enc,
						    ks, iv, nrounds);
				crypto_xor_cpy(dst, src, ks, AES_BLOCK_SIZE);
				crypto_inc(iv, AES_BLOCK_SIZE);

				dst += AES_BLOCK_SIZE;
				src += AES_BLOCK_SIZE;
			} while (--remaining > 0);

			ghash_do_update(blocks, dg,
					walk.dst.virt.addr, &ctx->ghash_key,
					NULL, pmull_ghash_update_p64);

			err = skcipher_walk_done(&walk,
					walk.nbytes % (2 * AES_BLOCK_SIZE));
		}

		if (walk.nbytes) {
			__aes_arm64_encrypt(ctx->aes_key.key_enc, ks, iv,
					    nrounds);
			if (walk.nbytes > AES_BLOCK_SIZE) {
				crypto_inc(iv, AES_BLOCK_SIZE);
				__aes_arm64_encrypt(ctx->aes_key.key_enc,
						    ks + AES_BLOCK_SIZE, iv,
						    nrounds);
			}
		}
	}

	/* handle the tail */
	if (walk.nbytes) {
		u8 buf[GHASH_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *dst = walk.dst.virt.addr;
		u8 *head = NULL;

		crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, ks,
			       walk.nbytes);

		if (walk.nbytes > GHASH_BLOCK_SIZE) {
			head = dst;
			dst += GHASH_BLOCK_SIZE;
			nbytes %= GHASH_BLOCK_SIZE;
		}

		memcpy(buf, dst, nbytes);
		memset(buf + nbytes, 0, GHASH_BLOCK_SIZE - nbytes);
		ghash_do_update(!!nbytes, dg, buf, &ctx->ghash_key, head,
				pmull_ghash_update_p64);

		err = skcipher_walk_done(&walk, 0);
	}

	if (err)
		return err;

	gcm_final(req, ctx, dg, tag, req->cryptlen);

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}
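/*
 * GCM decryption: hash the ciphertext and decrypt it in CTR mode (PMULL/NEON
 * fast path or scalar fallback), then verify the authentication tag against
 * the one stored at the end of the source data.
 */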
static int gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 iv[2 * AES_BLOCK_SIZE];
	u8 tag[AES_BLOCK_SIZE];
	u8 buf[2 * GHASH_BLOCK_SIZE];
	u64 dg[2] = {};
	int nrounds = num_rounds(&ctx->aes_key);
	int err;

	if (req->assoclen)
		gcm_calculate_auth_mac(req, dg);

	memcpy(iv, req->iv, GCM_IV_SIZE);
	put_unaligned_be32(1, iv + GCM_IV_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	if (likely(crypto_simd_usable() && walk.total >= 2 * AES_BLOCK_SIZE)) {
		u32 const *rk = NULL;

		kernel_neon_begin();
		pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc, nrounds);
		put_unaligned_be32(2, iv + GCM_IV_SIZE);

		do {
			int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
			int rem = walk.total - blocks * AES_BLOCK_SIZE;

			if (rk)
				kernel_neon_begin();

			pmull_gcm_decrypt(blocks, dg, walk.dst.virt.addr,
					  walk.src.virt.addr, &ctx->ghash_key,
					  iv, rk, nrounds);

			/* check if this is the final iteration of the loop */
			if (rem < (2 * AES_BLOCK_SIZE)) {
				u8 *iv2 = iv + AES_BLOCK_SIZE;

				if (rem > AES_BLOCK_SIZE) {
					memcpy(iv2, iv, AES_BLOCK_SIZE);
					crypto_inc(iv2, AES_BLOCK_SIZE);
				}

				pmull_gcm_encrypt_block(iv, iv, NULL, nrounds);

				if (rem > AES_BLOCK_SIZE)
					pmull_gcm_encrypt_block(iv2, iv2, NULL,
								nrounds);
			}

			kernel_neon_end();

			err = skcipher_walk_done(&walk,
					walk.nbytes % (2 * AES_BLOCK_SIZE));

			rk = ctx->aes_key.key_enc;
		} while (walk.nbytes >= 2 * AES_BLOCK_SIZE);
	} else {
		__aes_arm64_encrypt(ctx->aes_key.key_enc, tag, iv, nrounds);
		put_unaligned_be32(2, iv + GCM_IV_SIZE);

		while (walk.nbytes >= (2 * AES_BLOCK_SIZE)) {
			int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
			u8 *dst = walk.dst.virt.addr;
			u8 *src = walk.src.virt.addr;

			ghash_do_update(blocks, dg, walk.src.virt.addr,
					&ctx->ghash_key, NULL,
					pmull_ghash_update_p64);

			do {
				__aes_arm64_encrypt(ctx->aes_key.key_enc,
						    buf, iv, nrounds);
				crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE);
				crypto_inc(iv, AES_BLOCK_SIZE);

				dst += AES_BLOCK_SIZE;
				src += AES_BLOCK_SIZE;
			} while (--blocks > 0);

			err = skcipher_walk_done(&walk,
					walk.nbytes % (2 * AES_BLOCK_SIZE));
		}

		if (walk.nbytes) {
			if (walk.nbytes > AES_BLOCK_SIZE) {
				u8 *iv2 = iv + AES_BLOCK_SIZE;

				memcpy(iv2, iv, AES_BLOCK_SIZE);
				crypto_inc(iv2, AES_BLOCK_SIZE);

				__aes_arm64_encrypt(ctx->aes_key.key_enc, iv2,
						    iv2, nrounds);
			}
			__aes_arm64_encrypt(ctx->aes_key.key_enc, iv, iv,
					    nrounds);
		}
	}

	/* handle the tail */
	if (walk.nbytes) {
		const u8 *src = walk.src.virt.addr;
		const u8 *head = NULL;
		unsigned int nbytes = walk.nbytes;

		if (walk.nbytes > GHASH_BLOCK_SIZE) {
			head = src;
			src += GHASH_BLOCK_SIZE;
			nbytes %= GHASH_BLOCK_SIZE;
		}

		memcpy(buf, src, nbytes);
		memset(buf + nbytes, 0, GHASH_BLOCK_SIZE - nbytes);
		ghash_do_update(!!nbytes, dg, buf, &ctx->ghash_key, head,
				pmull_ghash_update_p64);

		crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, iv,
			       walk.nbytes);

		err = skcipher_walk_done(&walk, 0);
	}

	if (err)
		return err;

	gcm_final(req, ctx, dg, tag, req->cryptlen - authsize);

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(tag, buf, authsize))
		return -EBADMSG;

	return 0;
}
static struct aead_alg gcm_aes_alg = {
	.ivsize			= GCM_IV_SIZE,
	.chunksize		= 2 * AES_BLOCK_SIZE,
	.maxauthsize		= AES_BLOCK_SIZE,
	.setkey			= gcm_setkey,
	.setauthsize		= gcm_setauthsize,
	.encrypt		= gcm_encrypt,
	.decrypt		= gcm_decrypt,

	.base.cra_name		= "gcm(aes)",
	.base.cra_driver_name	= "gcm-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct gcm_aes_ctx),
	.base.cra_module	= THIS_MODULE,
};
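/*
 * Register both GHASH implementations when PMULL is available, otherwise
 * only the NEON fallback; the GCM AEAD additionally requires PMULL.
 */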
static int __init ghash_ce_mod_init(void)
{
	int ret;

	if (!cpu_have_named_feature(ASIMD))
		return -ENODEV;

	if (cpu_have_named_feature(PMULL))
		ret = crypto_register_shashes(ghash_alg,
					      ARRAY_SIZE(ghash_alg));
	else
		/* only register the first array element */
		ret = crypto_register_shash(ghash_alg);

	if (ret)
		return ret;

	if (cpu_have_named_feature(PMULL)) {
		ret = crypto_register_aead(&gcm_aes_alg);
		if (ret)
			crypto_unregister_shashes(ghash_alg,
						  ARRAY_SIZE(ghash_alg));
	}
	return ret;
}
static void __exit ghash_ce_mod_exit(void)
{
	if (cpu_have_named_feature(PMULL))
		crypto_unregister_shashes(ghash_alg, ARRAY_SIZE(ghash_alg));
	else
		crypto_unregister_shash(ghash_alg);
	crypto_unregister_aead(&gcm_aes_alg);
}
static const struct cpu_feature ghash_cpu_feature[] = {
	{ cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, ghash_cpu_feature);
module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);