/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4 Cipher Algorithm, using ARMv8 Crypto Extensions
 * as specified in
 * https://tools.ietf.org/id/draft-ribose-cfrg-sm4-10.html
 *
 * Copyright (C) 2022, Alibaba Group.
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/b128ops.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>

#define BYTES2BLKS(nbytes)	((nbytes) >> 4)

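/*
 * Low-level primitives implemented in the SM4-CE assembly.  The CBC and
 * CTR helpers take a whole-block count, while the CTS and XTS helpers take
 * a byte count so they can absorb the trailing partial block themselves.
 * The IV/tweak buffer is expected to be updated in place, which lets the
 * glue code below call them repeatedly across scatterwalk chunks.
 */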
asmlinkage void sm4_ce_expand_key(const u8 *key, u32 *rkey_enc, u32 *rkey_dec,
				  const u32 *fk, const u32 *ck);
asmlinkage void sm4_ce_crypt_block(const u32 *rkey, u8 *dst, const u8 *src);
asmlinkage void sm4_ce_crypt(const u32 *rkey, u8 *dst, const u8 *src,
			     unsigned int nblks);
asmlinkage void sm4_ce_cbc_enc(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblocks);
asmlinkage void sm4_ce_cbc_dec(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblocks);
asmlinkage void sm4_ce_cbc_cts_enc(const u32 *rkey, u8 *dst, const u8 *src,
				   u8 *iv, unsigned int nbytes);
asmlinkage void sm4_ce_cbc_cts_dec(const u32 *rkey, u8 *dst, const u8 *src,
				   u8 *iv, unsigned int nbytes);
asmlinkage void sm4_ce_ctr_enc(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblks);
asmlinkage void sm4_ce_xts_enc(const u32 *rkey1, u8 *dst, const u8 *src,
			       u8 *tweak, unsigned int nbytes,
			       const u32 *rkey2_enc);
asmlinkage void sm4_ce_xts_dec(const u32 *rkey1, u8 *dst, const u8 *src,
			       u8 *tweak, unsigned int nbytes,
			       const u32 *rkey2_enc);
asmlinkage void sm4_ce_mac_update(const u32 *rkey_enc, u8 *digest,
				  const u8 *src, unsigned int nblocks,
				  bool enc_before, bool enc_after);

EXPORT_SYMBOL(sm4_ce_expand_key);
EXPORT_SYMBOL(sm4_ce_crypt_block);
EXPORT_SYMBOL(sm4_ce_cbc_enc);

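/*
 * Per-tfm contexts: the plain SM4 modes reuse struct sm4_ctx from
 * <crypto/sm4.h>, XTS carries two independent SM4 key schedules, and the
 * MAC transforms append one or two block-sized constants (the CMAC/XCBC
 * subkeys) after the key schedule.
 */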
struct sm4_xts_ctx {
	struct sm4_ctx key1;
	struct sm4_ctx key2;
};

struct sm4_mac_tfm_ctx {
	struct sm4_ctx key;
	u8 __aligned(8) consts[];
};

struct sm4_mac_desc_ctx {
	unsigned int len;
	u8 digest[SM4_BLOCK_SIZE];
};

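/*
 * The key schedule is computed by the SM4 CE instructions themselves, so
 * every sm4_ce_expand_key() call must run inside a kernel_neon_begin()/
 * kernel_neon_end() section.
 */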
static int sm4_setkey(struct crypto_skcipher *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	kernel_neon_begin();
	sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
			  crypto_sm4_fk, crypto_sm4_ck);
	kernel_neon_end();

	return 0;
}

static int sm4_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
			  unsigned int key_len)
{
	struct sm4_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	if (key_len != SM4_KEY_SIZE * 2)
		return -EINVAL;

	ret = xts_verify_key(tfm, key, key_len);
	if (ret)
		return ret;

	kernel_neon_begin();
	sm4_ce_expand_key(key, ctx->key1.rkey_enc,
			  ctx->key1.rkey_dec, crypto_sm4_fk, crypto_sm4_ck);
	sm4_ce_expand_key(&key[SM4_KEY_SIZE], ctx->key2.rkey_enc,
			  ctx->key2.rkey_dec, crypto_sm4_fk, crypto_sm4_ck);
	kernel_neon_end();

	return 0;
}

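/*
 * All skcipher handlers below follow the same pattern: walk the request
 * with skcipher_walk_virt(), process as many whole blocks as the current
 * chunk holds while the NEON unit is held, then report the unprocessed
 * remainder back through skcipher_walk_done() so it is carried over into
 * the next chunk.
 */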
static int sm4_ecb_do_crypt(struct skcipher_request *req, const u32 *rkey)
{
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblks;

		kernel_neon_begin();

		nblks = BYTES2BLKS(nbytes);
		if (nblks) {
			sm4_ce_crypt(rkey, dst, src, nblks);
			nbytes -= nblks * SM4_BLOCK_SIZE;
		}

		kernel_neon_end();

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int sm4_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_ecb_do_crypt(req, ctx->rkey_enc);
}

static int sm4_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_ecb_do_crypt(req, ctx->rkey_dec);
}

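/*
 * CBC chains every block into the next one through walk.iv: the assembly
 * routines consume the IV and are expected to leave the last ciphertext
 * block behind in it, so consecutive scatterwalk chunks chain correctly.
 */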
static int sm4_cbc_crypt(struct skcipher_request *req,
			 struct sm4_ctx *ctx, bool encrypt)
{
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblocks;

		nblocks = nbytes / SM4_BLOCK_SIZE;
		if (nblocks) {
			kernel_neon_begin();

			if (encrypt)
				sm4_ce_cbc_enc(ctx->rkey_enc, dst, src,
					       walk.iv, nblocks);
			else
				sm4_ce_cbc_dec(ctx->rkey_dec, dst, src,
					       walk.iv, nblocks);

			kernel_neon_end();
		}

		err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
	}

	return err;
}

static int sm4_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_cbc_crypt(req, ctx, true);
}

static int sm4_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_cbc_crypt(req, ctx, false);
}

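/*
 * CBC with ciphertext stealing: everything up to the last two blocks goes
 * through plain CBC above, then sm4_ce_cbc_cts_enc/dec handle the final
 * full block plus the trailing partial block, using the same last-two-block
 * layout (CS3) as the generic cts template.
 */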
static int sm4_cbc_cts_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct scatterlist *src = req->src;
	struct scatterlist *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int cbc_blocks;
	int err;

	if (req->cryptlen < SM4_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == SM4_BLOCK_SIZE)
		return sm4_cbc_crypt(req, ctx, encrypt);

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	/* handle the CBC cryption part */
	cbc_blocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2;
	if (cbc_blocks) {
		skcipher_request_set_crypt(&subreq, src, dst,
					   cbc_blocks * SM4_BLOCK_SIZE,
					   req->iv);

		err = sm4_cbc_crypt(&subreq, ctx, encrypt);
		if (err)
			return err;

		dst = src = scatterwalk_ffwd(sg_src, src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * SM4_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();

	if (encrypt)
		sm4_ce_cbc_cts_enc(ctx->rkey_enc, walk.dst.virt.addr,
				   walk.src.virt.addr, walk.iv, walk.nbytes);
	else
		sm4_ce_cbc_cts_dec(ctx->rkey_dec, walk.dst.virt.addr,
				   walk.src.virt.addr, walk.iv, walk.nbytes);

	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int sm4_cbc_cts_encrypt(struct skcipher_request *req)
{
	return sm4_cbc_cts_crypt(req, true);
}

static int sm4_cbc_cts_decrypt(struct skcipher_request *req)
{
	return sm4_cbc_cts_crypt(req, false);
}

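/*
 * CTR mode: full blocks are handled by sm4_ce_ctr_enc(), which advances
 * the big-endian counter held in walk.iv as it goes.  A trailing partial
 * block is handled here by encrypting the counter into a keystream block
 * and XORing it with the remaining bytes.
 */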
static int sm4_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblks;

		kernel_neon_begin();

		nblks = BYTES2BLKS(nbytes);
		if (nblks) {
			sm4_ce_ctr_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
			dst += nblks * SM4_BLOCK_SIZE;
			src += nblks * SM4_BLOCK_SIZE;
			nbytes -= nblks * SM4_BLOCK_SIZE;
		}

		/* tail */
		if (walk.nbytes == walk.total && nbytes > 0) {
			u8 keystream[SM4_BLOCK_SIZE];

			sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
			crypto_inc(walk.iv, SM4_BLOCK_SIZE);
			crypto_xor_cpy(dst, src, keystream, nbytes);
			nbytes = 0;
		}

		kernel_neon_end();

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

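/*
 * XTS: the data is processed with key1 while the per-request tweak is
 * derived by encrypting the IV with key2.  rkey2_enc is only passed to the
 * assembly for the first call so the tweak is computed once; afterwards
 * the already-encrypted tweak carried in walk.iv is reused.  Lengths that
 * are not a multiple of the block size are finished with ciphertext
 * stealing over the last two blocks.
 */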
static int sm4_xts_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int tail = req->cryptlen % SM4_BLOCK_SIZE;
	const u32 *rkey2_enc = ctx->key2.rkey_enc;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	if (req->cryptlen < SM4_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int nblocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   nblocks * SM4_BLOCK_SIZE, req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false);
		if (err)
			return err;
	} else {
		tail = 0;
	}

	while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {
		if (nbytes < walk.total)
			nbytes &= ~(SM4_BLOCK_SIZE - 1);

		kernel_neon_begin();

		if (encrypt)
			sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
				       walk.src.virt.addr, walk.iv, nbytes,
				       rkey2_enc);
		else
			sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
				       walk.src.virt.addr, walk.iv, nbytes,
				       rkey2_enc);

		kernel_neon_end();

		rkey2_enc = NULL;

		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
		if (err)
			return err;
	}

	if (likely(tail == 0))
		return 0;

	/* handle ciphertext stealing */

	dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);

	skcipher_request_set_crypt(&subreq, src, dst, SM4_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();

	if (encrypt)
		sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
			       walk.src.virt.addr, walk.iv, walk.nbytes,
			       rkey2_enc);
	else
		sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
			       walk.src.virt.addr, walk.iv, walk.nbytes,
			       rkey2_enc);

	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int sm4_xts_encrypt(struct skcipher_request *req)
{
	return sm4_xts_crypt(req, true);
}

static int sm4_xts_decrypt(struct skcipher_request *req)
{
	return sm4_xts_crypt(req, false);
}

static struct skcipher_alg sm4_algs[] = {
	{
		.base = {
			.cra_name		= "ecb(sm4)",
			.cra_driver_name	= "ecb-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_ecb_encrypt,
		.decrypt	= sm4_ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "cbc(sm4)",
			.cra_driver_name	= "cbc-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_cbc_encrypt,
		.decrypt	= sm4_cbc_decrypt,
	}, {
		.base = {
			.cra_name		= "ctr(sm4)",
			.cra_driver_name	= "ctr-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= 1,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.chunksize	= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_ctr_crypt,
		.decrypt	= sm4_ctr_crypt,
	}, {
		.base = {
			.cra_name		= "cts(cbc(sm4))",
			.cra_driver_name	= "cts-cbc-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.walksize	= SM4_BLOCK_SIZE * 2,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_cbc_cts_encrypt,
		.decrypt	= sm4_cbc_cts_decrypt,
	}, {
		.base = {
			.cra_name		= "xts(sm4)",
			.cra_driver_name	= "xts-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_xts_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE * 2,
		.max_keysize	= SM4_KEY_SIZE * 2,
		.ivsize		= SM4_BLOCK_SIZE,
		.walksize	= SM4_BLOCK_SIZE * 2,
		.setkey		= sm4_xts_setkey,
		.encrypt	= sm4_xts_encrypt,
		.decrypt	= sm4_xts_decrypt,
	}
};

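/*
 * The remaining transforms are the block-cipher MACs built on SM4:
 * cmac(sm4) and xcbc(sm4) differ only in how their two final-block
 * subkeys are derived, while cbcmac(sm4) (a building block for CCM) is
 * plain CBC-MAC with no subkeys.  All three share the same init/update
 * helpers below.
 */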
static int sm4_cbcmac_setkey(struct crypto_shash *tfm, const u8 *key,
			     unsigned int key_len)
{
	struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	kernel_neon_begin();
	sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
			  crypto_sm4_fk, crypto_sm4_ck);
	kernel_neon_end();

	return 0;
}

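/*
 * CMAC subkey generation: L = E_K(0^128), then K1 = L * u and K2 = L * u^2
 * in GF(2^128) with the reduction polynomial x^128 + x^7 + x^2 + x + 1,
 * i.e. a one-bit left shift with a conditional XOR of 0x87 into the low
 * byte.  The two subkeys are stored back to back in ctx->consts.
 */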
static int sm4_cmac_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int key_len)
{
	struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	u64 a, b;

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	memset(consts, 0, SM4_BLOCK_SIZE);

	kernel_neon_begin();

	sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
			  crypto_sm4_fk, crypto_sm4_ck);

	/* encrypt the zero block */
	sm4_ce_crypt_block(ctx->key.rkey_enc, (u8 *)consts, (const u8 *)consts);

	kernel_neon_end();

	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
	a = be64_to_cpu(consts[0].a);
	b = be64_to_cpu(consts[0].b);
	consts[0].a = cpu_to_be64((a << 1) | (b >> 63));
	consts[0].b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));

	a = be64_to_cpu(consts[0].a);
	b = be64_to_cpu(consts[0].b);
	consts[1].a = cpu_to_be64((a << 1) | (b >> 63));
	consts[1].b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));

	return 0;
}

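/*
 * XCBC key derivation: K1 = E_K(0x01...01) becomes the actual MAC key,
 * while K2 = E_K(0x02...02) and K3 = E_K(0x03...03) are kept in
 * ctx->consts and XORed into the final block depending on whether the
 * message length is a multiple of the block size.
 */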
static int sm4_xcbc_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int key_len)
{
	struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	u8 __aligned(8) key2[SM4_BLOCK_SIZE];
	static u8 const ks[3][SM4_BLOCK_SIZE] = {
		{ [0 ... SM4_BLOCK_SIZE - 1] = 0x1},
		{ [0 ... SM4_BLOCK_SIZE - 1] = 0x2},
		{ [0 ... SM4_BLOCK_SIZE - 1] = 0x3},
	};

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	kernel_neon_begin();

	sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
			  crypto_sm4_fk, crypto_sm4_ck);

	sm4_ce_crypt_block(ctx->key.rkey_enc, key2, ks[0]);
	sm4_ce_crypt(ctx->key.rkey_enc, ctx->consts, ks[1], 2);

	sm4_ce_expand_key(key2, ctx->key.rkey_enc, ctx->key.rkey_dec,
			  crypto_sm4_fk, crypto_sm4_ck);

	kernel_neon_end();

	return 0;
}

static int sm4_mac_init(struct shash_desc *desc)
{
	struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->digest, 0, SM4_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}

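/*
 * The update path buffers partial input in ctx->digest by XORing it in,
 * and only runs the cipher once a full block is followed by more data.
 * The enc_before/enc_after flags tell sm4_ce_mac_update() whether the
 * buffered block still needs to be encrypted before the new blocks are
 * absorbed, and whether the last block must be encrypted immediately
 * because a partial tail still has to be XORed in afterwards.
 */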
static int sm4_mac_update(struct shash_desc *desc, const u8 *p,
			  unsigned int len)
{
	struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int l, nblocks;

	if (len == 0)
		return 0;

	if (ctx->len || ctx->len + len < SM4_BLOCK_SIZE) {
		l = min(len, SM4_BLOCK_SIZE - ctx->len);

		crypto_xor(ctx->digest + ctx->len, p, l);
		ctx->len += l;
		len -= l;
		p += l;
	}

	if (len && (ctx->len % SM4_BLOCK_SIZE) == 0) {
		kernel_neon_begin();

		if (len < SM4_BLOCK_SIZE && ctx->len == SM4_BLOCK_SIZE) {
			sm4_ce_crypt_block(tctx->key.rkey_enc,
					   ctx->digest, ctx->digest);
			ctx->len = 0;
		} else {
			nblocks = len / SM4_BLOCK_SIZE;
			len %= SM4_BLOCK_SIZE;

			sm4_ce_mac_update(tctx->key.rkey_enc, ctx->digest, p,
					  nblocks, (ctx->len == SM4_BLOCK_SIZE),
					  (len != 0));

			p += nblocks * SM4_BLOCK_SIZE;

			if (len == 0)
				ctx->len = SM4_BLOCK_SIZE;
		}

		kernel_neon_end();

		if (len) {
			crypto_xor(ctx->digest, p, len);
			ctx->len = len;
		}
	}

	return 0;
}

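/*
 * Finalisation for cmac(sm4) and xcbc(sm4): a complete final block is
 * combined with the first stored subkey, while an incomplete one has the
 * 10* padding applied by XORing 0x80 at the end of the buffered data and
 * is combined with the second subkey, before the last encryption produces
 * the MAC value.
 */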
static int sm4_cmac_final(struct shash_desc *desc, u8 *out)
{
	struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
	const u8 *consts = tctx->consts;

	if (ctx->len != SM4_BLOCK_SIZE) {
		ctx->digest[ctx->len] ^= 0x80;
		consts += SM4_BLOCK_SIZE;
	}

	kernel_neon_begin();
	sm4_ce_mac_update(tctx->key.rkey_enc, ctx->digest, consts, 1,
			  false, true);
	kernel_neon_end();

	memcpy(out, ctx->digest, SM4_BLOCK_SIZE);

	return 0;
}

static int sm4_cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);

	if (ctx->len) {
		kernel_neon_begin();
		sm4_ce_crypt_block(tctx->key.rkey_enc, ctx->digest,
				   ctx->digest);
		kernel_neon_end();
	}

	memcpy(out, ctx->digest, SM4_BLOCK_SIZE);

	return 0;
}

static struct shash_alg sm4_mac_algs[] = {
	{
		.base = {
			.cra_name		= "cmac(sm4)",
			.cra_driver_name	= "cmac-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_mac_tfm_ctx)
						  + SM4_BLOCK_SIZE * 2,
			.cra_module		= THIS_MODULE,
		},
		.digestsize		= SM4_BLOCK_SIZE,
		.init			= sm4_mac_init,
		.update			= sm4_mac_update,
		.final			= sm4_cmac_final,
		.setkey			= sm4_cmac_setkey,
		.descsize		= sizeof(struct sm4_mac_desc_ctx),
	}, {
		.base = {
			.cra_name		= "xcbc(sm4)",
			.cra_driver_name	= "xcbc-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_mac_tfm_ctx)
						  + SM4_BLOCK_SIZE * 2,
			.cra_module		= THIS_MODULE,
		},
		.digestsize		= SM4_BLOCK_SIZE,
		.init			= sm4_mac_init,
		.update			= sm4_mac_update,
		.final			= sm4_cmac_final,
		.setkey			= sm4_xcbc_setkey,
		.descsize		= sizeof(struct sm4_mac_desc_ctx),
	}, {
		.base = {
			.cra_name		= "cbcmac(sm4)",
			.cra_driver_name	= "cbcmac-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= 1,
			.cra_ctxsize		= sizeof(struct sm4_mac_tfm_ctx),
			.cra_module		= THIS_MODULE,
		},
		.digestsize		= SM4_BLOCK_SIZE,
		.init			= sm4_mac_init,
		.update			= sm4_mac_update,
		.final			= sm4_cbcmac_final,
		.setkey			= sm4_cbcmac_setkey,
		.descsize		= sizeof(struct sm4_mac_desc_ctx),
	}
};

static int __init sm4_init(void)
{
	int err;

	err = crypto_register_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
	if (err)
		return err;

	err = crypto_register_shashes(sm4_mac_algs, ARRAY_SIZE(sm4_mac_algs));
	if (err)
		goto out_err;

	return 0;

out_err:
	crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
	return err;
}

static void __exit sm4_exit(void)
{
	crypto_unregister_shashes(sm4_mac_algs, ARRAY_SIZE(sm4_mac_algs));
	crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}

module_cpu_feature_match(SM4, sm4_init);
module_exit(sm4_exit);

MODULE_DESCRIPTION("SM4 ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
MODULE_ALIAS_CRYPTO("sm4-ce");
MODULE_ALIAS_CRYPTO("sm4");
MODULE_ALIAS_CRYPTO("ecb(sm4)");
MODULE_ALIAS_CRYPTO("cbc(sm4)");
MODULE_ALIAS_CRYPTO("ctr(sm4)");
MODULE_ALIAS_CRYPTO("cts(cbc(sm4))");
MODULE_ALIAS_CRYPTO("xts(sm4)");
MODULE_ALIAS_CRYPTO("cmac(sm4)");
MODULE_ALIAS_CRYPTO("xcbc(sm4)");
MODULE_ALIAS_CRYPTO("cbcmac(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");