/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4 Cipher Algorithm, using ARMv8 NEON
 * as specified in
 * https://tools.ietf.org/id/draft-ribose-cfrg-sm4-10.html
 *
 * Copyright (C) 2022, Alibaba Group.
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
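
/*
 * Bulk routines implemented in NEON assembly; each call processes
 * 'nblocks' complete SM4 blocks, leaving any partial tail to the C
 * glue code below.
 */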
asmlinkage void sm4_neon_crypt(const u32 *rkey, u8 *dst, const u8 *src,
			       unsigned int nblocks);
asmlinkage void sm4_neon_cbc_dec(const u32 *rkey_dec, u8 *dst, const u8 *src,
				 u8 *iv, unsigned int nblocks);
asmlinkage void sm4_neon_ctr_crypt(const u32 *rkey_enc, u8 *dst, const u8 *src,
				   u8 *iv, unsigned int nblocks);
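
/* Key expansion is cheap and scalar; reuse the generic SM4 key schedule. */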
static int sm4_setkey(struct crypto_skcipher *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_expandkey(ctx, key, key_len);
}
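
/*
 * Shared ECB helper: walk the request's scatterlist, then hand all
 * complete blocks of each mapped chunk to the NEON routine inside a
 * kernel_neon_begin()/kernel_neon_end() section.
 */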
static int sm4_ecb_do_crypt(struct skcipher_request *req, const u32 *rkey)
{
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblocks;

		nblocks = nbytes / SM4_BLOCK_SIZE;
		if (nblocks) {
			kernel_neon_begin();

			sm4_neon_crypt(rkey, dst, src, nblocks);

			kernel_neon_end();
		}

		err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
	}

	return err;
}
static int sm4_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_ecb_do_crypt(req, ctx->rkey_enc);
}
static int sm4_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_ecb_do_crypt(req, ctx->rkey_dec);
}
static int sm4_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *iv = walk.iv;
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

		while (nbytes >= SM4_BLOCK_SIZE) {
			crypto_xor_cpy(dst, src, iv, SM4_BLOCK_SIZE);
			sm4_crypt_block(ctx->rkey_enc, dst, dst);
			iv = dst;
			src += SM4_BLOCK_SIZE;
			dst += SM4_BLOCK_SIZE;
			nbytes -= SM4_BLOCK_SIZE;
		}
		if (iv != walk.iv)
			memcpy(walk.iv, iv, SM4_BLOCK_SIZE);

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
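
/*
 * CBC decryption has no chaining dependency on its own output, so all
 * complete blocks of a walk step can be decrypted in parallel by the
 * NEON routine, which also advances walk.iv.
 */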
static int sm4_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblocks;

		nblocks = nbytes / SM4_BLOCK_SIZE;
		if (nblocks) {
			kernel_neon_begin();

			sm4_neon_cbc_dec(ctx->rkey_dec, dst, src,
					 walk.iv, nblocks);

			kernel_neon_end();
		}

		err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
	}

	return err;
}
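
/*
 * CTR mode XORs an encrypted counter keystream with the data, so
 * encryption and decryption are the same operation. Full blocks go to
 * the NEON routine; a trailing partial block is finished with one
 * scalar keystream block.
 */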
static int sm4_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblocks;

		nblocks = nbytes / SM4_BLOCK_SIZE;
		if (nblocks) {
			kernel_neon_begin();

			sm4_neon_ctr_crypt(ctx->rkey_enc, dst, src,
					   walk.iv, nblocks);

			kernel_neon_end();

			dst += nblocks * SM4_BLOCK_SIZE;
			src += nblocks * SM4_BLOCK_SIZE;
			nbytes -= nblocks * SM4_BLOCK_SIZE;
		}

		/* tail: partial final block, only in the last walk step */
		if (walk.nbytes == walk.total && nbytes > 0) {
			u8 keystream[SM4_BLOCK_SIZE];

			sm4_crypt_block(ctx->rkey_enc, keystream, walk.iv);
			crypto_inc(walk.iv, SM4_BLOCK_SIZE);
			crypto_xor_cpy(dst, src, keystream, nbytes);
			nbytes = 0;
		}

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
static struct skcipher_alg sm4_algs[] = {
	{
		.base = {
			.cra_name		= "ecb(sm4)",
			.cra_driver_name	= "ecb-sm4-neon",
			.cra_priority		= 200,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_ecb_encrypt,
		.decrypt	= sm4_ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "cbc(sm4)",
			.cra_driver_name	= "cbc-sm4-neon",
			.cra_priority		= 200,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_cbc_encrypt,
		.decrypt	= sm4_cbc_decrypt,
	}, {
		.base = {
			.cra_name		= "ctr(sm4)",
			.cra_driver_name	= "ctr-sm4-neon",
			.cra_priority		= 200,
			/* CTR behaves as a stream cipher: blocksize 1 */
			.cra_blocksize		= 1,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.chunksize	= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_ctr_crypt,
		.decrypt	= sm4_ctr_crypt,
	}
};
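
/*
 * Usage (a minimal sketch, not part of this module): kernel users reach
 * these implementations through the generic skcipher API, e.g.:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("ctr(sm4)", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_skcipher_setkey(tfm, key, SM4_KEY_SIZE);
 *	...
 *	crypto_free_skcipher(tfm);
 *
 * The crypto core picks this driver whenever its cra_priority is the
 * highest among the registered "ctr(sm4)" implementations.
 */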
static int __init sm4_init(void)
{
	return crypto_register_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}
static void __exit sm4_exit(void)
{
	crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}
module_init(sm4_init);
module_exit(sm4_exit);
MODULE_DESCRIPTION("SM4 ECB/CBC/CTR using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("sm4-neon");
MODULE_ALIAS_CRYPTO("sm4");
MODULE_ALIAS_CRYPTO("ecb(sm4)");
MODULE_ALIAS_CRYPTO("cbc(sm4)");
MODULE_ALIAS_CRYPTO("ctr(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");