/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4-CCM AEAD Algorithm using ARMv8 Crypto Extensions
 * as specified in RFC 8998
 * https://datatracker.ietf.org/doc/html/rfc8998
 *
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/neon.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
#include "sm4-ce.h"
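
/*
 * Low-level routines implemented in the companion assembly file (presumably
 * sm4-ce-ccm-core.S): a multi-block CBC-MAC update, fused CTR-crypt +
 * CBC-MAC passes for encryption and decryption, and a finalization step
 * that encrypts the saved initial counter block into the MAC to form the
 * tag.
 */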
asmlinkage void sm4_ce_cbcmac_update(const u32 *rkey_enc, u8 *mac,
				     const u8 *src, unsigned int nblocks);
asmlinkage void sm4_ce_ccm_enc(const u32 *rkey_enc, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nbytes, u8 *mac);
asmlinkage void sm4_ce_ccm_dec(const u32 *rkey_enc, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nbytes, u8 *mac);
asmlinkage void sm4_ce_ccm_final(const u32 *rkey_enc, u8 *iv, u8 *mac);
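
/*
 * CCM is built from CTR mode and CBC-MAC, both of which use only the
 * forward (encryption) direction of the block cipher, so every helper
 * above takes rkey_enc; the decryption round keys are expanded below only
 * because sm4_ce_expand_key() produces both schedules at once.
 */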
static int ccm_setkey(struct crypto_aead *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct sm4_ctx *ctx = crypto_aead_ctx(tfm);

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	kernel_neon_begin();
	sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
			  crypto_sm4_fk, crypto_sm4_ck);
	kernel_neon_end();

	return 0;
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	/* RFC 3610: valid tag lengths are the even values 4, 6, ..., 16 */
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}
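
/*
 * Build the B0 block that seeds the CBC-MAC (RFC 3610 / NIST SP 800-38C):
 *
 *	octet  0:        flags = [0 | Adata | (t-2)/2 | q-1]
 *	octets 1..15-q:  nonce N
 *	octets 16-q..15: message length, big-endian, in q octets
 *
 * where t is the tag length and q (called 'l' below) comes from the first
 * IV byte, which callers set to q-1; copying the IV therefore already
 * places the q-1 bits, so only the tag-size field and the Adata bit need
 * to be OR-ed in.
 */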
static int ccm_format_input(u8 info[], struct aead_request *req,
			    unsigned int msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int l = req->iv[0] + 1;
	unsigned int m;
	__be32 len;

	/* verify that CCM dimension 'L': 2 <= L <= 8 */
	if (l < 2 || l > 8)
		return -EINVAL;
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	memset(&req->iv[SM4_BLOCK_SIZE - l], 0, l);

	memcpy(info, req->iv, SM4_BLOCK_SIZE);

	m = crypto_aead_authsize(aead);

	/* format flags field per RFC 3610/NIST 800-38C */
	*info |= ((m - 2) / 2) << 3;
	if (req->assoclen)
		*info |= (1 << 6);

	/*
	 * format message length field,
	 * Linux uses a u32 type to represent msglen
	 */
	if (l >= 4)
		l = 4;
	len = cpu_to_be32(msglen);
	memcpy(&info[SM4_BLOCK_SIZE - l], (u8 *)&len + 4 - l, l);

	return 0;
}
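
/*
 * MAC the associated data. Per RFC 3610, the AAD is prefixed with its
 * length: two big-endian octets when assoclen < 0xff00 (2^16 - 2^8),
 * otherwise the marker 0xfffe followed by a four-octet length. The prefix
 * and data are absorbed into the CBC-MAC block by block, with partial
 * blocks accumulated by XOR into 'mac' until a full block is available.
 */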
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; } aadlen;
	u32 assoclen = req->assoclen;
	struct scatter_walk walk;
	unsigned int len;

	if (assoclen < 0xff00) {
		aadlen.l = cpu_to_be16(assoclen);
		len = 2;
	} else {
		aadlen.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(assoclen, &aadlen.h);
		len = 6;
	}

	sm4_ce_crypt_block(ctx->rkey_enc, mac, mac);
	crypto_xor(mac, (const u8 *)&aadlen, len);

	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, assoclen);
		u8 *p, *ptr;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, assoclen);
		}

		p = ptr = scatterwalk_map(&walk);
		assoclen -= n;
		scatterwalk_advance(&walk, n);

		while (n > 0) {
			unsigned int l, nblocks;

			if (len == SM4_BLOCK_SIZE) {
				if (n < SM4_BLOCK_SIZE) {
					sm4_ce_crypt_block(ctx->rkey_enc,
							   mac, mac);

					len = 0;
				} else {
					nblocks = n / SM4_BLOCK_SIZE;
					sm4_ce_cbcmac_update(ctx->rkey_enc,
							     mac, ptr, nblocks);

					ptr += nblocks * SM4_BLOCK_SIZE;
					n %= SM4_BLOCK_SIZE;

					continue;
				}
			}

			l = min(n, SM4_BLOCK_SIZE - len);
			if (l) {
				crypto_xor(mac + len, ptr, l);
				len += l;
				ptr += l;
				n -= l;
			}
		}

		scatterwalk_unmap(p);
		scatterwalk_done(&walk, 0, assoclen);
	} while (assoclen);
}
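
/*
 * Core walk loop shared by encryption and decryption. On entry 'mac'
 * holds B0 (it becomes the running CBC-MAC) and the walk IV holds the
 * initial counter block A0. A0 is saved in ctr0 so the final step can
 * encrypt it to mask the MAC into the tag, while the walk IV is bumped
 * to A1 for the payload. Kernel-mode NEON is released around each
 * skcipher_walk_done() call, which may sleep since the walk was started
 * with atomic == false.
 */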
static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk,
		     u32 *rkey_enc, u8 mac[],
		     void (*sm4_ce_ccm_crypt)(const u32 *rkey_enc, u8 *dst,
					      const u8 *src, u8 *iv,
					      unsigned int nbytes, u8 *mac))
{
	u8 __aligned(8) ctr0[SM4_BLOCK_SIZE];
	int err = 0;

	/* preserve the initial ctr0 for the TAG */
	memcpy(ctr0, walk->iv, SM4_BLOCK_SIZE);
	crypto_inc(walk->iv, SM4_BLOCK_SIZE);

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	while (walk->nbytes && walk->nbytes != walk->total) {
		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;

		sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
				 walk->src.virt.addr, walk->iv,
				 walk->nbytes - tail, mac);

		kernel_neon_end();

		err = skcipher_walk_done(walk, tail);

		kernel_neon_begin();
	}

	if (walk->nbytes) {
		sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
				 walk->src.virt.addr, walk->iv,
				 walk->nbytes, mac);

		sm4_ce_ccm_final(rkey_enc, ctr0, mac);

		kernel_neon_end();

		err = skcipher_walk_done(walk, 0);
	} else {
		sm4_ce_ccm_final(rkey_enc, ctr0, mac);

		kernel_neon_end();
	}

	return err;
}
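
/*
 * Encrypt: B0 is formatted directly into 'mac', which carries the CBC-MAC
 * state through ccm_crypt(); the finished tag is then appended to the
 * destination right after the ciphertext.
 */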
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	u8 __aligned(8) mac[SM4_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	err = ccm_format_input(mac, req, req->cryptlen);
	if (err)
		return err;

	err = skcipher_walk_aead_encrypt(&walk, req, false);
	if (err)
		return err;

	err = ccm_crypt(req, &walk, ctx->rkey_enc, mac, sm4_ce_ccm_enc);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}
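
/*
 * Decrypt: the payload length passed to ccm_format_input() excludes the
 * trailing tag; the stored tag is read back from the source and compared
 * against the recomputed one with crypto_memneq() to keep the comparison
 * constant-time.
 */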
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	u8 __aligned(8) mac[SM4_BLOCK_SIZE];
	u8 authtag[SM4_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	err = ccm_format_input(mac, req, req->cryptlen - authsize);
	if (err)
		return err;

	err = skcipher_walk_aead_decrypt(&walk, req, false);
	if (err)
		return err;

	err = ccm_crypt(req, &walk, ctx->rkey_enc, mac, sm4_ce_ccm_dec);
	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(authtag, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(authtag, mac, authsize))
		return -EBADMSG;

	return 0;
}

static struct aead_alg sm4_ccm_alg = {
	.base = {
		.cra_name		= "ccm(sm4)",
		.cra_driver_name	= "ccm-sm4-ce",
		.cra_priority		= 400,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct sm4_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= SM4_BLOCK_SIZE,
	.chunksize	= SM4_BLOCK_SIZE,
	.maxauthsize	= SM4_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init sm4_ce_ccm_init(void)
{
	return crypto_register_aead(&sm4_ccm_alg);
}

static void __exit sm4_ce_ccm_exit(void)
{
	crypto_unregister_aead(&sm4_ccm_alg);
}

module_cpu_feature_match(SM4, sm4_ce_ccm_init);
module_exit(sm4_ce_ccm_exit);

MODULE_DESCRIPTION("Synchronous SM4 in CCM mode using ARMv8 Crypto Extensions");
MODULE_ALIAS_CRYPTO("ccm(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");
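
/*
 * Usage sketch, not part of this driver: a minimal in-kernel consumer of
 * the registered "ccm(sm4)" AEAD, assuming a process context where
 * GFP_KERNEL allocations may sleep; buffer names and sizes here are
 * hypothetical, and error handling is omitted. The IV's first byte selects
 * the length field width q (iv[0] = q - 1), leaving 15 - q bytes of nonce.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(sm4)", 0, 0);
 *	struct aead_request *req;
 *	struct scatterlist sg;
 *	u8 iv[SM4_BLOCK_SIZE] = { 3 };	// q = 4, 11-byte nonce in iv[1..11]
 *
 *	crypto_aead_setkey(tfm, key, SM4_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, 16);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, assoclen + ptlen + 16);
 *	aead_request_set_callback(req, 0, NULL, NULL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *	err = crypto_aead_encrypt(req);
 */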