// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd.
 * Copyright (C) 2024 Google LLC
 *
 * Author: Ard Biesheuvel <ardb@kernel.org>
 */

#include <asm/neon.h>
#include <linux/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

MODULE_IMPORT_NS("CRYPTO_INTERNAL");
static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
asmlinkage u32 ce_aes_mac_update(u8 const in[], u32 const rk[], int rounds,
				 int blocks, u8 dg[], int enc_before,
				 int enc_after);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[], u8 const final_iv[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[], u8 const final_iv[]);
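/*
 * The three routines above are implemented in assembly using the ARMv8
 * Crypto Extensions, so they may only be invoked with the NEON unit
 * enabled, i.e. between kernel_neon_begin() and kernel_neon_end().
 */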
static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}
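/*
 * CCM (RFC 3610) only defines even tag sizes of at least 4 bytes; the check
 * above rejects odd or too-small values, and .maxauthsize below caps the tag
 * at AES_BLOCK_SIZE (16) bytes.
 */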
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}
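/*
 * Worked example of the B0 flags byte built in ccm_init_mac() above: with a
 * 13-byte nonce (L = 2, so iv[0] = 1), an 8-byte tag and associated data
 * present, byte 0 becomes 0x01 | ((8 - 2) << 2) | 0x40 = 0x59.
 */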
static u32 ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				u32 macp, u32 const rk[], u32 rounds)
{
	int enc_after = (macp + abytes) % AES_BLOCK_SIZE;

	do {
		u32 blocks = abytes / AES_BLOCK_SIZE;

		if (macp == AES_BLOCK_SIZE || (!macp && blocks > 0)) {
			u32 rem = ce_aes_mac_update(in, rk, rounds, blocks, mac,
						    macp, enc_after);
			u32 adv = (blocks - rem) * AES_BLOCK_SIZE;

			macp = enc_after ? 0 : AES_BLOCK_SIZE;
			in += adv;
			abytes -= adv;

			if (unlikely(rem)) {
				kernel_neon_end();
				kernel_neon_begin();
				macp = 0;
			}
		} else {
			u32 l = min(AES_BLOCK_SIZE - macp, abytes);

			crypto_xor(&mac[macp], in, l);

			macp += l;
			in += l;
			abytes -= l;
		}
	} while (abytes > 0);

	return macp;
}
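/*
 * Note on the helper above: runs of full blocks are handed to the
 * ce_aes_mac_update() assembly, while a partial block is only XORed into
 * mac[] at offset macp; it is folded into the CBC-MAC the next time the MAC
 * state is encrypted.
 */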
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = AES_BLOCK_SIZE;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	macp = ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, macp,
				    ctx->key_enc, num_rounds(ctx));
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);

		macp = ce_aes_ccm_auth_data(mac, p, n, macp, ctx->key_enc,
					    num_rounds(ctx));

		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}
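/*
 * The length tag prepended above follows RFC 3610: an AAD length below
 * 0xff00 is encoded as a 2-byte big-endian value, while larger (32-bit)
 * lengths are encoded as the marker 0xfffe followed by a 4-byte big-endian
 * value.
 */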
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 orig_iv[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(orig_iv, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);
	if (unlikely(err))
		return err;

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		u8 buf[AES_BLOCK_SIZE];
		u8 *final_iv = NULL;

		if (walk.nbytes == walk.total) {
			tail = 0;
			final_iv = orig_iv;
		}

		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
			src = dst = memcpy(&buf[sizeof(buf) - walk.nbytes],
					   src, walk.nbytes);

		ce_aes_ccm_encrypt(dst, src, walk.nbytes - tail,
				   ctx->key_enc, num_rounds(ctx),
				   mac, walk.iv, final_iv);

		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
			memcpy(walk.dst.virt.addr, dst, walk.nbytes);

		if (walk.nbytes) {
			err = skcipher_walk_done(&walk, tail);
		}
	} while (walk.nbytes);

	kernel_neon_end();

	if (unlikely(err))
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}
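/*
 * ccm_decrypt() below mirrors the loop above: a final partial block is
 * bounced through a stack buffer so the assembly always sees whole blocks,
 * and passing final_iv (the preserved initial counter) on the last chunk
 * lets the same call also encrypt the CBC-MAC into the final tag.
 */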
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 orig_iv[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(orig_iv, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);
	if (unlikely(err))
		return err;

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		u8 buf[AES_BLOCK_SIZE];
		u8 *final_iv = NULL;

		if (walk.nbytes == walk.total) {
			tail = 0;
			final_iv = orig_iv;
		}

		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
			src = dst = memcpy(&buf[sizeof(buf) - walk.nbytes],
					   src, walk.nbytes);

		ce_aes_ccm_decrypt(dst, src, walk.nbytes - tail,
				   ctx->key_enc, num_rounds(ctx),
				   mac, walk.iv, final_iv);

		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
			memcpy(walk.dst.virt.addr, dst, walk.nbytes);

		if (walk.nbytes) {
			err = skcipher_walk_done(&walk, tail);
		}
	} while (walk.nbytes);

	kernel_neon_end();

	if (unlikely(err))
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(orig_iv, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, orig_iv, authsize))
		return -EBADMSG;
	return 0;
}
static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ardb@kernel.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");