// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"
static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key => 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
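
/*
 * Worked example (editorial addition): AES-256 uses a 32 byte key, so
 * num_rounds() returns 6 + 32/4 = 14, matching the table above; AES-128
 * (16 bytes) yields 6 + 4 = 10.
 */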
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);
static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	/* CCM tags are an even number of bytes, at least 4 */
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}
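
/*
 * Illustrative example (editorial addition): for a 13 byte nonce
 * (L = 2, so iv[0] = 1), a 16 byte tag and non-empty AAD, the B0 block
 * built above is laid out as
 *
 *	maciv[0]      = 0x01 | (16 - 2) << 2 | 0x40 = 0x79
 *	maciv[1..13]  = nonce
 *	maciv[14..15] = msglen, big endian
 */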
static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp)
{
	if (crypto_simd_usable()) {
		kernel_neon_begin();
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
		kernel_neon_end();
	} else {
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes >= AES_BLOCK_SIZE) {
			aes_encrypt(key, mac, mac);
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			aes_encrypt(key, mac, mac);
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		}
	}
}
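
/*
 * Illustrative note (editorial addition): the scalar path above is a
 * plain CBC-MAC with partial-block buffering. Feeding 20 bytes and then
 * 12 bytes leaves the same MAC state as feeding all 32 at once: the
 * first call records *macp == 4 with the trailing bytes xor'ed into
 * mac[], and the second call merely fills the remainder of that pending
 * block.
 */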
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}
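
/*
 * Illustrative example (editorial addition): per RFC 3610 section 2.2,
 * 64 bytes of AAD get the 2 byte length prefix 0x00 0x40, while an
 * assoclen of 0x12345 would use the 6 byte form
 * 0xff 0xfe 0x00 0x01 0x23 0x45.
 */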
static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

		/* fold a trailing partial block into the final iteration */
		if (nbytes == walk->total && tail > 0) {
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			aes_encrypt(ctx, buf, walk->iv);
			aes_encrypt(ctx, mac, mac);
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);

			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

	if (!err) {
		aes_encrypt(ctx, buf, iv0);
		aes_encrypt(ctx, mac, mac);
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}
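
/*
 * Illustrative note (editorial addition): the finalization above
 * follows RFC 3610: the last CBC-MAC block is encrypted once more and
 * the result is xor'ed with S_0 = E(K, A_0), the keystream block for
 * counter value 0, which is why the callers preserve the unmodified
 * initial counter block in iv0.
 */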
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}
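
/*
 * Illustrative note (editorial addition): on success the destination
 * scatterlist holds assoclen bytes of associated data followed by
 * cryptlen bytes of ciphertext, with the authentication tag appended at
 * offset assoclen + cryptlen, per the kernel AEAD convention.
 */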
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}
static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};
static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}
static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}
module_init(aes_mod_init);
module_exit(aes_mod_exit);
MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");
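
/*
 * Usage sketch (editorial addition, not part of the original file):
 * a kernel client reaches this driver through the generic AEAD API,
 * e.g. (error handling omitted):
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *	crypto_aead_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_aead_setauthsize(tfm, 16);
 *	...
 *	crypto_free_aead(tfm);
 *
 * When this module is loaded, the crypto core resolves "ccm(aes)" to
 * the "ccm-aes-ce" implementation, assuming its priority (300) beats
 * any other registered provider.
 */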