/*
 * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in,
				    int rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;
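
	/*
	 * For example, an 8 byte tag with AAD present and a 13 byte nonce
	 * (so L == 2, with bits 0..2 holding L - 1) gives a flags byte of
	 * 0x40 | ((8 - 2) << 2) | 1 == 0x59.
	 */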

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp, bool use_neon)
{
	if (likely(use_neon)) {
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
	} else {
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);
			*macp += added;

			in += added;
			abytes -= added;
		}
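
		/*
		 * Absorb whole blocks; keep the final (possibly partial)
		 * block for the tail handling below so that *macp is left
		 * with the correct residue count.
		 */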
		while (abytes > AES_BLOCK_SIZE) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		} else {
			*macp = 0;
		}
	}
}

static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[],
				   bool use_neon)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
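	/*
	 * RFC 3610: lengths below 0xff00 are encoded as two big-endian
	 * bytes (e.g. 24 bytes of AAD become 0x00 0x18); longer AAD gets
	 * the 0xff 0xfe marker followed by a 32-bit big-endian length.
	 */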
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp, use_neon);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp, use_neon);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

		if (nbytes == walk->total && tail > 0) {
			blocks++;
			tail = 0;
		}
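
		/*
		 * A partial tail can only occur in the last chunk of the
		 * walk; fold it into one extra pass through the loop below
		 * instead of handing it back via skcipher_walk_done().
		 */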
		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			__aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
					    num_rounds(ctx));
			__aes_arm64_encrypt(ctx->key_enc, mac, mac,
					    num_rounds(ctx));
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);

			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

	if (err)
		return err;
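
	/*
	 * Finalize: one more encryption completes the CBC-MAC, then fold in
	 * S0, the keystream block generated from the preserved counter-0 IV.
	 */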
	__aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
	__aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
	crypto_xor(mac, buf, AES_BLOCK_SIZE);

	return 0;
}

static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	bool use_neon = may_use_simd();
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (likely(use_neon))
		kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac, use_neon);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, true);

	if (likely(use_neon)) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;
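
			/*
			 * The assembler routine handles a trailing partial
			 * block itself, so only hold back a tail for chunks
			 * that are not the final one.
			 */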
			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err)
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));

		kernel_neon_end();
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}

	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	bool use_neon = may_use_simd();
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (likely(use_neon))
		kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac, use_neon);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, true);

	if (likely(use_neon)) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err)
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));

		kernel_neon_end();
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);
	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init aes_mod_init(void)
{
	if (!(elf_hwcap & HWCAP_AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");