/*
 * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2014 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"
21 static int num_rounds(struct crypto_aes_ctx
*ctx
)
24 * # of rounds specified by AES:
25 * 128 bit key 10 rounds
26 * 192 bit key 12 rounds
27 * 256 bit key 14 rounds
28 * => n byte key => 6 + (n/4) rounds
30 return 6 + ctx
->key_length
/ 4;
33 asmlinkage
void ce_aes_ccm_auth_data(u8 mac
[], u8
const in
[], u32 abytes
,
34 u32
*macp
, u32
const rk
[], u32 rounds
);
36 asmlinkage
void ce_aes_ccm_encrypt(u8 out
[], u8
const in
[], u32 cbytes
,
37 u32
const rk
[], u32 rounds
, u8 mac
[],
40 asmlinkage
void ce_aes_ccm_decrypt(u8 out
[], u8
const in
[], u32 cbytes
,
41 u32
const rk
[], u32 rounds
, u8 mac
[],
44 asmlinkage
void ce_aes_ccm_final(u8 mac
[], u8
const ctr
[], u32
const rk
[],
47 static int ccm_setkey(struct crypto_aead
*tfm
, const u8
*in_key
,
50 struct crypto_aes_ctx
*ctx
= crypto_aead_ctx(tfm
);
53 ret
= ce_aes_expandkey(ctx
, in_key
, key_len
);
57 tfm
->base
.crt_flags
|= CRYPTO_TFM_RES_BAD_KEY_LEN
;
61 static int ccm_setauthsize(struct crypto_aead
*tfm
, unsigned int authsize
)
63 if ((authsize
& 1) || authsize
< 4)
68 static int ccm_init_mac(struct aead_request
*req
, u8 maciv
[], u32 msglen
)
70 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
71 __be32
*n
= (__be32
*)&maciv
[AES_BLOCK_SIZE
- 8];
72 u32 l
= req
->iv
[0] + 1;
74 /* verify that CCM dimension 'L' is set correctly in the IV */
78 /* verify that msglen can in fact be represented in L bytes */
79 if (l
< 4 && msglen
>> (8 * l
))
83 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
84 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
87 n
[1] = cpu_to_be32(msglen
);
89 memcpy(maciv
, req
->iv
, AES_BLOCK_SIZE
- l
);
92 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
93 * - bits 0..2 : max # of bytes required to represent msglen, minus 1
94 * (already set by caller)
95 * - bits 3..5 : size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
96 * - bit 6 : indicates presence of authenticate-only data
98 maciv
[0] |= (crypto_aead_authsize(aead
) - 2) << 2;
102 memset(&req
->iv
[AES_BLOCK_SIZE
- l
], 0, l
);
106 static void ccm_calculate_auth_mac(struct aead_request
*req
, u8 mac
[])
108 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
109 struct crypto_aes_ctx
*ctx
= crypto_aead_ctx(aead
);
110 struct __packed
{ __be16 l
; __be32 h
; u16 len
; } ltag
;
111 struct scatter_walk walk
;
112 u32 len
= req
->assoclen
;
115 /* prepend the AAD with a length tag */
117 ltag
.l
= cpu_to_be16(len
);
120 ltag
.l
= cpu_to_be16(0xfffe);
121 put_unaligned_be32(len
, <ag
.h
);
125 ce_aes_ccm_auth_data(mac
, (u8
*)<ag
, ltag
.len
, &macp
, ctx
->key_enc
,
127 scatterwalk_start(&walk
, req
->src
);
130 u32 n
= scatterwalk_clamp(&walk
, len
);
134 scatterwalk_start(&walk
, sg_next(walk
.sg
));
135 n
= scatterwalk_clamp(&walk
, len
);
137 p
= scatterwalk_map(&walk
);
138 ce_aes_ccm_auth_data(mac
, p
, n
, &macp
, ctx
->key_enc
,
142 scatterwalk_unmap(p
);
143 scatterwalk_advance(&walk
, n
);
144 scatterwalk_done(&walk
, 0, len
);
148 static int ccm_encrypt(struct aead_request
*req
)
150 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
151 struct crypto_aes_ctx
*ctx
= crypto_aead_ctx(aead
);
152 struct blkcipher_desc desc
= { .info
= req
->iv
};
153 struct blkcipher_walk walk
;
154 struct scatterlist srcbuf
[2];
155 struct scatterlist dstbuf
[2];
156 struct scatterlist
*src
;
157 struct scatterlist
*dst
;
158 u8
__aligned(8) mac
[AES_BLOCK_SIZE
];
159 u8 buf
[AES_BLOCK_SIZE
];
160 u32 len
= req
->cryptlen
;
163 err
= ccm_init_mac(req
, mac
, len
);
167 kernel_neon_begin_partial(6);
170 ccm_calculate_auth_mac(req
, mac
);
172 /* preserve the original iv for the final round */
173 memcpy(buf
, req
->iv
, AES_BLOCK_SIZE
);
175 src
= scatterwalk_ffwd(srcbuf
, req
->src
, req
->assoclen
);
177 if (req
->src
!= req
->dst
)
178 dst
= scatterwalk_ffwd(dstbuf
, req
->dst
, req
->assoclen
);
180 blkcipher_walk_init(&walk
, dst
, src
, len
);
181 err
= blkcipher_aead_walk_virt_block(&desc
, &walk
, aead
,
184 while (walk
.nbytes
) {
185 u32 tail
= walk
.nbytes
% AES_BLOCK_SIZE
;
187 if (walk
.nbytes
== len
)
190 ce_aes_ccm_encrypt(walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
191 walk
.nbytes
- tail
, ctx
->key_enc
,
192 num_rounds(ctx
), mac
, walk
.iv
);
194 len
-= walk
.nbytes
- tail
;
195 err
= blkcipher_walk_done(&desc
, &walk
, tail
);
198 ce_aes_ccm_final(mac
, buf
, ctx
->key_enc
, num_rounds(ctx
));
205 /* copy authtag to end of dst */
206 scatterwalk_map_and_copy(mac
, dst
, req
->cryptlen
,
207 crypto_aead_authsize(aead
), 1);
212 static int ccm_decrypt(struct aead_request
*req
)
214 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
215 struct crypto_aes_ctx
*ctx
= crypto_aead_ctx(aead
);
216 unsigned int authsize
= crypto_aead_authsize(aead
);
217 struct blkcipher_desc desc
= { .info
= req
->iv
};
218 struct blkcipher_walk walk
;
219 struct scatterlist srcbuf
[2];
220 struct scatterlist dstbuf
[2];
221 struct scatterlist
*src
;
222 struct scatterlist
*dst
;
223 u8
__aligned(8) mac
[AES_BLOCK_SIZE
];
224 u8 buf
[AES_BLOCK_SIZE
];
225 u32 len
= req
->cryptlen
- authsize
;
228 err
= ccm_init_mac(req
, mac
, len
);
232 kernel_neon_begin_partial(6);
235 ccm_calculate_auth_mac(req
, mac
);
237 /* preserve the original iv for the final round */
238 memcpy(buf
, req
->iv
, AES_BLOCK_SIZE
);
240 src
= scatterwalk_ffwd(srcbuf
, req
->src
, req
->assoclen
);
242 if (req
->src
!= req
->dst
)
243 dst
= scatterwalk_ffwd(dstbuf
, req
->dst
, req
->assoclen
);
245 blkcipher_walk_init(&walk
, dst
, src
, len
);
246 err
= blkcipher_aead_walk_virt_block(&desc
, &walk
, aead
,
249 while (walk
.nbytes
) {
250 u32 tail
= walk
.nbytes
% AES_BLOCK_SIZE
;
252 if (walk
.nbytes
== len
)
255 ce_aes_ccm_decrypt(walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
256 walk
.nbytes
- tail
, ctx
->key_enc
,
257 num_rounds(ctx
), mac
, walk
.iv
);
259 len
-= walk
.nbytes
- tail
;
260 err
= blkcipher_walk_done(&desc
, &walk
, tail
);
263 ce_aes_ccm_final(mac
, buf
, ctx
->key_enc
, num_rounds(ctx
));
270 /* compare calculated auth tag with the stored one */
271 scatterwalk_map_and_copy(buf
, src
, req
->cryptlen
- authsize
,
274 if (crypto_memneq(mac
, buf
, authsize
))
279 static struct aead_alg ccm_aes_alg
= {
281 .cra_name
= "ccm(aes)",
282 .cra_driver_name
= "ccm-aes-ce",
285 .cra_ctxsize
= sizeof(struct crypto_aes_ctx
),
287 .cra_module
= THIS_MODULE
,
289 .ivsize
= AES_BLOCK_SIZE
,
290 .maxauthsize
= AES_BLOCK_SIZE
,
291 .setkey
= ccm_setkey
,
292 .setauthsize
= ccm_setauthsize
,
293 .encrypt
= ccm_encrypt
,
294 .decrypt
= ccm_decrypt
,
297 static int __init
aes_mod_init(void)
299 if (!(elf_hwcap
& HWCAP_AES
))
301 return crypto_register_aead(&ccm_aes_alg
);
304 static void __exit
aes_mod_exit(void)
306 crypto_unregister_aead(&ccm_aes_alg
);
/* Standard module entry points and metadata. */
module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");