/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

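/*
 * CCM (RFC 3610, NIST SP 800-38C) is assembled here from two simpler
 * primitives: a CTR mode skcipher for encryption and a CBC-MAC ahash for
 * authentication.  The "ccm" and "ccm_base" templates build the AEAD from
 * those parts, "rfc4309" wraps it with the nonce format used by IPsec ESP,
 * and "cbcmac" provides the MAC as a plain shash over a block cipher.
 */
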
struct ccm_instance_ctx {
        struct crypto_skcipher_spawn ctr;
        struct crypto_ahash_spawn mac;
};

struct crypto_ccm_ctx {
        struct crypto_ahash *mac;
        struct crypto_skcipher *ctr;
};

struct crypto_rfc4309_ctx {
        struct crypto_aead *child;
        u8 nonce[3];
};

struct crypto_rfc4309_req_ctx {
        struct scatterlist src[3];
        struct scatterlist dst[3];
        struct aead_request subreq;
};

struct crypto_ccm_req_priv_ctx {
        u8 odata[16];
        u8 idata[16];
        u8 auth_tag[16];
        u32 flags;
        struct scatterlist src[3];
        struct scatterlist dst[3];
        struct skcipher_request skreq;
};

struct cbcmac_tfm_ctx {
        struct crypto_cipher *child;
};

struct cbcmac_desc_ctx {
        unsigned int len;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
        struct aead_request *req)
{
        unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

        return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

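/*
 * Write the message length into the last csize bytes of the B0 block,
 * big-endian, as required by the CCM length field: block points at B0,
 * csize is L (the size of the length field chosen via iv[0]).  Lengths
 * that do not fit into L bytes are rejected.
 */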
static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
        __be32 data;

        memset(block, 0, csize);
        block += csize;

        if (csize >= 4)
                csize = 4;
        else if (msglen > (1 << (8 * csize)))
                return -EOVERFLOW;

        data = cpu_to_be32(msglen);
        memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

        return 0;
}

static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_skcipher *ctr = ctx->ctr;
        struct crypto_ahash *mac = ctx->mac;
        int err;

        crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_skcipher_setkey(ctr, key, keylen);
        crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
                                    CRYPTO_TFM_RES_MASK);
        if (err)
                return err;

        crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
        crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
                                    CRYPTO_TFM_REQ_MASK);
        err = crypto_ahash_setkey(mac, key, keylen);
        crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
                                    CRYPTO_TFM_RES_MASK);
        return err;
}

static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
                                  unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 6:
        case 8:
        case 10:
        case 12:
        case 14:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

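/*
 * Build the B0 block from the caller's IV.  iv[0] already carries L' (the
 * length-field size minus one); the flags byte additionally encodes the tag
 * length as (M - 2) / 2 in bits 3-5, and bit 6 is set when associated data
 * is present.  The last L bytes of the block receive the message length.
 */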
static int format_input(u8 *info, struct aead_request *req,
                        unsigned int cryptlen)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        unsigned int lp = req->iv[0];
        unsigned int l = lp + 1;
        unsigned int m;

        m = crypto_aead_authsize(aead);

        memcpy(info, req->iv, 16);

        /* format control info per RFC 3610 and
         * NIST Special Publication 800-38C
         */
        *info |= (8 * ((m - 2) / 2));
        if (req->assoclen)
                *info |= 64;

        return set_msg_len(info + 16 - l, cryptlen, l);
}

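/*
 * Encode the associated-data length in front of the data itself: two
 * big-endian bytes when it is below 0xff00, otherwise the 0xfffe marker
 * followed by a 32-bit length.  The return value is the number of bytes
 * written into adata.
 */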
static int format_adata(u8 *adata, unsigned int a)
{
        int len = 0;

        /* add control info for associated data
         * RFC 3610 and NIST Special Publication 800-38C
         */
        if (a < 65280) {
                *(__be16 *)adata = cpu_to_be16(a);
                len = 2;
        } else {
                *(__be16 *)adata = cpu_to_be16(0xfffe);
                *(__be32 *)&adata[2] = cpu_to_be32(a);
                len = 6;
        }

        return len;
}

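/*
 * Compute the CBC-MAC over B0, the length-prefixed associated data (padded
 * to a block boundary with zeroes) and finally the plaintext.  The result
 * is left in pctx->odata, where the encrypt/decrypt paths pick it up.
 */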
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
                           unsigned int cryptlen)
{
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        AHASH_REQUEST_ON_STACK(ahreq, ctx->mac);
        unsigned int assoclen = req->assoclen;
        struct scatterlist sg[3];
        u8 *odata = pctx->odata;
        u8 *idata = pctx->idata;
        int ilen, err;

        /* format control data for input */
        err = format_input(odata, req, cryptlen);
        if (err)
                goto out;

        sg_init_table(sg, 3);
        sg_set_buf(&sg[0], odata, 16);

        /* format associated data and compute into mac */
        if (assoclen) {
                ilen = format_adata(idata, assoclen);
                sg_set_buf(&sg[1], idata, ilen);
                sg_chain(sg, 3, req->src);
        } else {
                ilen = 0;
                sg_chain(sg, 2, req->src);
        }

        ahash_request_set_tfm(ahreq, ctx->mac);
        ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
        ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
        err = crypto_ahash_init(ahreq);
        if (err)
                goto out;
        err = crypto_ahash_update(ahreq);
        if (err)
                goto out;

        /* we need to pad the MAC input to a round multiple of the block size */
        ilen = 16 - (assoclen + ilen) % 16;
        if (ilen < 16) {
                memset(idata, 0, ilen);
                sg_init_table(sg, 2);
                sg_set_buf(&sg[0], idata, ilen);
                if (plain)
                        sg_chain(sg, 2, plain);
                plain = sg;
                cryptlen += ilen;
        }

        ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
        err = crypto_ahash_finup(ahreq);
out:
        return err;
}

static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
        struct aead_request *req = areq->data;
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        u8 *odata = pctx->odata;

        if (!err)
                scatterwalk_map_and_copy(odata, req->dst,
                                         req->assoclen + req->cryptlen,
                                         crypto_aead_authsize(aead), 1);
        aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
        /* 2 <= L <= 8, so 1 <= L' <= 7. */
        if (1 > iv[0] || iv[0] > 7)
                return -EINVAL;

        return 0;
}

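/*
 * Prepare the scatterlists for the CTR pass: a 16-byte tag block is
 * prepended to the payload so that the keystream block generated with
 * counter value zero encrypts the authentication tag, and the payload is
 * then processed with counters 1, 2, ...
 */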
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct scatterlist *sg;
        u8 *iv = req->iv;
        int err;

        err = crypto_ccm_check_iv(iv);
        if (err)
                return err;

        pctx->flags = aead_request_flags(req);

        /* Note: rfc 3610 and NIST 800-38C require counter of
         * zero to encrypt auth tag.
         */
        memset(iv + 15 - iv[0], 0, iv[0] + 1);

        sg_init_table(pctx->src, 3);
        sg_set_buf(pctx->src, tag, 16);
        sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
        if (sg != pctx->src + 1)
                sg_chain(pctx->src, 2, sg);

        if (req->src != req->dst) {
                sg_init_table(pctx->dst, 3);
                sg_set_buf(pctx->dst, tag, 16);
                sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
                if (sg != pctx->dst + 1)
                        sg_chain(pctx->dst, 2, sg);
        }

        return 0;
}

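/*
 * Encryption runs in two steps: crypto_ccm_auth() computes the CBC-MAC of
 * the plaintext into odata, then a single CTR pass encrypts the tag block
 * and the plaintext together.  The encrypted tag is finally copied to the
 * end of the destination buffer.
 */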
static int crypto_ccm_encrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct skcipher_request *skreq = &pctx->skreq;
        struct scatterlist *dst;
        unsigned int cryptlen = req->cryptlen;
        u8 *odata = pctx->odata;
        u8 *iv = req->iv;
        int err;

        err = crypto_ccm_init_crypt(req, odata);
        if (err)
                return err;

        err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
        if (err)
                return err;

        dst = pctx->src;
        if (req->src != req->dst)
                dst = pctx->dst;

        skcipher_request_set_tfm(skreq, ctx->ctr);
        skcipher_request_set_callback(skreq, pctx->flags,
                                      crypto_ccm_encrypt_done, req);
        skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
        err = crypto_skcipher_encrypt(skreq);
        if (err)
                return err;

        /* copy authtag to end of dst */
        scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
                                 crypto_aead_authsize(aead), 1);
        return err;
}

static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
                                    int err)
{
        struct aead_request *req = areq->data;
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        unsigned int authsize = crypto_aead_authsize(aead);
        unsigned int cryptlen = req->cryptlen - authsize;
        struct scatterlist *dst;

        pctx->flags = 0;

        dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

        if (!err) {
                err = crypto_ccm_auth(req, dst, cryptlen);
                if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
                        err = -EBADMSG;
        }
        aead_request_complete(req, err);
}

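/*
 * Decryption mirrors encryption: the received tag is copied out, the CTR
 * pass decrypts tag and payload, the CBC-MAC is recomputed over the
 * recovered plaintext, and the two tags are compared with crypto_memneq()
 * so that the check does not leak timing information.
 */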
static int crypto_ccm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct skcipher_request *skreq = &pctx->skreq;
        struct scatterlist *dst;
        unsigned int authsize = crypto_aead_authsize(aead);
        unsigned int cryptlen = req->cryptlen;
        u8 *authtag = pctx->auth_tag;
        u8 *odata = pctx->odata;
        u8 *iv = req->iv;
        int err;

        cryptlen -= authsize;

        err = crypto_ccm_init_crypt(req, authtag);
        if (err)
                return err;

        scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
                                 authsize, 0);

        dst = pctx->src;
        if (req->src != req->dst)
                dst = pctx->dst;

        skcipher_request_set_tfm(skreq, ctx->ctr);
        skcipher_request_set_callback(skreq, pctx->flags,
                                      crypto_ccm_decrypt_done, req);
        skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
        err = crypto_skcipher_decrypt(skreq);
        if (err)
                return err;

        err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
        if (err)
                return err;

        /* verify */
        if (crypto_memneq(authtag, odata, authsize))
                return -EBADMSG;

        return err;
}

static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
        struct aead_instance *inst = aead_alg_instance(tfm);
        struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
        struct crypto_ahash *mac;
        struct crypto_skcipher *ctr;
        unsigned long align;
        int err;

        mac = crypto_spawn_ahash(&ictx->mac);
        if (IS_ERR(mac))
                return PTR_ERR(mac);

        ctr = crypto_spawn_skcipher(&ictx->ctr);
        err = PTR_ERR(ctr);
        if (IS_ERR(ctr))
                goto err_free_mac;

        ctx->mac = mac;
        ctx->ctr = ctr;

        align = crypto_aead_alignmask(tfm);
        align &= ~(crypto_tfm_ctx_alignment() - 1);
        crypto_aead_set_reqsize(
                tfm,
                align + sizeof(struct crypto_ccm_req_priv_ctx) +
                crypto_skcipher_reqsize(ctr));

        return 0;

err_free_mac:
        crypto_free_ahash(mac);
        return err;
}

static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_ahash(ctx->mac);
        crypto_free_skcipher(ctx->ctr);
}

static void crypto_ccm_free(struct aead_instance *inst)
{
        struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

        crypto_drop_ahash(&ctx->mac);
        crypto_drop_skcipher(&ctx->ctr);
        kfree(inst);
}

static int crypto_ccm_create_common(struct crypto_template *tmpl,
                                    struct rtattr **tb,
                                    const char *full_name,
                                    const char *ctr_name,
                                    const char *mac_name)
{
        struct crypto_attr_type *algt;
        struct aead_instance *inst;
        struct skcipher_alg *ctr;
        struct crypto_alg *mac_alg;
        struct hash_alg_common *mac;
        struct ccm_instance_ctx *ictx;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
                return -EINVAL;

        mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
                                  CRYPTO_ALG_TYPE_HASH,
                                  CRYPTO_ALG_TYPE_AHASH_MASK |
                                  CRYPTO_ALG_ASYNC);
        if (IS_ERR(mac_alg))
                return PTR_ERR(mac_alg);

        mac = __crypto_hash_alg_common(mac_alg);
        err = -EINVAL;
        if (mac->digestsize != 16)
                goto out_put_mac;

        inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
        err = -ENOMEM;
        if (!inst)
                goto out_put_mac;

        ictx = aead_instance_ctx(inst);
        err = crypto_init_ahash_spawn(&ictx->mac, mac,
                                      aead_crypto_instance(inst));
        if (err)
                goto err_free_inst;

        crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
        err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
                                   crypto_requires_sync(algt->type,
                                                        algt->mask));
        if (err)
                goto err_drop_mac;

        ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

        /* Not a stream cipher? */
        err = -EINVAL;
        if (ctr->base.cra_blocksize != 1)
                goto err_drop_ctr;

        /* We want the real thing! */
        if (crypto_skcipher_alg_ivsize(ctr) != 16)
                goto err_drop_ctr;

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
                     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                goto err_drop_ctr;

        memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

        inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.base.cra_priority = (mac->base.cra_priority +
                                       ctr->base.cra_priority) / 2;
        inst->alg.base.cra_blocksize = 1;
        inst->alg.base.cra_alignmask = mac->base.cra_alignmask |
                                       ctr->base.cra_alignmask;
        inst->alg.ivsize = 16;
        inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
        inst->alg.maxauthsize = 16;
        inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
        inst->alg.init = crypto_ccm_init_tfm;
        inst->alg.exit = crypto_ccm_exit_tfm;
        inst->alg.setkey = crypto_ccm_setkey;
        inst->alg.setauthsize = crypto_ccm_setauthsize;
        inst->alg.encrypt = crypto_ccm_encrypt;
        inst->alg.decrypt = crypto_ccm_decrypt;

        inst->free = crypto_ccm_free;

        err = aead_register_instance(tmpl, inst);
        if (err)
                goto err_drop_ctr;

out_put_mac:
        crypto_mod_put(mac_alg);
        return err;

err_drop_ctr:
        crypto_drop_skcipher(&ictx->ctr);
err_drop_mac:
        crypto_drop_ahash(&ictx->mac);
err_free_inst:
        kfree(inst);
        goto out_put_mac;
}

static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        const char *cipher_name;
        char ctr_name[CRYPTO_MAX_ALG_NAME];
        char mac_name[CRYPTO_MAX_ALG_NAME];
        char full_name[CRYPTO_MAX_ALG_NAME];

        cipher_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(cipher_name))
                return PTR_ERR(cipher_name);

        if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
                     cipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
                     cipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
            CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
                                        mac_name);
}

static struct crypto_template crypto_ccm_tmpl = {
        .name = "ccm",
        .create = crypto_ccm_create,
        .module = THIS_MODULE,
};

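/*
 * "ccm(cipher)" above derives both components from a single cipher name;
 * the "ccm_base(ctr_name,mac_name)" template below lets the caller name
 * the CTR and CBC-MAC implementations explicitly, for example to select a
 * specific driver for one of them.
 */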
static int crypto_ccm_base_create(struct crypto_template *tmpl,
                                  struct rtattr **tb)
{
        const char *ctr_name;
        const char *cipher_name;
        char full_name[CRYPTO_MAX_ALG_NAME];

        ctr_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(ctr_name))
                return PTR_ERR(ctr_name);

        cipher_name = crypto_attr_alg_name(tb[2]);
        if (IS_ERR(cipher_name))
                return PTR_ERR(cipher_name);

        if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
                     ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
                                        cipher_name);
}

static struct crypto_template crypto_ccm_base_tmpl = {
        .name = "ccm_base",
        .create = crypto_ccm_base_create,
        .module = THIS_MODULE,
};

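/*
 * rfc4309 (CCM for IPsec ESP): the last three key bytes are a salt that,
 * together with the 8-byte per-packet IV, forms the 11-byte CCM nonce
 * (L = 3).  Only the tag lengths permitted by the RFC are accepted.
 */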
static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
                                 unsigned int keylen)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
        struct crypto_aead *child = ctx->child;
        int err;

        if (keylen < 3)
                return -EINVAL;

        keylen -= 3;
        memcpy(ctx->nonce, key + keylen, 3);

        crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
                                     CRYPTO_TFM_REQ_MASK);
        err = crypto_aead_setkey(child, key, keylen);
        crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
                                      CRYPTO_TFM_RES_MASK);

        return err;
}

static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
                                      unsigned int authsize)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

        switch (authsize) {
        case 8:
        case 12:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return crypto_aead_setauthsize(ctx->child, authsize);
}

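/*
 * Translate an rfc4309 request into a request on the inner ccm() AEAD.
 * The 16-byte inner IV is built from L' = 3, the 3-byte key salt and the
 * 8-byte per-request IV.  Only the first assoclen - 8 bytes of associated
 * data are forwarded: the trailing 8 bytes are the IV itself, which is
 * already carried in the nonce.
 */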
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
        struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
        struct aead_request *subreq = &rctx->subreq;
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_aead *child = ctx->child;
        struct scatterlist *sg;
        u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
                           crypto_aead_alignmask(child) + 1);

        /* L' */
        iv[0] = 3;

        memcpy(iv + 1, ctx->nonce, 3);
        memcpy(iv + 4, req->iv, 8);

        scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

        sg_init_table(rctx->src, 3);
        sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
        sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
        if (sg != rctx->src + 1)
                sg_chain(rctx->src, 2, sg);

        if (req->src != req->dst) {
                sg_init_table(rctx->dst, 3);
                sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
                sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
                if (sg != rctx->dst + 1)
                        sg_chain(rctx->dst, 2, sg);
        }

        aead_request_set_tfm(subreq, child);
        aead_request_set_callback(subreq, req->base.flags, req->base.complete,
                                  req->base.data);
        aead_request_set_crypt(subreq, rctx->src,
                               req->src == req->dst ? rctx->src : rctx->dst,
                               req->cryptlen, iv);
        aead_request_set_ad(subreq, req->assoclen - 8);

        return subreq;
}

static int crypto_rfc4309_encrypt(struct aead_request *req)
{
        if (req->assoclen != 16 && req->assoclen != 20)
                return -EINVAL;

        req = crypto_rfc4309_crypt(req);

        return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
        if (req->assoclen != 16 && req->assoclen != 20)
                return -EINVAL;

        req = crypto_rfc4309_crypt(req);

        return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
        struct aead_instance *inst = aead_alg_instance(tfm);
        struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
        struct crypto_aead *aead;
        unsigned long align;

        aead = crypto_spawn_aead(spawn);
        if (IS_ERR(aead))
                return PTR_ERR(aead);

        ctx->child = aead;

        align = crypto_aead_alignmask(aead);
        align &= ~(crypto_tfm_ctx_alignment() - 1);
        crypto_aead_set_reqsize(
                tfm,
                sizeof(struct crypto_rfc4309_req_ctx) +
                ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
                align + 32);

        return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_aead(ctx->child);
}

static void crypto_rfc4309_free(struct aead_instance *inst)
{
        crypto_drop_aead(aead_instance_ctx(inst));
        kfree(inst);
}

static int crypto_rfc4309_create(struct crypto_template *tmpl,
                                 struct rtattr **tb)
{
        struct crypto_attr_type *algt;
        struct aead_instance *inst;
        struct crypto_aead_spawn *spawn;
        struct aead_alg *alg;
        const char *ccm_name;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
                return -EINVAL;

        ccm_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(ccm_name))
                return PTR_ERR(ccm_name);

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        spawn = aead_instance_ctx(inst);
        crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
        err = crypto_grab_aead(spawn, ccm_name, 0,
                               crypto_requires_sync(algt->type, algt->mask));
        if (err)
                goto out_free_inst;

        alg = crypto_spawn_aead_alg(spawn);

        err = -EINVAL;

        /* We only support 16-byte blocks. */
        if (crypto_aead_alg_ivsize(alg) != 16)
                goto out_drop_alg;

        /* Not a stream cipher? */
        if (alg->base.cra_blocksize != 1)
                goto out_drop_alg;

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
                     "rfc4309(%s)", alg->base.cra_name) >=
            CRYPTO_MAX_ALG_NAME ||
            snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "rfc4309(%s)", alg->base.cra_driver_name) >=
            CRYPTO_MAX_ALG_NAME)
                goto out_drop_alg;

        inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.base.cra_priority = alg->base.cra_priority;
        inst->alg.base.cra_blocksize = 1;
        inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

        inst->alg.ivsize = 8;
        inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
        inst->alg.maxauthsize = 16;

        inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

        inst->alg.init = crypto_rfc4309_init_tfm;
        inst->alg.exit = crypto_rfc4309_exit_tfm;

        inst->alg.setkey = crypto_rfc4309_setkey;
        inst->alg.setauthsize = crypto_rfc4309_setauthsize;
        inst->alg.encrypt = crypto_rfc4309_encrypt;
        inst->alg.decrypt = crypto_rfc4309_decrypt;

        inst->free = crypto_rfc4309_free;

        err = aead_register_instance(tmpl, inst);
        if (err)
                goto out_drop_alg;

out:
        return err;

out_drop_alg:
        crypto_drop_aead(spawn);
out_free_inst:
        kfree(inst);
        goto out;
}

static struct crypto_template crypto_rfc4309_tmpl = {
        .name = "rfc4309",
        .create = crypto_rfc4309_create,
        .module = THIS_MODULE,
};

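/*
 * cbcmac: CBC-MAC exposed as a synchronous hash over an arbitrary block
 * cipher.  The running digest is a single cipher block that each input
 * block is XORed into and then encrypted.  On its own this construction is
 * only safe for fixed-format, length-prefixed messages, which is exactly
 * what the CCM code above feeds it.
 */
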
static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
                                       const u8 *inkey, unsigned int keylen)
{
        struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);

        return crypto_cipher_setkey(ctx->child, inkey, keylen);
}

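/*
 * The digest state lives at the end of the shash descriptor context: the
 * descsize set up in cbcmac_create() reserves one cipher block after the
 * aligned struct cbcmac_desc_ctx, and dg is recomputed from that layout in
 * init/update/final below.
 */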
static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
        struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
        int bs = crypto_shash_digestsize(pdesc->tfm);
        u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;

        ctx->len = 0;
        memset(dg, 0, bs);

        return 0;
}

static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
                                       unsigned int len)
{
        struct crypto_shash *parent = pdesc->tfm;
        struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
        struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
        struct crypto_cipher *tfm = tctx->child;
        int bs = crypto_shash_digestsize(parent);
        u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

        while (len > 0) {
                unsigned int l = min(len, bs - ctx->len);

                crypto_xor(dg + ctx->len, p, l);
                ctx->len += l;
                len -= l;
                p += l;

                if (ctx->len == bs) {
                        crypto_cipher_encrypt_one(tfm, dg, dg);
                        ctx->len = 0;
                }
        }

        return 0;
}

static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
        struct crypto_shash *parent = pdesc->tfm;
        struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
        struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
        struct crypto_cipher *tfm = tctx->child;
        int bs = crypto_shash_digestsize(parent);
        u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

        if (ctx->len)
                crypto_cipher_encrypt_one(tfm, dg, dg);

        memcpy(out, dg, bs);

        return 0;
}

static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_cipher *cipher;
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
        struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;

        return 0;
}

static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
        struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(ctx->child);
}

static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct shash_instance *inst;
        struct crypto_alg *alg;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
        if (err)
                return err;

        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
                                  CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        inst = shash_alloc_instance("cbcmac", alg);
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto out_put_alg;

        err = crypto_init_spawn(shash_instance_ctx(inst), alg,
                                shash_crypto_instance(inst),
                                CRYPTO_ALG_TYPE_MASK);
        if (err)
                goto out_free_inst;

        inst->alg.base.cra_priority = alg->cra_priority;
        inst->alg.base.cra_blocksize = 1;

        inst->alg.digestsize = alg->cra_blocksize;
        inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx),
                                   alg->cra_alignmask + 1) +
                             alg->cra_blocksize;

        inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
        inst->alg.base.cra_init = cbcmac_init_tfm;
        inst->alg.base.cra_exit = cbcmac_exit_tfm;

        inst->alg.init = crypto_cbcmac_digest_init;
        inst->alg.update = crypto_cbcmac_digest_update;
        inst->alg.final = crypto_cbcmac_digest_final;
        inst->alg.setkey = crypto_cbcmac_digest_setkey;

        err = shash_register_instance(tmpl, inst);
        if (err) {
out_free_inst:
                shash_free_instance(shash_crypto_instance(inst));
        }

out_put_alg:
        crypto_mod_put(alg);
        return err;
}

static struct crypto_template crypto_cbcmac_tmpl = {
        .name = "cbcmac",
        .create = cbcmac_create,
        .free = shash_free_instance,
        .module = THIS_MODULE,
};

static int __init crypto_ccm_module_init(void)
{
        int err;

        err = crypto_register_template(&crypto_cbcmac_tmpl);
        if (err)
                goto out;

        err = crypto_register_template(&crypto_ccm_base_tmpl);
        if (err)
                goto out_undo_cbcmac;

        err = crypto_register_template(&crypto_ccm_tmpl);
        if (err)
                goto out_undo_base;

        err = crypto_register_template(&crypto_rfc4309_tmpl);
        if (err)
                goto out_undo_ccm;

out:
        return err;

out_undo_ccm:
        crypto_unregister_template(&crypto_ccm_tmpl);
out_undo_base:
        crypto_unregister_template(&crypto_ccm_base_tmpl);
out_undo_cbcmac:
        crypto_unregister_template(&crypto_cbcmac_tmpl);
        goto out;
}

static void __exit crypto_ccm_module_exit(void)
{
        crypto_unregister_template(&crypto_rfc4309_tmpl);
        crypto_unregister_template(&crypto_ccm_tmpl);
        crypto_unregister_template(&crypto_ccm_base_tmpl);
        crypto_unregister_template(&crypto_cbcmac_tmpl);
}

module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");