/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
13 #include <crypto/internal/aead.h>
14 #include <crypto/internal/hash.h>
15 #include <crypto/internal/skcipher.h>
16 #include <crypto/scatterwalk.h>
17 #include <linux/err.h>
18 #include <linux/init.h>
19 #include <linux/kernel.h>
20 #include <linux/module.h>
21 #include <linux/slab.h>
25 struct ccm_instance_ctx
{
26 struct crypto_skcipher_spawn ctr
;
27 struct crypto_ahash_spawn mac
;
30 struct crypto_ccm_ctx
{
31 struct crypto_ahash
*mac
;
32 struct crypto_skcipher
*ctr
;
35 struct crypto_rfc4309_ctx
{
36 struct crypto_aead
*child
;
40 struct crypto_rfc4309_req_ctx
{
41 struct scatterlist src
[3];
42 struct scatterlist dst
[3];
43 struct aead_request subreq
;
46 struct crypto_ccm_req_priv_ctx
{
51 struct scatterlist src
[3];
52 struct scatterlist dst
[3];
54 struct ahash_request ahreq
;
55 struct skcipher_request skreq
;
59 struct cbcmac_tfm_ctx
{
60 struct crypto_cipher
*child
;
63 struct cbcmac_desc_ctx
{
67 static inline struct crypto_ccm_req_priv_ctx
*crypto_ccm_reqctx(
68 struct aead_request
*req
)
70 unsigned long align
= crypto_aead_alignmask(crypto_aead_reqtfm(req
));
72 return (void *)PTR_ALIGN((u8
*)aead_request_ctx(req
), align
+ 1);
75 static int set_msg_len(u8
*block
, unsigned int msglen
, int csize
)
79 memset(block
, 0, csize
);
84 else if (msglen
> (1 << (8 * csize
)))
87 data
= cpu_to_be32(msglen
);
88 memcpy(block
- csize
, (u8
*)&data
+ 4 - csize
, csize
);
93 static int crypto_ccm_setkey(struct crypto_aead
*aead
, const u8
*key
,
96 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(aead
);
97 struct crypto_skcipher
*ctr
= ctx
->ctr
;
98 struct crypto_ahash
*mac
= ctx
->mac
;
101 crypto_skcipher_clear_flags(ctr
, CRYPTO_TFM_REQ_MASK
);
102 crypto_skcipher_set_flags(ctr
, crypto_aead_get_flags(aead
) &
103 CRYPTO_TFM_REQ_MASK
);
104 err
= crypto_skcipher_setkey(ctr
, key
, keylen
);
105 crypto_aead_set_flags(aead
, crypto_skcipher_get_flags(ctr
) &
106 CRYPTO_TFM_RES_MASK
);
110 crypto_ahash_clear_flags(mac
, CRYPTO_TFM_REQ_MASK
);
111 crypto_ahash_set_flags(mac
, crypto_aead_get_flags(aead
) &
112 CRYPTO_TFM_REQ_MASK
);
113 err
= crypto_ahash_setkey(mac
, key
, keylen
);
114 crypto_aead_set_flags(aead
, crypto_ahash_get_flags(mac
) &
115 CRYPTO_TFM_RES_MASK
);
121 static int crypto_ccm_setauthsize(struct crypto_aead
*tfm
,
122 unsigned int authsize
)
140 static int format_input(u8
*info
, struct aead_request
*req
,
141 unsigned int cryptlen
)
143 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
144 unsigned int lp
= req
->iv
[0];
145 unsigned int l
= lp
+ 1;
148 m
= crypto_aead_authsize(aead
);
150 memcpy(info
, req
->iv
, 16);
152 /* format control info per RFC 3610 and
153 * NIST Special Publication 800-38C
155 *info
|= (8 * ((m
- 2) / 2));
159 return set_msg_len(info
+ 16 - l
, cryptlen
, l
);
162 static int format_adata(u8
*adata
, unsigned int a
)
166 /* add control info for associated data
167 * RFC 3610 and NIST Special Publication 800-38C
170 *(__be16
*)adata
= cpu_to_be16(a
);
173 *(__be16
*)adata
= cpu_to_be16(0xfffe);
174 *(__be32
*)&adata
[2] = cpu_to_be32(a
);
181 static int crypto_ccm_auth(struct aead_request
*req
, struct scatterlist
*plain
,
182 unsigned int cryptlen
)
184 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
185 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
186 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(aead
);
187 struct ahash_request
*ahreq
= &pctx
->ahreq
;
188 unsigned int assoclen
= req
->assoclen
;
189 struct scatterlist sg
[3];
190 u8
*odata
= pctx
->odata
;
191 u8
*idata
= pctx
->idata
;
194 /* format control data for input */
195 err
= format_input(odata
, req
, cryptlen
);
199 sg_init_table(sg
, 3);
200 sg_set_buf(&sg
[0], odata
, 16);
202 /* format associated data and compute into mac */
204 ilen
= format_adata(idata
, assoclen
);
205 sg_set_buf(&sg
[1], idata
, ilen
);
206 sg_chain(sg
, 3, req
->src
);
209 sg_chain(sg
, 2, req
->src
);
212 ahash_request_set_tfm(ahreq
, ctx
->mac
);
213 ahash_request_set_callback(ahreq
, pctx
->flags
, NULL
, NULL
);
214 ahash_request_set_crypt(ahreq
, sg
, NULL
, assoclen
+ ilen
+ 16);
215 err
= crypto_ahash_init(ahreq
);
218 err
= crypto_ahash_update(ahreq
);
222 /* we need to pad the MAC input to a round multiple of the block size */
223 ilen
= 16 - (assoclen
+ ilen
) % 16;
225 memset(idata
, 0, ilen
);
226 sg_init_table(sg
, 2);
227 sg_set_buf(&sg
[0], idata
, ilen
);
229 sg_chain(sg
, 2, plain
);
234 ahash_request_set_crypt(ahreq
, plain
, pctx
->odata
, cryptlen
);
235 err
= crypto_ahash_finup(ahreq
);
240 static void crypto_ccm_encrypt_done(struct crypto_async_request
*areq
, int err
)
242 struct aead_request
*req
= areq
->data
;
243 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
244 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
245 u8
*odata
= pctx
->odata
;
248 scatterwalk_map_and_copy(odata
, req
->dst
,
249 req
->assoclen
+ req
->cryptlen
,
250 crypto_aead_authsize(aead
), 1);
251 aead_request_complete(req
, err
);
254 static inline int crypto_ccm_check_iv(const u8
*iv
)
256 /* 2 <= L <= 8, so 1 <= L' <= 7. */
257 if (1 > iv
[0] || iv
[0] > 7)
263 static int crypto_ccm_init_crypt(struct aead_request
*req
, u8
*tag
)
265 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
266 struct scatterlist
*sg
;
270 err
= crypto_ccm_check_iv(iv
);
274 pctx
->flags
= aead_request_flags(req
);
276 /* Note: rfc 3610 and NIST 800-38C require counter of
277 * zero to encrypt auth tag.
279 memset(iv
+ 15 - iv
[0], 0, iv
[0] + 1);
281 sg_init_table(pctx
->src
, 3);
282 sg_set_buf(pctx
->src
, tag
, 16);
283 sg
= scatterwalk_ffwd(pctx
->src
+ 1, req
->src
, req
->assoclen
);
284 if (sg
!= pctx
->src
+ 1)
285 sg_chain(pctx
->src
, 2, sg
);
287 if (req
->src
!= req
->dst
) {
288 sg_init_table(pctx
->dst
, 3);
289 sg_set_buf(pctx
->dst
, tag
, 16);
290 sg
= scatterwalk_ffwd(pctx
->dst
+ 1, req
->dst
, req
->assoclen
);
291 if (sg
!= pctx
->dst
+ 1)
292 sg_chain(pctx
->dst
, 2, sg
);
298 static int crypto_ccm_encrypt(struct aead_request
*req
)
300 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
301 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(aead
);
302 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
303 struct skcipher_request
*skreq
= &pctx
->skreq
;
304 struct scatterlist
*dst
;
305 unsigned int cryptlen
= req
->cryptlen
;
306 u8
*odata
= pctx
->odata
;
310 err
= crypto_ccm_init_crypt(req
, odata
);
314 err
= crypto_ccm_auth(req
, sg_next(pctx
->src
), cryptlen
);
319 if (req
->src
!= req
->dst
)
322 skcipher_request_set_tfm(skreq
, ctx
->ctr
);
323 skcipher_request_set_callback(skreq
, pctx
->flags
,
324 crypto_ccm_encrypt_done
, req
);
325 skcipher_request_set_crypt(skreq
, pctx
->src
, dst
, cryptlen
+ 16, iv
);
326 err
= crypto_skcipher_encrypt(skreq
);
330 /* copy authtag to end of dst */
331 scatterwalk_map_and_copy(odata
, sg_next(dst
), cryptlen
,
332 crypto_aead_authsize(aead
), 1);
336 static void crypto_ccm_decrypt_done(struct crypto_async_request
*areq
,
339 struct aead_request
*req
= areq
->data
;
340 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
341 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
342 unsigned int authsize
= crypto_aead_authsize(aead
);
343 unsigned int cryptlen
= req
->cryptlen
- authsize
;
344 struct scatterlist
*dst
;
348 dst
= sg_next(req
->src
== req
->dst
? pctx
->src
: pctx
->dst
);
351 err
= crypto_ccm_auth(req
, dst
, cryptlen
);
352 if (!err
&& crypto_memneq(pctx
->auth_tag
, pctx
->odata
, authsize
))
355 aead_request_complete(req
, err
);
358 static int crypto_ccm_decrypt(struct aead_request
*req
)
360 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
361 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(aead
);
362 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
363 struct skcipher_request
*skreq
= &pctx
->skreq
;
364 struct scatterlist
*dst
;
365 unsigned int authsize
= crypto_aead_authsize(aead
);
366 unsigned int cryptlen
= req
->cryptlen
;
367 u8
*authtag
= pctx
->auth_tag
;
368 u8
*odata
= pctx
->odata
;
369 u8
*iv
= pctx
->idata
;
372 cryptlen
-= authsize
;
374 err
= crypto_ccm_init_crypt(req
, authtag
);
378 scatterwalk_map_and_copy(authtag
, sg_next(pctx
->src
), cryptlen
,
382 if (req
->src
!= req
->dst
)
385 memcpy(iv
, req
->iv
, 16);
387 skcipher_request_set_tfm(skreq
, ctx
->ctr
);
388 skcipher_request_set_callback(skreq
, pctx
->flags
,
389 crypto_ccm_decrypt_done
, req
);
390 skcipher_request_set_crypt(skreq
, pctx
->src
, dst
, cryptlen
+ 16, iv
);
391 err
= crypto_skcipher_decrypt(skreq
);
395 err
= crypto_ccm_auth(req
, sg_next(dst
), cryptlen
);
400 if (crypto_memneq(authtag
, odata
, authsize
))
406 static int crypto_ccm_init_tfm(struct crypto_aead
*tfm
)
408 struct aead_instance
*inst
= aead_alg_instance(tfm
);
409 struct ccm_instance_ctx
*ictx
= aead_instance_ctx(inst
);
410 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(tfm
);
411 struct crypto_ahash
*mac
;
412 struct crypto_skcipher
*ctr
;
416 mac
= crypto_spawn_ahash(&ictx
->mac
);
420 ctr
= crypto_spawn_skcipher(&ictx
->ctr
);
428 align
= crypto_aead_alignmask(tfm
);
429 align
&= ~(crypto_tfm_ctx_alignment() - 1);
430 crypto_aead_set_reqsize(
432 align
+ sizeof(struct crypto_ccm_req_priv_ctx
) +
433 max(crypto_ahash_reqsize(mac
), crypto_skcipher_reqsize(ctr
)));
438 crypto_free_ahash(mac
);
442 static void crypto_ccm_exit_tfm(struct crypto_aead
*tfm
)
444 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(tfm
);
446 crypto_free_ahash(ctx
->mac
);
447 crypto_free_skcipher(ctx
->ctr
);
450 static void crypto_ccm_free(struct aead_instance
*inst
)
452 struct ccm_instance_ctx
*ctx
= aead_instance_ctx(inst
);
454 crypto_drop_ahash(&ctx
->mac
);
455 crypto_drop_skcipher(&ctx
->ctr
);
459 static int crypto_ccm_create_common(struct crypto_template
*tmpl
,
461 const char *ctr_name
,
462 const char *mac_name
)
464 struct crypto_attr_type
*algt
;
465 struct aead_instance
*inst
;
466 struct skcipher_alg
*ctr
;
467 struct crypto_alg
*mac_alg
;
468 struct hash_alg_common
*mac
;
469 struct ccm_instance_ctx
*ictx
;
472 algt
= crypto_get_attr_type(tb
);
474 return PTR_ERR(algt
);
476 if ((algt
->type
^ CRYPTO_ALG_TYPE_AEAD
) & algt
->mask
)
479 mac_alg
= crypto_find_alg(mac_name
, &crypto_ahash_type
,
480 CRYPTO_ALG_TYPE_HASH
,
481 CRYPTO_ALG_TYPE_AHASH_MASK
|
484 return PTR_ERR(mac_alg
);
486 mac
= __crypto_hash_alg_common(mac_alg
);
488 if (strncmp(mac
->base
.cra_name
, "cbcmac(", 7) != 0 ||
489 mac
->digestsize
!= 16)
492 inst
= kzalloc(sizeof(*inst
) + sizeof(*ictx
), GFP_KERNEL
);
497 ictx
= aead_instance_ctx(inst
);
498 err
= crypto_init_ahash_spawn(&ictx
->mac
, mac
,
499 aead_crypto_instance(inst
));
503 crypto_set_skcipher_spawn(&ictx
->ctr
, aead_crypto_instance(inst
));
504 err
= crypto_grab_skcipher(&ictx
->ctr
, ctr_name
, 0,
505 crypto_requires_sync(algt
->type
,
510 ctr
= crypto_spawn_skcipher_alg(&ictx
->ctr
);
512 /* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
514 if (strncmp(ctr
->base
.cra_name
, "ctr(", 4) != 0 ||
515 crypto_skcipher_alg_ivsize(ctr
) != 16 ||
516 ctr
->base
.cra_blocksize
!= 1)
519 /* ctr and cbcmac must use the same underlying block cipher. */
520 if (strcmp(ctr
->base
.cra_name
+ 4, mac
->base
.cra_name
+ 7) != 0)
524 if (snprintf(inst
->alg
.base
.cra_name
, CRYPTO_MAX_ALG_NAME
,
525 "ccm(%s", ctr
->base
.cra_name
+ 4) >= CRYPTO_MAX_ALG_NAME
)
528 if (snprintf(inst
->alg
.base
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
,
529 "ccm_base(%s,%s)", ctr
->base
.cra_driver_name
,
530 mac
->base
.cra_driver_name
) >= CRYPTO_MAX_ALG_NAME
)
533 inst
->alg
.base
.cra_flags
= ctr
->base
.cra_flags
& CRYPTO_ALG_ASYNC
;
534 inst
->alg
.base
.cra_priority
= (mac
->base
.cra_priority
+
535 ctr
->base
.cra_priority
) / 2;
536 inst
->alg
.base
.cra_blocksize
= 1;
537 inst
->alg
.base
.cra_alignmask
= mac
->base
.cra_alignmask
|
538 ctr
->base
.cra_alignmask
;
539 inst
->alg
.ivsize
= 16;
540 inst
->alg
.chunksize
= crypto_skcipher_alg_chunksize(ctr
);
541 inst
->alg
.maxauthsize
= 16;
542 inst
->alg
.base
.cra_ctxsize
= sizeof(struct crypto_ccm_ctx
);
543 inst
->alg
.init
= crypto_ccm_init_tfm
;
544 inst
->alg
.exit
= crypto_ccm_exit_tfm
;
545 inst
->alg
.setkey
= crypto_ccm_setkey
;
546 inst
->alg
.setauthsize
= crypto_ccm_setauthsize
;
547 inst
->alg
.encrypt
= crypto_ccm_encrypt
;
548 inst
->alg
.decrypt
= crypto_ccm_decrypt
;
550 inst
->free
= crypto_ccm_free
;
552 err
= aead_register_instance(tmpl
, inst
);
557 crypto_mod_put(mac_alg
);
561 crypto_drop_skcipher(&ictx
->ctr
);
563 crypto_drop_ahash(&ictx
->mac
);
569 static int crypto_ccm_create(struct crypto_template
*tmpl
, struct rtattr
**tb
)
571 const char *cipher_name
;
572 char ctr_name
[CRYPTO_MAX_ALG_NAME
];
573 char mac_name
[CRYPTO_MAX_ALG_NAME
];
575 cipher_name
= crypto_attr_alg_name(tb
[1]);
576 if (IS_ERR(cipher_name
))
577 return PTR_ERR(cipher_name
);
579 if (snprintf(ctr_name
, CRYPTO_MAX_ALG_NAME
, "ctr(%s)",
580 cipher_name
) >= CRYPTO_MAX_ALG_NAME
)
581 return -ENAMETOOLONG
;
583 if (snprintf(mac_name
, CRYPTO_MAX_ALG_NAME
, "cbcmac(%s)",
584 cipher_name
) >= CRYPTO_MAX_ALG_NAME
)
585 return -ENAMETOOLONG
;
587 return crypto_ccm_create_common(tmpl
, tb
, ctr_name
, mac_name
);
/* "ccm_base(ctr,mac)" template: both sub-algorithm names are given
 * explicitly as template arguments. */
static int crypto_ccm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *mac_name;

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	mac_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(mac_name))
		return PTR_ERR(mac_name);

	return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
}
607 static int crypto_rfc4309_setkey(struct crypto_aead
*parent
, const u8
*key
,
610 struct crypto_rfc4309_ctx
*ctx
= crypto_aead_ctx(parent
);
611 struct crypto_aead
*child
= ctx
->child
;
618 memcpy(ctx
->nonce
, key
+ keylen
, 3);
620 crypto_aead_clear_flags(child
, CRYPTO_TFM_REQ_MASK
);
621 crypto_aead_set_flags(child
, crypto_aead_get_flags(parent
) &
622 CRYPTO_TFM_REQ_MASK
);
623 err
= crypto_aead_setkey(child
, key
, keylen
);
624 crypto_aead_set_flags(parent
, crypto_aead_get_flags(child
) &
625 CRYPTO_TFM_RES_MASK
);
630 static int crypto_rfc4309_setauthsize(struct crypto_aead
*parent
,
631 unsigned int authsize
)
633 struct crypto_rfc4309_ctx
*ctx
= crypto_aead_ctx(parent
);
644 return crypto_aead_setauthsize(ctx
->child
, authsize
);
647 static struct aead_request
*crypto_rfc4309_crypt(struct aead_request
*req
)
649 struct crypto_rfc4309_req_ctx
*rctx
= aead_request_ctx(req
);
650 struct aead_request
*subreq
= &rctx
->subreq
;
651 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
652 struct crypto_rfc4309_ctx
*ctx
= crypto_aead_ctx(aead
);
653 struct crypto_aead
*child
= ctx
->child
;
654 struct scatterlist
*sg
;
655 u8
*iv
= PTR_ALIGN((u8
*)(subreq
+ 1) + crypto_aead_reqsize(child
),
656 crypto_aead_alignmask(child
) + 1);
661 memcpy(iv
+ 1, ctx
->nonce
, 3);
662 memcpy(iv
+ 4, req
->iv
, 8);
664 scatterwalk_map_and_copy(iv
+ 16, req
->src
, 0, req
->assoclen
- 8, 0);
666 sg_init_table(rctx
->src
, 3);
667 sg_set_buf(rctx
->src
, iv
+ 16, req
->assoclen
- 8);
668 sg
= scatterwalk_ffwd(rctx
->src
+ 1, req
->src
, req
->assoclen
);
669 if (sg
!= rctx
->src
+ 1)
670 sg_chain(rctx
->src
, 2, sg
);
672 if (req
->src
!= req
->dst
) {
673 sg_init_table(rctx
->dst
, 3);
674 sg_set_buf(rctx
->dst
, iv
+ 16, req
->assoclen
- 8);
675 sg
= scatterwalk_ffwd(rctx
->dst
+ 1, req
->dst
, req
->assoclen
);
676 if (sg
!= rctx
->dst
+ 1)
677 sg_chain(rctx
->dst
, 2, sg
);
680 aead_request_set_tfm(subreq
, child
);
681 aead_request_set_callback(subreq
, req
->base
.flags
, req
->base
.complete
,
683 aead_request_set_crypt(subreq
, rctx
->src
,
684 req
->src
== req
->dst
? rctx
->src
: rctx
->dst
,
686 aead_request_set_ad(subreq
, req
->assoclen
- 8);
691 static int crypto_rfc4309_encrypt(struct aead_request
*req
)
693 if (req
->assoclen
!= 16 && req
->assoclen
!= 20)
696 req
= crypto_rfc4309_crypt(req
);
698 return crypto_aead_encrypt(req
);
701 static int crypto_rfc4309_decrypt(struct aead_request
*req
)
703 if (req
->assoclen
!= 16 && req
->assoclen
!= 20)
706 req
= crypto_rfc4309_crypt(req
);
708 return crypto_aead_decrypt(req
);
711 static int crypto_rfc4309_init_tfm(struct crypto_aead
*tfm
)
713 struct aead_instance
*inst
= aead_alg_instance(tfm
);
714 struct crypto_aead_spawn
*spawn
= aead_instance_ctx(inst
);
715 struct crypto_rfc4309_ctx
*ctx
= crypto_aead_ctx(tfm
);
716 struct crypto_aead
*aead
;
719 aead
= crypto_spawn_aead(spawn
);
721 return PTR_ERR(aead
);
725 align
= crypto_aead_alignmask(aead
);
726 align
&= ~(crypto_tfm_ctx_alignment() - 1);
727 crypto_aead_set_reqsize(
729 sizeof(struct crypto_rfc4309_req_ctx
) +
730 ALIGN(crypto_aead_reqsize(aead
), crypto_tfm_ctx_alignment()) +
736 static void crypto_rfc4309_exit_tfm(struct crypto_aead
*tfm
)
738 struct crypto_rfc4309_ctx
*ctx
= crypto_aead_ctx(tfm
);
740 crypto_free_aead(ctx
->child
);
/* Drop the spawn and free the instance (instance ->free callback). */
static void crypto_rfc4309_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}
749 static int crypto_rfc4309_create(struct crypto_template
*tmpl
,
752 struct crypto_attr_type
*algt
;
753 struct aead_instance
*inst
;
754 struct crypto_aead_spawn
*spawn
;
755 struct aead_alg
*alg
;
756 const char *ccm_name
;
759 algt
= crypto_get_attr_type(tb
);
761 return PTR_ERR(algt
);
763 if ((algt
->type
^ CRYPTO_ALG_TYPE_AEAD
) & algt
->mask
)
766 ccm_name
= crypto_attr_alg_name(tb
[1]);
767 if (IS_ERR(ccm_name
))
768 return PTR_ERR(ccm_name
);
770 inst
= kzalloc(sizeof(*inst
) + sizeof(*spawn
), GFP_KERNEL
);
774 spawn
= aead_instance_ctx(inst
);
775 crypto_set_aead_spawn(spawn
, aead_crypto_instance(inst
));
776 err
= crypto_grab_aead(spawn
, ccm_name
, 0,
777 crypto_requires_sync(algt
->type
, algt
->mask
));
781 alg
= crypto_spawn_aead_alg(spawn
);
785 /* We only support 16-byte blocks. */
786 if (crypto_aead_alg_ivsize(alg
) != 16)
789 /* Not a stream cipher? */
790 if (alg
->base
.cra_blocksize
!= 1)
794 if (snprintf(inst
->alg
.base
.cra_name
, CRYPTO_MAX_ALG_NAME
,
795 "rfc4309(%s)", alg
->base
.cra_name
) >=
796 CRYPTO_MAX_ALG_NAME
||
797 snprintf(inst
->alg
.base
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
,
798 "rfc4309(%s)", alg
->base
.cra_driver_name
) >=
802 inst
->alg
.base
.cra_flags
= alg
->base
.cra_flags
& CRYPTO_ALG_ASYNC
;
803 inst
->alg
.base
.cra_priority
= alg
->base
.cra_priority
;
804 inst
->alg
.base
.cra_blocksize
= 1;
805 inst
->alg
.base
.cra_alignmask
= alg
->base
.cra_alignmask
;
807 inst
->alg
.ivsize
= 8;
808 inst
->alg
.chunksize
= crypto_aead_alg_chunksize(alg
);
809 inst
->alg
.maxauthsize
= 16;
811 inst
->alg
.base
.cra_ctxsize
= sizeof(struct crypto_rfc4309_ctx
);
813 inst
->alg
.init
= crypto_rfc4309_init_tfm
;
814 inst
->alg
.exit
= crypto_rfc4309_exit_tfm
;
816 inst
->alg
.setkey
= crypto_rfc4309_setkey
;
817 inst
->alg
.setauthsize
= crypto_rfc4309_setauthsize
;
818 inst
->alg
.encrypt
= crypto_rfc4309_encrypt
;
819 inst
->alg
.decrypt
= crypto_rfc4309_decrypt
;
821 inst
->free
= crypto_rfc4309_free
;
823 err
= aead_register_instance(tmpl
, inst
);
831 crypto_drop_aead(spawn
);
837 static int crypto_cbcmac_digest_setkey(struct crypto_shash
*parent
,
838 const u8
*inkey
, unsigned int keylen
)
840 struct cbcmac_tfm_ctx
*ctx
= crypto_shash_ctx(parent
);
842 return crypto_cipher_setkey(ctx
->child
, inkey
, keylen
);
845 static int crypto_cbcmac_digest_init(struct shash_desc
*pdesc
)
847 struct cbcmac_desc_ctx
*ctx
= shash_desc_ctx(pdesc
);
848 int bs
= crypto_shash_digestsize(pdesc
->tfm
);
849 u8
*dg
= (u8
*)ctx
+ crypto_shash_descsize(pdesc
->tfm
) - bs
;
857 static int crypto_cbcmac_digest_update(struct shash_desc
*pdesc
, const u8
*p
,
860 struct crypto_shash
*parent
= pdesc
->tfm
;
861 struct cbcmac_tfm_ctx
*tctx
= crypto_shash_ctx(parent
);
862 struct cbcmac_desc_ctx
*ctx
= shash_desc_ctx(pdesc
);
863 struct crypto_cipher
*tfm
= tctx
->child
;
864 int bs
= crypto_shash_digestsize(parent
);
865 u8
*dg
= (u8
*)ctx
+ crypto_shash_descsize(parent
) - bs
;
868 unsigned int l
= min(len
, bs
- ctx
->len
);
870 crypto_xor(dg
+ ctx
->len
, p
, l
);
875 if (ctx
->len
== bs
) {
876 crypto_cipher_encrypt_one(tfm
, dg
, dg
);
884 static int crypto_cbcmac_digest_final(struct shash_desc
*pdesc
, u8
*out
)
886 struct crypto_shash
*parent
= pdesc
->tfm
;
887 struct cbcmac_tfm_ctx
*tctx
= crypto_shash_ctx(parent
);
888 struct cbcmac_desc_ctx
*ctx
= shash_desc_ctx(pdesc
);
889 struct crypto_cipher
*tfm
= tctx
->child
;
890 int bs
= crypto_shash_digestsize(parent
);
891 u8
*dg
= (u8
*)ctx
+ crypto_shash_descsize(parent
) - bs
;
894 crypto_cipher_encrypt_one(tfm
, dg
, dg
);
900 static int cbcmac_init_tfm(struct crypto_tfm
*tfm
)
902 struct crypto_cipher
*cipher
;
903 struct crypto_instance
*inst
= (void *)tfm
->__crt_alg
;
904 struct crypto_spawn
*spawn
= crypto_instance_ctx(inst
);
905 struct cbcmac_tfm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
907 cipher
= crypto_spawn_cipher(spawn
);
909 return PTR_ERR(cipher
);
916 static void cbcmac_exit_tfm(struct crypto_tfm
*tfm
)
918 struct cbcmac_tfm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
919 crypto_free_cipher(ctx
->child
);
922 static int cbcmac_create(struct crypto_template
*tmpl
, struct rtattr
**tb
)
924 struct shash_instance
*inst
;
925 struct crypto_alg
*alg
;
928 err
= crypto_check_attr_type(tb
, CRYPTO_ALG_TYPE_SHASH
);
932 alg
= crypto_get_attr_alg(tb
, CRYPTO_ALG_TYPE_CIPHER
,
933 CRYPTO_ALG_TYPE_MASK
);
937 inst
= shash_alloc_instance("cbcmac", alg
);
942 err
= crypto_init_spawn(shash_instance_ctx(inst
), alg
,
943 shash_crypto_instance(inst
),
944 CRYPTO_ALG_TYPE_MASK
);
948 inst
->alg
.base
.cra_priority
= alg
->cra_priority
;
949 inst
->alg
.base
.cra_blocksize
= 1;
951 inst
->alg
.digestsize
= alg
->cra_blocksize
;
952 inst
->alg
.descsize
= ALIGN(sizeof(struct cbcmac_desc_ctx
),
953 alg
->cra_alignmask
+ 1) +
956 inst
->alg
.base
.cra_ctxsize
= sizeof(struct cbcmac_tfm_ctx
);
957 inst
->alg
.base
.cra_init
= cbcmac_init_tfm
;
958 inst
->alg
.base
.cra_exit
= cbcmac_exit_tfm
;
960 inst
->alg
.init
= crypto_cbcmac_digest_init
;
961 inst
->alg
.update
= crypto_cbcmac_digest_update
;
962 inst
->alg
.final
= crypto_cbcmac_digest_final
;
963 inst
->alg
.setkey
= crypto_cbcmac_digest_setkey
;
965 err
= shash_register_instance(tmpl
, inst
);
969 shash_free_instance(shash_crypto_instance(inst
));
976 static struct crypto_template crypto_ccm_tmpls
[] = {
979 .create
= cbcmac_create
,
980 .free
= shash_free_instance
,
981 .module
= THIS_MODULE
,
984 .create
= crypto_ccm_base_create
,
985 .module
= THIS_MODULE
,
988 .create
= crypto_ccm_create
,
989 .module
= THIS_MODULE
,
992 .create
= crypto_rfc4309_create
,
993 .module
= THIS_MODULE
,
997 static int __init
crypto_ccm_module_init(void)
999 return crypto_register_templates(crypto_ccm_tmpls
,
1000 ARRAY_SIZE(crypto_ccm_tmpls
));
1003 static void __exit
crypto_ccm_module_exit(void)
1005 crypto_unregister_templates(crypto_ccm_tmpls
,
1006 ARRAY_SIZE(crypto_ccm_tmpls
));
1009 subsys_initcall(crypto_ccm_module_init
);
1010 module_exit(crypto_ccm_module_exit
);
1012 MODULE_LICENSE("GPL");
1013 MODULE_DESCRIPTION("Counter with CBC MAC");
1014 MODULE_ALIAS_CRYPTO("ccm_base");
1015 MODULE_ALIAS_CRYPTO("rfc4309");
1016 MODULE_ALIAS_CRYPTO("ccm");