// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn mac;
};

struct crypto_ccm_ctx {
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
};

struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];
};

struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

struct crypto_ccm_req_priv_ctx {
	u8 odata[16];
	u8 idata[16];
	u8 auth_tag[16];
	u32 flags;
	struct scatterlist src[3];
	struct scatterlist dst[3];
	union {
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	};
};

struct cbcmac_tfm_ctx {
	struct crypto_cipher *child;
};

struct cbcmac_desc_ctx {
	unsigned int len;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

/* Encode the message length into the last csize bytes of the B_0 block. */
static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}
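
/*
 * Worked example (illustrative, not from the original source): with a
 * 3-byte length field (csize == 3) and msglen == 0x000102, the big-endian
 * bytes 00 01 02 end up in the three bytes starting at the original
 * "block" pointer, i.e. in the length field at the end of B_0.
 */
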
static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_skcipher *ctr = ctx->ctr;
	struct crypto_ahash *mac = ctx->mac;
	int err;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
				    CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(mac, key, keylen);
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
				    CRYPTO_TFM_RES_MASK);

	return err;
}
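
/*
 * CCM derives both the keystream (CTR) and the authentication tag (CBC-MAC)
 * from a single block-cipher key, so the same key material is programmed
 * into the ctr skcipher and the cbcmac ahash above, and a setkey failure or
 * flag from either child is propagated back to the AEAD.
 */
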
static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	/* CCM tags are even lengths from 4 to 16 bytes (RFC 3610). */
	switch (authsize) {
	case 4: case 6: case 8: case 10: case 12: case 14: case 16:
		return 0;
	default:
		return -EINVAL;
	}
}

static int format_input(u8 *info, struct aead_request *req,
			unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int lp = req->iv[0];
	unsigned int l = lp + 1;
	unsigned int m;

	m = crypto_aead_authsize(aead);

	memcpy(info, req->iv, 16);

	/* format control info per RFC 3610 and
	 * NIST Special Publication 800-38C
	 */
	*info |= (8 * ((m - 2) / 2));
	if (req->assoclen)
		*info |= 64;

	return set_msg_len(info + 16 - l, cryptlen, l);
}
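
/*
 * B_0 layout per RFC 3610: the first byte is a flags field (bit 6 set when
 * associated data is present, bits 5-3 hold (M - 2) / 2 where M is the tag
 * length, bits 2-0 hold L - 1, supplied by the caller in iv[0]), followed
 * by the nonce and, in the last L bytes, the message length written by
 * set_msg_len() above.
 */
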
static int format_adata(u8 *adata, unsigned int a)
{
	int len = 0;

	/* add control info for associated data
	 * RFC 3610 and NIST Special Publication 800-38C
	 */
	if (a < 65280) {
		*(__be16 *)adata = cpu_to_be16(a);
		len = 2;
	} else {
		*(__be16 *)adata = cpu_to_be16(0xfffe);
		*(__be32 *)&adata[2] = cpu_to_be32(a);
		len = 6;
	}

	return len;
}
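
/*
 * Associated-data lengths below 2^16 - 2^8 are encoded as two big-endian
 * bytes; larger 32-bit lengths get the 0xfffe marker followed by a 4-byte
 * big-endian value. Illustrative example: a == 24 encodes as 00 18, while
 * a == 0x00011170 encodes as ff fe 00 01 11 70.
 */
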
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct ahash_request *ahreq = &pctx->ahreq;
	unsigned int assoclen = req->assoclen;
	struct scatterlist sg[3];
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int ilen, err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	sg_init_table(sg, 3);
	sg_set_buf(&sg[0], odata, 16);

	/* format associated data and compute into mac */
	if (assoclen) {
		ilen = format_adata(idata, assoclen);
		sg_set_buf(&sg[1], idata, ilen);
		sg_chain(sg, 3, req->src);
	} else {
		ilen = 0;
		sg_chain(sg, 2, req->src);
	}

	ahash_request_set_tfm(ahreq, ctx->mac);
	ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
	ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
	err = crypto_ahash_init(ahreq);
	if (err)
		goto out;
	err = crypto_ahash_update(ahreq);
	if (err)
		goto out;

	/* we need to pad the MAC input to a round multiple of the block size */
	ilen = 16 - (assoclen + ilen) % 16;
	if (ilen < 16) {
		memset(idata, 0, ilen);
		sg_init_table(sg, 2);
		sg_set_buf(&sg[0], idata, ilen);
		sg_chain(sg, 2, plain);
		plain = sg;
		cryptlen += ilen;
	}

	ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
	err = crypto_ahash_finup(ahreq);
out:
	return err;
}
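
/*
 * The CBC-MAC input therefore follows NIST SP 800-38C: B_0, then the
 * encoded associated-data length and the associated data padded with zeroes
 * to a 16-byte boundary, then the plaintext; a trailing partial plaintext
 * block is implicitly zero-padded by the cbcmac final step.
 */
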
static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	u8 *odata = pctx->odata;

	if (!err)
		scatterwalk_map_and_copy(odata, req->dst,
					 req->assoclen + req->cryptlen,
					 crypto_aead_authsize(aead), 1);
	aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (1 > iv[0] || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}
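
/*
 * After this setup, pctx->src (and pctx->dst for out-of-place requests) is
 * a scatterlist of [16-byte tag buffer | payload], and the trailing L
 * counter bytes of req->iv are cleared. A single CTR pass over that list
 * thus uses counter block 0 for the tag and counter blocks 1..n for the
 * data, as required by RFC 3610 / NIST SP 800-38C.
 */
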
static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_init_crypt(req, odata);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
	if (err)
		return err;

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_encrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
				 crypto_aead_authsize(aead), 1);
	return err;
}
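
/*
 * Encryption order: crypto_ccm_auth() computes the CBC-MAC over the
 * plaintext into pctx->odata, a single CTR pass then encrypts the tag and
 * the plaintext together, and the encrypted tag is finally appended to the
 * destination after the ciphertext.
 */
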
static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
				    int err)
{
	struct aead_request *req = areq->data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct scatterlist *dst;

	pctx->flags = 0;

	dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

	if (!err) {
		err = crypto_ccm_auth(req, dst, cryptlen);
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}

static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = pctx->idata;
	int err;

	cryptlen -= authsize;

	err = crypto_ccm_init_crypt(req, authtag);
	if (err)
		return err;

	scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
				 authsize, 0);

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	memcpy(iv, req->iv, 16);

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_decrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_decrypt(skreq);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
	if (err)
		return err;

	/* verify */
	if (crypto_memneq(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}
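
/*
 * Decryption runs in the reverse order: the CTR pass first recovers the
 * plaintext and the expected tag, crypto_ccm_auth() then recomputes the
 * CBC-MAC over the recovered plaintext, and the two tags are compared with
 * crypto_memneq() to avoid a timing side channel. req->iv is copied into
 * pctx->idata before the CTR pass so the original nonce is still intact
 * when the MAC is recomputed.
 */
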
static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
	unsigned long align;
	int err;

	mac = crypto_spawn_ahash(&ictx->mac);
	if (IS_ERR(mac))
		return PTR_ERR(mac);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_mac;

	ctx->mac = mac;
	ctx->ctr = ctr;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		align + sizeof(struct crypto_ccm_req_priv_ctx) +
		max(crypto_ahash_reqsize(mac), crypto_skcipher_reqsize(ctr)));

	return 0;

err_free_mac:
	crypto_free_ahash(mac);
	return err;
}

static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->mac);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_ccm_free(struct aead_instance *inst)
{
	struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_ahash(&ctx->mac);
	crypto_drop_skcipher(&ctx->ctr);
	kfree(inst);
}

static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *ctr_name,
				    const char *mac_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *mac_alg;
	struct hash_alg_common *mac;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
				  CRYPTO_ALG_TYPE_HASH,
				  CRYPTO_ALG_TYPE_AHASH_MASK |
				  CRYPTO_ALG_ASYNC);
	if (IS_ERR(mac_alg))
		return PTR_ERR(mac_alg);

	mac = __crypto_hash_alg_common(mac_alg);
	err = -EINVAL;
	if (strncmp(mac->base.cra_name, "cbcmac(", 7) != 0 ||
	    mac->digestsize != 16)
		goto out_put_mac;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_mac;

	ictx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ictx->mac, mac,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_mac;

	ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

	/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
	err = -EINVAL;
	if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
	    crypto_skcipher_alg_ivsize(ctr) != 16 ||
	    ctr->base.cra_blocksize != 1)
		goto err_drop_ctr;

	/* ctr and cbcmac must use the same underlying block cipher. */
	if (strcmp(ctr->base.cra_name + 4, mac->base.cra_name + 7) != 0)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "ccm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
		     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (mac->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = mac->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.ivsize = 16;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;

	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_ctr;

out_put_mac:
	crypto_mod_put(mac_alg);
	return err;

err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_mac:
	crypto_drop_ahash(&ictx->mac);
err_free_inst:
	kfree(inst);
	goto out_put_mac;
}
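
/*
 * Instantiation example (illustrative): a request for "ccm(aes)" expands to
 * ctr_name "ctr(aes)" and mac_name "cbcmac(aes)", and the resulting instance
 * is registered with cra_name "ccm(aes)" and a driver name of the form
 * "ccm_base(<ctr driver>,<cbcmac driver>)", built above from the children's
 * driver names (e.g. "ccm_base(ctr(aes-generic),cbcmac(aes-generic))" when
 * the generic implementations are selected).
 */
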
static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char mac_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
}

static int crypto_ccm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *mac_name;

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	mac_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(mac_name))
		return PTR_ERR(mac_name);

	return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
}

static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 3)
		return -EINVAL;

	keylen -= 3;
	memcpy(ctx->nonce, key + keylen, 3);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}
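
/*
 * RFC 4309 keys carry a trailing 3-byte salt: the last three bytes are
 * stripped off and saved as ctx->nonce (the implicit part of the CCM
 * nonce), and only the remaining bytes are handed to the inner CCM AEAD as
 * the actual cipher key.
 */
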
static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

	/* RFC 4309 permits ICV lengths of 8, 12 and 16 bytes. */
	switch (authsize) {
	case 8: case 12: case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}

static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;

	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}
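
/*
 * The 16-byte CCM IV built here is: iv[0] = 3 (L' = 3, i.e. a 4-byte
 * length/counter field), iv[1..3] = the salt stored by setkey, and
 * iv[4..11] = the 8-byte per-packet IV from req->iv, with the remaining
 * bytes left as counter space. The last 8 bytes of the associated data,
 * which carry that same per-packet IV in this API convention, are dropped,
 * so the inner AEAD only authenticates the first assoclen - 8 bytes.
 */
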
static int crypto_rfc4309_encrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4309_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 32);

	return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4309_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}

static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = 8;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = 16;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;

	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;

	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
				       const u8 *inkey, unsigned int keylen)
{
	struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);

	return crypto_cipher_setkey(ctx->child, inkey, keylen);
}

static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_digestsize(pdesc->tfm);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;

	ctx->len = 0;
	memset(dg, 0, bs);

	return 0;
}

static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				       unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	while (len > 0) {
		unsigned int l = min(len, bs - ctx->len);

		crypto_xor(dg + ctx->len, p, l);
		ctx->len += l;
		len -= l;
		p += l;

		if (ctx->len == bs) {
			crypto_cipher_encrypt_one(tfm, dg, dg);
			ctx->len = 0;
		}
	}

	return 0;
}
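
/*
 * This is plain CBC encryption that keeps only the running block: each
 * input byte is XORed into the digest buffer dg, and once a full cipher
 * block has accumulated it is encrypted in place, becoming the chaining
 * value for the next block. The final (possibly partial) block is handled
 * in crypto_cbcmac_digest_final() below.
 */
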
static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	if (ctx->len)
		crypto_cipher_encrypt_one(tfm, dg, dg);

	memcpy(out, dg, bs);
	return 0;
}

static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
	if (err)
		return err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = shash_alloc_instance("cbcmac", alg);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	err = crypto_init_spawn(shash_instance_ctx(inst), alg,
				shash_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = 1;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx),
				   alg->cra_alignmask + 1) +
			     alg->cra_blocksize;

	inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
	inst->alg.base.cra_init = cbcmac_init_tfm;
	inst->alg.base.cra_exit = cbcmac_exit_tfm;

	inst->alg.init = crypto_cbcmac_digest_init;
	inst->alg.update = crypto_cbcmac_digest_update;
	inst->alg.final = crypto_cbcmac_digest_final;
	inst->alg.setkey = crypto_cbcmac_digest_setkey;

	err = shash_register_instance(tmpl, inst);

out_free_inst:
	if (err)
		shash_free_instance(shash_crypto_instance(inst));

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static struct crypto_template crypto_ccm_tmpls[] = {
	{
		.name = "cbcmac",
		.create = cbcmac_create,
		.free = shash_free_instance,
		.module = THIS_MODULE,
	}, {
		.name = "ccm_base",
		.create = crypto_ccm_base_create,
		.module = THIS_MODULE,
	}, {
		.name = "ccm",
		.create = crypto_ccm_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4309",
		.create = crypto_rfc4309_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ccm_module_init(void)
{
	return crypto_register_templates(crypto_ccm_tmpls,
					 ARRAY_SIZE(crypto_ccm_tmpls));
}

static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_templates(crypto_ccm_tmpls,
				    ARRAY_SIZE(crypto_ccm_tmpls));
}

subsys_initcall(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");
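
/*
 * Illustrative usage sketch (not part of this module): how a caller might
 * drive a "ccm(aes)" instance through the in-kernel AEAD API. The buffer
 * names key, nonce, buf, assoclen and datalen are assumptions made for the
 * example; the CCM IV convention (iv[0] = L - 1, then the nonce) follows
 * the code above.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct aead_request *req;
 *	struct scatterlist sg;
 *	u8 iv[16] = { 0 };
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_aead_setkey(tfm, key, 16);		// 128-bit AES key
 *	if (!err)
 *		err = crypto_aead_setauthsize(tfm, 8);	// 8-byte tag
 *
 *	iv[0] = 3;			// L' = 3, i.e. 4-byte length field
 *	memcpy(iv + 1, nonce, 11);	// 11-byte nonce in iv[1..11]
 *
 *	// buf holds assoclen bytes of AD followed by datalen bytes of
 *	// plaintext, with 8 spare bytes for the tag appended on encryption.
 *	sg_init_one(&sg, buf, assoclen + datalen + 8);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				  CRYPTO_TFM_REQ_MAY_SLEEP,
 *				  crypto_req_done, &wait);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, &sg, &sg, datalen, iv);
 *	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
 *
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */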