/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/hash.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <crypto/hash.h>
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
struct gcm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn ghash;
};
struct crypto_gcm_ctx {
	struct crypto_ablkcipher *ctr;
	struct crypto_ahash *ghash;
};
struct crypto_rfc4106_ctx {
	struct crypto_aead *child;
	u8 nonce[4];
};
struct crypto_rfc4543_instance_ctx {
	struct crypto_aead_spawn aead;
};
struct crypto_rfc4543_ctx {
	struct crypto_aead *child;
	struct crypto_blkcipher *null;
	u8 nonce[4];
};
struct crypto_rfc4543_req_ctx {
	struct aead_request subreq;
};
struct crypto_gcm_ghash_ctx {
	unsigned int cryptlen;
	struct scatterlist *src;
	int (*complete)(struct aead_request *req, u32 flags);
};
struct crypto_gcm_req_priv_ctx {
	u8 iv[16];
	u8 auth_tag[16];
	u8 iauth_tag[16];

	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct scatterlist sg;

	struct crypto_gcm_ghash_ctx ghash_ctx;

	union {
		struct ahash_request ahreq;
		struct ablkcipher_request abreq;
	} u;
};
struct crypto_gcm_setkey_result {
	int err;
	struct completion completion;
};

static struct {
	u8 buf[16];
	struct scatterlist sg;
} *gcm_zeroes;
static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc);
static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}
static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
{
	struct crypto_gcm_setkey_result *result = req->data;

	if (err == -EINPROGRESS)
		return;

	result->err = err;
	complete(&result->completion);
}
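/*
 * Derive the GHASH subkey by encrypting a single all-zero block with the
 * CTR transform keyed with the user key (zeroed plaintext and zeroed
 * counter block, so this in effect computes E_K(0^128)), then feed that
 * 16-byte value to the ghash transform's setkey.  The encryption may
 * complete asynchronously, hence the completion/result plumbing above.
 */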
static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ahash *ghash = ctx->ghash;
	struct crypto_ablkcipher *ctr = ctx->ctr;
	struct {
		be128 hash;
		u8 iv[16];

		struct crypto_gcm_setkey_result result;

		struct scatterlist sg[1];
		struct ablkcipher_request req;
	} *data;
	int err;

	crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(ctr, key, keylen);
	crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
			      CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	init_completion(&data->result.completion);
	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
	ablkcipher_request_set_tfm(&data->req, ctr);
	ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
						    CRYPTO_TFM_REQ_MAY_BACKLOG,
					crypto_gcm_setkey_done,
					&data->result);
	ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
				     sizeof(data->hash), data->iv);

	err = crypto_ablkcipher_encrypt(&data->req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		err = wait_for_completion_interruptible(
			&data->result.completion);
		if (!err)
			err = data->result.err;
	}

	if (err)
		goto out;

	crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
			       CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) &
			      CRYPTO_TFM_RES_MASK);

out:
	kzfree(data);
	return err;
}
static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
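/*
 * Set up the per-request state: the 16-byte counter block is the caller's
 * 12-byte IV followed by a big-endian block counter starting at 1, and the
 * src/dst scatterlists are rebuilt so that the 16-byte auth_tag buffer
 * precedes the actual data (the associated data is skipped with
 * scatterwalk_ffwd()).
 */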
static void crypto_gcm_init_common(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	__be32 counter = cpu_to_be32(1);
	struct scatterlist *sg;

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	memcpy(pctx->iv, req->iv, 12);
	memcpy(pctx->iv + 12, &counter, 4);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		scatterwalk_sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			scatterwalk_sg_chain(pctx->dst, 2, sg);
	}
}
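/*
 * Program the CTR request.  Because the zeroed auth_tag buffer sits first
 * in the chained scatterlist, the first counter block is in effect
 * encrypted into auth_tag; the GHASH digest is later XORed into it to form
 * the final authentication tag.
 */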
static void crypto_gcm_init_crypt(struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *ablk_req = &pctx->u.abreq;
	struct scatterlist *dst;

	dst = req->src == req->dst ? pctx->src : pctx->dst;

	ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
	ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
				     cryptlen + sizeof(pctx->auth_tag),
				     pctx->iv);
}
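/* Number of padding bytes needed to fill the current 16-byte GHASH block. */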
static inline unsigned int gcm_remain(unsigned int len)
{
	len &= 0xfU;
	return len ? 16 - len : 0;
}
static void gcm_hash_len_done(struct crypto_async_request *areq, int err);
static int gcm_hash_update(struct aead_request *req,
			   crypto_completion_t compl,
			   struct scatterlist *src,
			   unsigned int len, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;

	ahash_request_set_callback(ahreq, flags, compl, req);
	ahash_request_set_crypt(ahreq, src, NULL, len);

	return crypto_ahash_update(ahreq);
}
static int gcm_hash_remain(struct aead_request *req,
			   unsigned int remain,
			   crypto_completion_t compl, u32 flags)
{
	return gcm_hash_update(req, compl, &gcm_zeroes->sg, remain, flags);
}
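/*
 * Final GHASH block: the bit lengths of the associated data and of the
 * ciphertext, each as a 64-bit big-endian value.  finup() also copies the
 * resulting digest into iauth_tag.
 */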
static int gcm_hash_len(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	u128 lengths;

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(gctx->cryptlen * 8);
	memcpy(pctx->iauth_tag, &lengths, 16);
	sg_init_one(&pctx->sg, pctx->iauth_tag, 16);
	ahash_request_set_callback(ahreq, flags, gcm_hash_len_done, req);
	ahash_request_set_crypt(ahreq, &pctx->sg,
				pctx->iauth_tag, sizeof(lengths));

	return crypto_ahash_finup(ahreq);
}
static int gcm_hash_len_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	return gctx->complete(req, flags);
}
static void gcm_hash_len_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_len_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}
static int gcm_hash_crypt_remain_continue(struct aead_request *req, u32 flags)
{
	return gcm_hash_len(req, flags) ?:
	       gcm_hash_len_continue(req, flags);
}
static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}
static int gcm_hash_crypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int remain;

	remain = gcm_remain(gctx->cryptlen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_crypt_remain_done, flags) ?:
		       gcm_hash_crypt_remain_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}
static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}
static int gcm_hash_assoc_remain_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	if (gctx->cryptlen)
		return gcm_hash_update(req, gcm_hash_crypt_done,
				       gctx->src, gctx->cryptlen, flags) ?:
		       gcm_hash_crypt_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}
static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}
static int gcm_hash_assoc_continue(struct aead_request *req, u32 flags)
{
	unsigned int remain;

	remain = gcm_remain(req->assoclen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_assoc_remain_done, flags) ?:
		       gcm_hash_assoc_remain_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}
static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}
static int gcm_hash_init_continue(struct aead_request *req, u32 flags)
{
	if (req->assoclen)
		return gcm_hash_update(req, gcm_hash_assoc_done,
				       req->src, req->assoclen, flags) ?:
		       gcm_hash_assoc_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}
static void gcm_hash_init_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_init_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}
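/*
 * Kick off the GHASH pass.  Each step may finish synchronously (fall
 * through to the next *_continue helper) or asynchronously (resume from
 * the matching *_done callback): init -> associated data -> zero padding
 * -> ciphertext -> zero padding -> length block -> gctx->complete().
 */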
static int gcm_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));

	ahash_request_set_tfm(ahreq, ctx->ghash);

	ahash_request_set_callback(ahreq, flags, gcm_hash_init_done, req);
	return crypto_ahash_init(ahreq) ?:
	       gcm_hash_init_continue(req, flags);
}
static int gcm_enc_copy_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;

	crypto_xor(auth_tag, pctx->iauth_tag, 16);
	scatterwalk_map_and_copy(auth_tag, req->dst,
				 req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}
static int gcm_encrypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	gctx->src = sg_next(req->src == req->dst ? pctx->src : pctx->dst);
	gctx->cryptlen = req->cryptlen;
	gctx->complete = gcm_enc_copy_hash;

	return gcm_hash(req, flags);
}
static void gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_encrypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}
static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->u.abreq;
	u32 flags = aead_request_flags(req);

	crypto_gcm_init_common(req);
	crypto_gcm_init_crypt(req, req->cryptlen);
	ablkcipher_request_set_callback(abreq, flags, gcm_encrypt_done, req);

	return crypto_ablkcipher_encrypt(abreq) ?:
	       gcm_encrypt_continue(req, flags);
}
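/*
 * Recompute the tag (encrypted first counter block XOR GHASH digest) and
 * compare it with the tag stored at the end of the source buffer, using
 * crypto_memneq() so the comparison does not leak the position of the
 * first mismatching byte.
 */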
static int crypto_gcm_verify(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	crypto_xor(auth_tag, iauth_tag, 16);
	scatterwalk_map_and_copy(iauth_tag, req->src,
				 req->assoclen + cryptlen, authsize, 0);
	return crypto_memneq(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}
static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_verify(req);

	aead_request_complete(req, err);
}
static int gcm_dec_hash_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->u.abreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	crypto_gcm_init_crypt(req, gctx->cryptlen);
	ablkcipher_request_set_callback(abreq, flags, gcm_decrypt_done, req);
	return crypto_ablkcipher_decrypt(abreq) ?: crypto_gcm_verify(req);
}
static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u32 flags = aead_request_flags(req);

	cryptlen -= authsize;

	crypto_gcm_init_common(req);

	gctx->src = sg_next(pctx->src);
	gctx->cryptlen = cryptlen;
	gctx->complete = gcm_dec_hash_continue;

	return gcm_hash(req, flags);
}
static int crypto_gcm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct gcm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ablkcipher *ctr;
	struct crypto_ahash *ghash;
	unsigned long align;
	int err;

	ghash = crypto_spawn_ahash(&ictx->ghash);
	if (IS_ERR(ghash))
		return PTR_ERR(ghash);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_hash;

	ctx->ctr = ctr;
	ctx->ghash = ghash;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(tfm,
		align + offsetof(struct crypto_gcm_req_priv_ctx, u) +
		max(sizeof(struct ablkcipher_request) +
		    crypto_ablkcipher_reqsize(ctr),
		    sizeof(struct ahash_request) +
		    crypto_ahash_reqsize(ghash)));

	return 0;

err_free_hash:
	crypto_free_ahash(ghash);
	return err;
}
static void crypto_gcm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->ghash);
	crypto_free_ablkcipher(ctx->ctr);
}
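/*
 * Build a "gcm" AEAD instance out of two lower-level algorithms: a CTR
 * mode skcipher (counter-block encryption) and a GHASH-compatible ahash
 * with a 16-byte digest.  Used by both the gcm() and gcm_base() templates.
 */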
static int crypto_gcm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *full_name,
				    const char *ctr_name,
				    const char *ghash_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_alg *ctr;
	struct crypto_alg *ghash_alg;
	struct hash_alg_common *ghash;
	struct gcm_instance_ctx *ctx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
				    CRYPTO_ALG_TYPE_HASH,
				    CRYPTO_ALG_TYPE_AHASH_MASK);
	if (IS_ERR(ghash_alg))
		return PTR_ERR(ghash_alg);

	ghash = __crypto_hash_alg_common(ghash_alg);

	err = -ENOMEM;
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		goto out_put_ghash;

	ctx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ctx->ghash, ghash,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	err = -EINVAL;
	if (ghash->digestsize != 16)
		goto err_drop_ghash;

	crypto_set_skcipher_spawn(&ctx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_ghash;

	ctr = crypto_skcipher_spawn_alg(&ctx->ctr);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (ctr->cra_ablkcipher.ivsize != 16)
		goto out_put_ctr;

	/* Not a stream cipher? */
	if (ctr->cra_blocksize != 1)
		goto out_put_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm_base(%s,%s)", ctr->cra_driver_name,
		     ghash_alg->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.base.cra_flags = (ghash->base.cra_flags | ctr->cra_flags) &
				   CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (ghash->base.cra_priority +
				       ctr->cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = ghash->base.cra_alignmask |
				       ctr->cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.ivsize = 12;
	inst->alg.maxauthsize = 16;
	inst->alg.init = crypto_gcm_init_tfm;
	inst->alg.exit = crypto_gcm_exit_tfm;
	inst->alg.setkey = crypto_gcm_setkey;
	inst->alg.setauthsize = crypto_gcm_setauthsize;
	inst->alg.encrypt = crypto_gcm_encrypt;
	inst->alg.decrypt = crypto_gcm_decrypt;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_put_ctr;

out_put_ghash:
	crypto_mod_put(ghash_alg);
	return err;

out_put_ctr:
	crypto_drop_skcipher(&ctx->ctr);
err_drop_ghash:
	crypto_drop_ahash(&ctx->ghash);
err_free_inst:
	kfree(inst);
	goto out_put_ghash;
}
static int crypto_gcm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char full_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_gcm_create_common(tmpl, tb, full_name,
					ctr_name, "ghash");
}

static void crypto_gcm_free(struct crypto_instance *inst)
{
	struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->ctr);
	crypto_drop_ahash(&ctx->ghash);
	kfree(aead_instance(inst));
}
static struct crypto_template crypto_gcm_tmpl = {
	.name = "gcm",
	.create = crypto_gcm_create,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};
static int crypto_gcm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *ghash_name;
	char full_name[CRYPTO_MAX_ALG_NAME];

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	ghash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(ghash_name))
		return PTR_ERR(ghash_name);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)",
		     ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_gcm_create_common(tmpl, tb, full_name,
					ctr_name, ghash_name);
}
static struct crypto_template crypto_gcm_base_tmpl = {
	.name = "gcm_base",
	.create = crypto_gcm_base_create,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};
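/*
 * RFC 4106 (GCM-ESP) wrapper: the last four key bytes are a salt that is
 * combined with the 8-byte per-request IV to form the full 12-byte GCM
 * nonce, so only keylen - 4 bytes are passed down to the inner gcm() tfm.
 */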
static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}
static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}
static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen);

	return subreq;
}
static int crypto_rfc4106_encrypt(struct aead_request *req)
{
	req = crypto_rfc4106_crypt(req);

	return crypto_aead_encrypt(req);
}
static int crypto_rfc4106_decrypt(struct aead_request *req)
{
	req = crypto_rfc4106_crypt(req);

	return crypto_aead_decrypt(req);
}
static int crypto_rfc4106_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct aead_request) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 12);

	return 0;
}
static void crypto_rfc4106_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}
static int crypto_rfc4106_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != 12)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags |= alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);

	inst->alg.ivsize = 8;
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4106_init_tfm;
	inst->alg.exit = crypto_rfc4106_exit_tfm;

	inst->alg.setkey = crypto_rfc4106_setkey;
	inst->alg.setauthsize = crypto_rfc4106_setauthsize;
	inst->alg.encrypt = crypto_rfc4106_encrypt;
	inst->alg.decrypt = crypto_rfc4106_decrypt;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}
static void crypto_rfc4106_free(struct crypto_instance *inst)
{
	crypto_drop_aead(crypto_instance_ctx(inst));
	kfree(aead_instance(inst));
}
static struct crypto_template crypto_rfc4106_tmpl = {
	.name = "rfc4106",
	.create = crypto_rfc4106_create,
	.free = crypto_rfc4106_free,
	.module = THIS_MODULE,
};
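/*
 * RFC 4543 (GMAC) wrapper: authentication-only GCM.  All of the data is
 * fed to the inner gcm() transform as associated data, so nothing is
 * encrypted; when source and destination differ the payload is first
 * copied across with the null skcipher.
 */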
static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}
static int crypto_rfc4543_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);

	if (authsize != 16)
		return -EINVAL;

	return crypto_aead_setauthsize(ctx->child, authsize);
}
static int crypto_rfc4543_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_rfc4543_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	unsigned int authsize = crypto_aead_authsize(aead);
	u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),
			   crypto_aead_alignmask(ctx->child) + 1);
	int err;

	if (req->src != req->dst) {
		err = crypto_rfc4543_copy_src_to_dst(req, enc);
		if (err)
			return err;
	}

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, ctx->child);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       enc ? 0 : authsize, iv);
	aead_request_set_ad(subreq, req->assoclen + req->cryptlen -
				    subreq->cryptlen);

	return enc ? crypto_aead_encrypt(subreq) : crypto_aead_decrypt(subreq);
}
static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int nbytes = req->assoclen + req->cryptlen -
			      (enc ? 0 : authsize);
	struct blkcipher_desc desc = {
		.tfm = ctx->null,
	};

	return crypto_blkcipher_encrypt(&desc, req->dst, req->src, nbytes);
}
static int crypto_rfc4543_encrypt(struct aead_request *req)
{
	return crypto_rfc4543_crypt(req, true);
}

static int crypto_rfc4543_decrypt(struct aead_request *req)
{
	return crypto_rfc4543_crypt(req, false);
}
static int crypto_rfc4543_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_rfc4543_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead;
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	struct crypto_blkcipher *null;
	unsigned long align;
	int err;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	null = crypto_get_default_null_skcipher();
	err = PTR_ERR(null);
	if (IS_ERR(null))
		goto err_free_aead;

	ctx->child = aead;
	ctx->null = null;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4543_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 12);

	return 0;

err_free_aead:
	crypto_free_aead(aead);
	return err;
}
static void crypto_rfc4543_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
	crypto_put_default_null_skcipher();
}
static int crypto_rfc4543_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	struct crypto_rfc4543_instance_ctx *ctx;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	spawn = &ctx->aead;
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != 12)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4543(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4543(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx);

	inst->alg.ivsize = 8;
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4543_init_tfm;
	inst->alg.exit = crypto_rfc4543_exit_tfm;

	inst->alg.setkey = crypto_rfc4543_setkey;
	inst->alg.setauthsize = crypto_rfc4543_setauthsize;
	inst->alg.encrypt = crypto_rfc4543_encrypt;
	inst->alg.decrypt = crypto_rfc4543_decrypt;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}
static void crypto_rfc4543_free(struct crypto_instance *inst)
{
	struct crypto_rfc4543_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead);

	kfree(aead_instance(inst));
}
static struct crypto_template crypto_rfc4543_tmpl = {
	.name = "rfc4543",
	.create = crypto_rfc4543_create,
	.free = crypto_rfc4543_free,
	.module = THIS_MODULE,
};
static int __init crypto_gcm_module_init(void)
{
	int err;

	gcm_zeroes = kzalloc(sizeof(*gcm_zeroes), GFP_KERNEL);
	if (!gcm_zeroes)
		return -ENOMEM;

	sg_init_one(&gcm_zeroes->sg, gcm_zeroes->buf, sizeof(gcm_zeroes->buf));

	err = crypto_register_template(&crypto_gcm_base_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_gcm_tmpl);
	if (err)
		goto out_undo_base;

	err = crypto_register_template(&crypto_rfc4106_tmpl);
	if (err)
		goto out_undo_gcm;

	err = crypto_register_template(&crypto_rfc4543_tmpl);
	if (err)
		goto out_undo_rfc4106;

	return 0;

out_undo_rfc4106:
	crypto_unregister_template(&crypto_rfc4106_tmpl);
out_undo_gcm:
	crypto_unregister_template(&crypto_gcm_tmpl);
out_undo_base:
	crypto_unregister_template(&crypto_gcm_base_tmpl);
out:
	kfree(gcm_zeroes);
	return err;
}
static void __exit crypto_gcm_module_exit(void)
{
	kfree(gcm_zeroes);
	crypto_unregister_template(&crypto_rfc4543_tmpl);
	crypto_unregister_template(&crypto_rfc4106_tmpl);
	crypto_unregister_template(&crypto_gcm_tmpl);
	crypto_unregister_template(&crypto_gcm_base_tmpl);
}
module_init(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
MODULE_ALIAS_CRYPTO("gcm_base");
MODULE_ALIAS_CRYPTO("rfc4106");
MODULE_ALIAS_CRYPTO("rfc4543");
MODULE_ALIAS_CRYPTO("gcm");
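/*
 * Usage sketch (not part of this file): a minimal illustration of how a
 * kernel caller might request the "gcm(aes)" instance built by this
 * template through the AEAD API.  The buffer names (buf, key, iv), sizes
 * and in-place layout (associated data followed by plaintext, tag appended
 * on encryption) are assumptions for the example; error handling and
 * asynchronous completion (-EINPROGRESS/-EBUSY) are omitted for brevity.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req;
 *	struct scatterlist sg;
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 16);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, assoclen + ptlen + 16);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);	// 12-byte IV
 *	crypto_aead_encrypt(req);
 *
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */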