/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <crypto/hash.h>
#include "internal.h"
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
25 struct gcm_instance_ctx
{
26 struct crypto_skcipher_spawn ctr
;
27 struct crypto_ahash_spawn ghash
;
30 struct crypto_gcm_ctx
{
31 struct crypto_ablkcipher
*ctr
;
32 struct crypto_ahash
*ghash
;
35 struct crypto_rfc4106_ctx
{
36 struct crypto_aead
*child
;
40 struct crypto_rfc4543_ctx
{
41 struct crypto_aead
*child
;
45 struct crypto_rfc4543_req_ctx
{
48 struct scatterlist cipher
[1];
49 struct scatterlist payload
[2];
50 struct scatterlist assoc
[2];
51 struct aead_request subreq
;
54 struct crypto_gcm_ghash_ctx
{
55 unsigned int cryptlen
;
56 struct scatterlist
*src
;
57 void (*complete
)(struct aead_request
*req
, int err
);
60 struct crypto_gcm_req_priv_ctx
{
63 struct scatterlist src
[2];
64 struct scatterlist dst
[2];
65 struct crypto_gcm_ghash_ctx ghash_ctx
;
67 struct ahash_request ahreq
;
68 struct ablkcipher_request abreq
;
72 struct crypto_gcm_setkey_result
{
74 struct completion completion
;
77 static void *gcm_zeroes
;
79 static inline struct crypto_gcm_req_priv_ctx
*crypto_gcm_reqctx(
80 struct aead_request
*req
)
82 unsigned long align
= crypto_aead_alignmask(crypto_aead_reqtfm(req
));
84 return (void *)PTR_ALIGN((u8
*)aead_request_ctx(req
), align
+ 1);
87 static void crypto_gcm_setkey_done(struct crypto_async_request
*req
, int err
)
89 struct crypto_gcm_setkey_result
*result
= req
->data
;
91 if (err
== -EINPROGRESS
)
95 complete(&result
->completion
);
98 static int crypto_gcm_setkey(struct crypto_aead
*aead
, const u8
*key
,
101 struct crypto_gcm_ctx
*ctx
= crypto_aead_ctx(aead
);
102 struct crypto_ahash
*ghash
= ctx
->ghash
;
103 struct crypto_ablkcipher
*ctr
= ctx
->ctr
;
108 struct crypto_gcm_setkey_result result
;
110 struct scatterlist sg
[1];
111 struct ablkcipher_request req
;
115 crypto_ablkcipher_clear_flags(ctr
, CRYPTO_TFM_REQ_MASK
);
116 crypto_ablkcipher_set_flags(ctr
, crypto_aead_get_flags(aead
) &
117 CRYPTO_TFM_REQ_MASK
);
119 err
= crypto_ablkcipher_setkey(ctr
, key
, keylen
);
123 crypto_aead_set_flags(aead
, crypto_ablkcipher_get_flags(ctr
) &
124 CRYPTO_TFM_RES_MASK
);
126 data
= kzalloc(sizeof(*data
) + crypto_ablkcipher_reqsize(ctr
),
131 init_completion(&data
->result
.completion
);
132 sg_init_one(data
->sg
, &data
->hash
, sizeof(data
->hash
));
133 ablkcipher_request_set_tfm(&data
->req
, ctr
);
134 ablkcipher_request_set_callback(&data
->req
, CRYPTO_TFM_REQ_MAY_SLEEP
|
135 CRYPTO_TFM_REQ_MAY_BACKLOG
,
136 crypto_gcm_setkey_done
,
138 ablkcipher_request_set_crypt(&data
->req
, data
->sg
, data
->sg
,
139 sizeof(data
->hash
), data
->iv
);
141 err
= crypto_ablkcipher_encrypt(&data
->req
);
142 if (err
== -EINPROGRESS
|| err
== -EBUSY
) {
143 err
= wait_for_completion_interruptible(
144 &data
->result
.completion
);
146 err
= data
->result
.err
;
152 crypto_ahash_clear_flags(ghash
, CRYPTO_TFM_REQ_MASK
);
153 crypto_ahash_set_flags(ghash
, crypto_aead_get_flags(aead
) &
154 CRYPTO_TFM_REQ_MASK
);
155 err
= crypto_ahash_setkey(ghash
, (u8
*)&data
->hash
, sizeof(be128
));
156 crypto_aead_set_flags(aead
, crypto_ahash_get_flags(ghash
) &
157 CRYPTO_TFM_RES_MASK
);
164 static int crypto_gcm_setauthsize(struct crypto_aead
*tfm
,
165 unsigned int authsize
)
183 static void crypto_gcm_init_crypt(struct ablkcipher_request
*ablk_req
,
184 struct aead_request
*req
,
185 unsigned int cryptlen
)
187 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
188 struct crypto_gcm_ctx
*ctx
= crypto_aead_ctx(aead
);
189 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
190 struct scatterlist
*dst
;
191 __be32 counter
= cpu_to_be32(1);
193 memset(pctx
->auth_tag
, 0, sizeof(pctx
->auth_tag
));
194 memcpy(req
->iv
+ 12, &counter
, 4);
196 sg_init_table(pctx
->src
, 2);
197 sg_set_buf(pctx
->src
, pctx
->auth_tag
, sizeof(pctx
->auth_tag
));
198 scatterwalk_sg_chain(pctx
->src
, 2, req
->src
);
201 if (req
->src
!= req
->dst
) {
202 sg_init_table(pctx
->dst
, 2);
203 sg_set_buf(pctx
->dst
, pctx
->auth_tag
, sizeof(pctx
->auth_tag
));
204 scatterwalk_sg_chain(pctx
->dst
, 2, req
->dst
);
208 ablkcipher_request_set_tfm(ablk_req
, ctx
->ctr
);
209 ablkcipher_request_set_crypt(ablk_req
, pctx
->src
, dst
,
210 cryptlen
+ sizeof(pctx
->auth_tag
),
/* Bytes of zero padding needed to round len up to a 16-byte GHASH block. */
static inline unsigned int gcm_remain(unsigned int len)
{
	len &= 0xf;
	return len ? 16 - len : 0;
}
/* Forward declarations for the async GHASH completion chain. */
static void gcm_hash_len_done(struct crypto_async_request *areq, int err);
static void gcm_hash_final_done(struct crypto_async_request *areq, int err);
223 static int gcm_hash_update(struct aead_request
*req
,
224 struct crypto_gcm_req_priv_ctx
*pctx
,
225 crypto_completion_t complete
,
226 struct scatterlist
*src
,
229 struct ahash_request
*ahreq
= &pctx
->u
.ahreq
;
231 ahash_request_set_callback(ahreq
, aead_request_flags(req
),
233 ahash_request_set_crypt(ahreq
, src
, NULL
, len
);
235 return crypto_ahash_update(ahreq
);
238 static int gcm_hash_remain(struct aead_request
*req
,
239 struct crypto_gcm_req_priv_ctx
*pctx
,
241 crypto_completion_t complete
)
243 struct ahash_request
*ahreq
= &pctx
->u
.ahreq
;
245 ahash_request_set_callback(ahreq
, aead_request_flags(req
),
247 sg_init_one(pctx
->src
, gcm_zeroes
, remain
);
248 ahash_request_set_crypt(ahreq
, pctx
->src
, NULL
, remain
);
250 return crypto_ahash_update(ahreq
);
253 static int gcm_hash_len(struct aead_request
*req
,
254 struct crypto_gcm_req_priv_ctx
*pctx
)
256 struct ahash_request
*ahreq
= &pctx
->u
.ahreq
;
257 struct crypto_gcm_ghash_ctx
*gctx
= &pctx
->ghash_ctx
;
260 lengths
.a
= cpu_to_be64(req
->assoclen
* 8);
261 lengths
.b
= cpu_to_be64(gctx
->cryptlen
* 8);
262 memcpy(pctx
->iauth_tag
, &lengths
, 16);
263 sg_init_one(pctx
->src
, pctx
->iauth_tag
, 16);
264 ahash_request_set_callback(ahreq
, aead_request_flags(req
),
265 gcm_hash_len_done
, req
);
266 ahash_request_set_crypt(ahreq
, pctx
->src
,
267 NULL
, sizeof(lengths
));
269 return crypto_ahash_update(ahreq
);
272 static int gcm_hash_final(struct aead_request
*req
,
273 struct crypto_gcm_req_priv_ctx
*pctx
)
275 struct ahash_request
*ahreq
= &pctx
->u
.ahreq
;
277 ahash_request_set_callback(ahreq
, aead_request_flags(req
),
278 gcm_hash_final_done
, req
);
279 ahash_request_set_crypt(ahreq
, NULL
, pctx
->iauth_tag
, 0);
281 return crypto_ahash_final(ahreq
);
284 static void __gcm_hash_final_done(struct aead_request
*req
, int err
)
286 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
287 struct crypto_gcm_ghash_ctx
*gctx
= &pctx
->ghash_ctx
;
290 crypto_xor(pctx
->auth_tag
, pctx
->iauth_tag
, 16);
292 gctx
->complete(req
, err
);
295 static void gcm_hash_final_done(struct crypto_async_request
*areq
, int err
)
297 struct aead_request
*req
= areq
->data
;
299 __gcm_hash_final_done(req
, err
);
302 static void __gcm_hash_len_done(struct aead_request
*req
, int err
)
304 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
307 err
= gcm_hash_final(req
, pctx
);
308 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
312 __gcm_hash_final_done(req
, err
);
315 static void gcm_hash_len_done(struct crypto_async_request
*areq
, int err
)
317 struct aead_request
*req
= areq
->data
;
319 __gcm_hash_len_done(req
, err
);
322 static void __gcm_hash_crypt_remain_done(struct aead_request
*req
, int err
)
324 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
327 err
= gcm_hash_len(req
, pctx
);
328 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
332 __gcm_hash_len_done(req
, err
);
335 static void gcm_hash_crypt_remain_done(struct crypto_async_request
*areq
,
338 struct aead_request
*req
= areq
->data
;
340 __gcm_hash_crypt_remain_done(req
, err
);
343 static void __gcm_hash_crypt_done(struct aead_request
*req
, int err
)
345 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
346 struct crypto_gcm_ghash_ctx
*gctx
= &pctx
->ghash_ctx
;
350 remain
= gcm_remain(gctx
->cryptlen
);
352 err
= gcm_hash_remain(req
, pctx
, remain
,
353 gcm_hash_crypt_remain_done
);
354 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
358 __gcm_hash_crypt_remain_done(req
, err
);
361 static void gcm_hash_crypt_done(struct crypto_async_request
*areq
, int err
)
363 struct aead_request
*req
= areq
->data
;
365 __gcm_hash_crypt_done(req
, err
);
368 static void __gcm_hash_assoc_remain_done(struct aead_request
*req
, int err
)
370 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
371 struct crypto_gcm_ghash_ctx
*gctx
= &pctx
->ghash_ctx
;
372 crypto_completion_t complete
;
373 unsigned int remain
= 0;
375 if (!err
&& gctx
->cryptlen
) {
376 remain
= gcm_remain(gctx
->cryptlen
);
377 complete
= remain
? gcm_hash_crypt_done
:
378 gcm_hash_crypt_remain_done
;
379 err
= gcm_hash_update(req
, pctx
, complete
,
380 gctx
->src
, gctx
->cryptlen
);
381 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
386 __gcm_hash_crypt_done(req
, err
);
388 __gcm_hash_crypt_remain_done(req
, err
);
391 static void gcm_hash_assoc_remain_done(struct crypto_async_request
*areq
,
394 struct aead_request
*req
= areq
->data
;
396 __gcm_hash_assoc_remain_done(req
, err
);
399 static void __gcm_hash_assoc_done(struct aead_request
*req
, int err
)
401 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
405 remain
= gcm_remain(req
->assoclen
);
407 err
= gcm_hash_remain(req
, pctx
, remain
,
408 gcm_hash_assoc_remain_done
);
409 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
413 __gcm_hash_assoc_remain_done(req
, err
);
416 static void gcm_hash_assoc_done(struct crypto_async_request
*areq
, int err
)
418 struct aead_request
*req
= areq
->data
;
420 __gcm_hash_assoc_done(req
, err
);
423 static void __gcm_hash_init_done(struct aead_request
*req
, int err
)
425 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
426 crypto_completion_t complete
;
427 unsigned int remain
= 0;
429 if (!err
&& req
->assoclen
) {
430 remain
= gcm_remain(req
->assoclen
);
431 complete
= remain
? gcm_hash_assoc_done
:
432 gcm_hash_assoc_remain_done
;
433 err
= gcm_hash_update(req
, pctx
, complete
,
434 req
->assoc
, req
->assoclen
);
435 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
440 __gcm_hash_assoc_done(req
, err
);
442 __gcm_hash_assoc_remain_done(req
, err
);
445 static void gcm_hash_init_done(struct crypto_async_request
*areq
, int err
)
447 struct aead_request
*req
= areq
->data
;
449 __gcm_hash_init_done(req
, err
);
452 static int gcm_hash(struct aead_request
*req
,
453 struct crypto_gcm_req_priv_ctx
*pctx
)
455 struct ahash_request
*ahreq
= &pctx
->u
.ahreq
;
456 struct crypto_gcm_ghash_ctx
*gctx
= &pctx
->ghash_ctx
;
457 struct crypto_gcm_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
459 crypto_completion_t complete
;
462 ahash_request_set_tfm(ahreq
, ctx
->ghash
);
464 ahash_request_set_callback(ahreq
, aead_request_flags(req
),
465 gcm_hash_init_done
, req
);
466 err
= crypto_ahash_init(ahreq
);
469 remain
= gcm_remain(req
->assoclen
);
470 complete
= remain
? gcm_hash_assoc_done
: gcm_hash_assoc_remain_done
;
471 err
= gcm_hash_update(req
, pctx
, complete
, req
->assoc
, req
->assoclen
);
475 err
= gcm_hash_remain(req
, pctx
, remain
,
476 gcm_hash_assoc_remain_done
);
480 remain
= gcm_remain(gctx
->cryptlen
);
481 complete
= remain
? gcm_hash_crypt_done
: gcm_hash_crypt_remain_done
;
482 err
= gcm_hash_update(req
, pctx
, complete
, gctx
->src
, gctx
->cryptlen
);
486 err
= gcm_hash_remain(req
, pctx
, remain
,
487 gcm_hash_crypt_remain_done
);
491 err
= gcm_hash_len(req
, pctx
);
494 err
= gcm_hash_final(req
, pctx
);
501 static void gcm_enc_copy_hash(struct aead_request
*req
,
502 struct crypto_gcm_req_priv_ctx
*pctx
)
504 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
505 u8
*auth_tag
= pctx
->auth_tag
;
507 scatterwalk_map_and_copy(auth_tag
, req
->dst
, req
->cryptlen
,
508 crypto_aead_authsize(aead
), 1);
/* GHASH chain finished on the encrypt path: emit tag and complete. */
static void gcm_enc_hash_done(struct aead_request *req, int err)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);

	if (!err)
		gcm_enc_copy_hash(req, pctx);

	aead_request_complete(req, err);
}
521 static void gcm_encrypt_done(struct crypto_async_request
*areq
, int err
)
523 struct aead_request
*req
= areq
->data
;
524 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
527 err
= gcm_hash(req
, pctx
);
528 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
531 crypto_xor(pctx
->auth_tag
, pctx
->iauth_tag
, 16);
532 gcm_enc_copy_hash(req
, pctx
);
536 aead_request_complete(req
, err
);
539 static int crypto_gcm_encrypt(struct aead_request
*req
)
541 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
542 struct ablkcipher_request
*abreq
= &pctx
->u
.abreq
;
543 struct crypto_gcm_ghash_ctx
*gctx
= &pctx
->ghash_ctx
;
546 crypto_gcm_init_crypt(abreq
, req
, req
->cryptlen
);
547 ablkcipher_request_set_callback(abreq
, aead_request_flags(req
),
548 gcm_encrypt_done
, req
);
550 gctx
->src
= req
->dst
;
551 gctx
->cryptlen
= req
->cryptlen
;
552 gctx
->complete
= gcm_enc_hash_done
;
554 err
= crypto_ablkcipher_encrypt(abreq
);
558 err
= gcm_hash(req
, pctx
);
562 crypto_xor(pctx
->auth_tag
, pctx
->iauth_tag
, 16);
563 gcm_enc_copy_hash(req
, pctx
);
568 static int crypto_gcm_verify(struct aead_request
*req
,
569 struct crypto_gcm_req_priv_ctx
*pctx
)
571 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
572 u8
*auth_tag
= pctx
->auth_tag
;
573 u8
*iauth_tag
= pctx
->iauth_tag
;
574 unsigned int authsize
= crypto_aead_authsize(aead
);
575 unsigned int cryptlen
= req
->cryptlen
- authsize
;
577 crypto_xor(auth_tag
, iauth_tag
, 16);
578 scatterwalk_map_and_copy(iauth_tag
, req
->src
, cryptlen
, authsize
, 0);
579 return memcmp(iauth_tag
, auth_tag
, authsize
) ? -EBADMSG
: 0;
582 static void gcm_decrypt_done(struct crypto_async_request
*areq
, int err
)
584 struct aead_request
*req
= areq
->data
;
585 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
588 err
= crypto_gcm_verify(req
, pctx
);
590 aead_request_complete(req
, err
);
593 static void gcm_dec_hash_done(struct aead_request
*req
, int err
)
595 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
596 struct ablkcipher_request
*abreq
= &pctx
->u
.abreq
;
597 struct crypto_gcm_ghash_ctx
*gctx
= &pctx
->ghash_ctx
;
600 ablkcipher_request_set_callback(abreq
, aead_request_flags(req
),
601 gcm_decrypt_done
, req
);
602 crypto_gcm_init_crypt(abreq
, req
, gctx
->cryptlen
);
603 err
= crypto_ablkcipher_decrypt(abreq
);
604 if (err
== -EINPROGRESS
|| err
== -EBUSY
)
607 err
= crypto_gcm_verify(req
, pctx
);
610 aead_request_complete(req
, err
);
613 static int crypto_gcm_decrypt(struct aead_request
*req
)
615 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
616 struct crypto_gcm_req_priv_ctx
*pctx
= crypto_gcm_reqctx(req
);
617 struct ablkcipher_request
*abreq
= &pctx
->u
.abreq
;
618 struct crypto_gcm_ghash_ctx
*gctx
= &pctx
->ghash_ctx
;
619 unsigned int authsize
= crypto_aead_authsize(aead
);
620 unsigned int cryptlen
= req
->cryptlen
;
623 if (cryptlen
< authsize
)
625 cryptlen
-= authsize
;
627 gctx
->src
= req
->src
;
628 gctx
->cryptlen
= cryptlen
;
629 gctx
->complete
= gcm_dec_hash_done
;
631 err
= gcm_hash(req
, pctx
);
635 ablkcipher_request_set_callback(abreq
, aead_request_flags(req
),
636 gcm_decrypt_done
, req
);
637 crypto_gcm_init_crypt(abreq
, req
, cryptlen
);
638 err
= crypto_ablkcipher_decrypt(abreq
);
642 return crypto_gcm_verify(req
, pctx
);
645 static int crypto_gcm_init_tfm(struct crypto_tfm
*tfm
)
647 struct crypto_instance
*inst
= (void *)tfm
->__crt_alg
;
648 struct gcm_instance_ctx
*ictx
= crypto_instance_ctx(inst
);
649 struct crypto_gcm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
650 struct crypto_ablkcipher
*ctr
;
651 struct crypto_ahash
*ghash
;
655 ghash
= crypto_spawn_ahash(&ictx
->ghash
);
657 return PTR_ERR(ghash
);
659 ctr
= crypto_spawn_skcipher(&ictx
->ctr
);
667 align
= crypto_tfm_alg_alignmask(tfm
);
668 align
&= ~(crypto_tfm_ctx_alignment() - 1);
669 tfm
->crt_aead
.reqsize
= align
+
670 offsetof(struct crypto_gcm_req_priv_ctx
, u
) +
671 max(sizeof(struct ablkcipher_request
) +
672 crypto_ablkcipher_reqsize(ctr
),
673 sizeof(struct ahash_request
) +
674 crypto_ahash_reqsize(ghash
));
679 crypto_free_ahash(ghash
);
683 static void crypto_gcm_exit_tfm(struct crypto_tfm
*tfm
)
685 struct crypto_gcm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
687 crypto_free_ahash(ctx
->ghash
);
688 crypto_free_ablkcipher(ctx
->ctr
);
691 static struct crypto_instance
*crypto_gcm_alloc_common(struct rtattr
**tb
,
692 const char *full_name
,
693 const char *ctr_name
,
694 const char *ghash_name
)
696 struct crypto_attr_type
*algt
;
697 struct crypto_instance
*inst
;
698 struct crypto_alg
*ctr
;
699 struct crypto_alg
*ghash_alg
;
700 struct ahash_alg
*ghash_ahash_alg
;
701 struct gcm_instance_ctx
*ctx
;
704 algt
= crypto_get_attr_type(tb
);
709 if ((algt
->type
^ CRYPTO_ALG_TYPE_AEAD
) & algt
->mask
)
710 return ERR_PTR(-EINVAL
);
712 ghash_alg
= crypto_find_alg(ghash_name
, &crypto_ahash_type
,
713 CRYPTO_ALG_TYPE_HASH
,
714 CRYPTO_ALG_TYPE_AHASH_MASK
);
715 err
= PTR_ERR(ghash_alg
);
716 if (IS_ERR(ghash_alg
))
720 inst
= kzalloc(sizeof(*inst
) + sizeof(*ctx
), GFP_KERNEL
);
724 ctx
= crypto_instance_ctx(inst
);
725 ghash_ahash_alg
= container_of(ghash_alg
, struct ahash_alg
, halg
.base
);
726 err
= crypto_init_ahash_spawn(&ctx
->ghash
, &ghash_ahash_alg
->halg
,
731 crypto_set_skcipher_spawn(&ctx
->ctr
, inst
);
732 err
= crypto_grab_skcipher(&ctx
->ctr
, ctr_name
, 0,
733 crypto_requires_sync(algt
->type
,
738 ctr
= crypto_skcipher_spawn_alg(&ctx
->ctr
);
740 /* We only support 16-byte blocks. */
741 if (ctr
->cra_ablkcipher
.ivsize
!= 16)
744 /* Not a stream cipher? */
746 if (ctr
->cra_blocksize
!= 1)
750 if (snprintf(inst
->alg
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
,
751 "gcm_base(%s,%s)", ctr
->cra_driver_name
,
752 ghash_alg
->cra_driver_name
) >=
756 memcpy(inst
->alg
.cra_name
, full_name
, CRYPTO_MAX_ALG_NAME
);
758 inst
->alg
.cra_flags
= CRYPTO_ALG_TYPE_AEAD
;
759 inst
->alg
.cra_flags
|= ctr
->cra_flags
& CRYPTO_ALG_ASYNC
;
760 inst
->alg
.cra_priority
= ctr
->cra_priority
;
761 inst
->alg
.cra_blocksize
= 1;
762 inst
->alg
.cra_alignmask
= ctr
->cra_alignmask
| (__alignof__(u64
) - 1);
763 inst
->alg
.cra_type
= &crypto_aead_type
;
764 inst
->alg
.cra_aead
.ivsize
= 16;
765 inst
->alg
.cra_aead
.maxauthsize
= 16;
766 inst
->alg
.cra_ctxsize
= sizeof(struct crypto_gcm_ctx
);
767 inst
->alg
.cra_init
= crypto_gcm_init_tfm
;
768 inst
->alg
.cra_exit
= crypto_gcm_exit_tfm
;
769 inst
->alg
.cra_aead
.setkey
= crypto_gcm_setkey
;
770 inst
->alg
.cra_aead
.setauthsize
= crypto_gcm_setauthsize
;
771 inst
->alg
.cra_aead
.encrypt
= crypto_gcm_encrypt
;
772 inst
->alg
.cra_aead
.decrypt
= crypto_gcm_decrypt
;
775 crypto_mod_put(ghash_alg
);
779 crypto_drop_skcipher(&ctx
->ctr
);
781 crypto_drop_ahash(&ctx
->ghash
);
789 static struct crypto_instance
*crypto_gcm_alloc(struct rtattr
**tb
)
792 const char *cipher_name
;
793 char ctr_name
[CRYPTO_MAX_ALG_NAME
];
794 char full_name
[CRYPTO_MAX_ALG_NAME
];
796 cipher_name
= crypto_attr_alg_name(tb
[1]);
797 err
= PTR_ERR(cipher_name
);
798 if (IS_ERR(cipher_name
))
801 if (snprintf(ctr_name
, CRYPTO_MAX_ALG_NAME
, "ctr(%s)", cipher_name
) >=
803 return ERR_PTR(-ENAMETOOLONG
);
805 if (snprintf(full_name
, CRYPTO_MAX_ALG_NAME
, "gcm(%s)", cipher_name
) >=
807 return ERR_PTR(-ENAMETOOLONG
);
809 return crypto_gcm_alloc_common(tb
, full_name
, ctr_name
, "ghash");
812 static void crypto_gcm_free(struct crypto_instance
*inst
)
814 struct gcm_instance_ctx
*ctx
= crypto_instance_ctx(inst
);
816 crypto_drop_skcipher(&ctx
->ctr
);
817 crypto_drop_ahash(&ctx
->ghash
);
821 static struct crypto_template crypto_gcm_tmpl
= {
823 .alloc
= crypto_gcm_alloc
,
824 .free
= crypto_gcm_free
,
825 .module
= THIS_MODULE
,
828 static struct crypto_instance
*crypto_gcm_base_alloc(struct rtattr
**tb
)
831 const char *ctr_name
;
832 const char *ghash_name
;
833 char full_name
[CRYPTO_MAX_ALG_NAME
];
835 ctr_name
= crypto_attr_alg_name(tb
[1]);
836 err
= PTR_ERR(ctr_name
);
837 if (IS_ERR(ctr_name
))
840 ghash_name
= crypto_attr_alg_name(tb
[2]);
841 err
= PTR_ERR(ghash_name
);
842 if (IS_ERR(ghash_name
))
845 if (snprintf(full_name
, CRYPTO_MAX_ALG_NAME
, "gcm_base(%s,%s)",
846 ctr_name
, ghash_name
) >= CRYPTO_MAX_ALG_NAME
)
847 return ERR_PTR(-ENAMETOOLONG
);
849 return crypto_gcm_alloc_common(tb
, full_name
, ctr_name
, ghash_name
);
852 static struct crypto_template crypto_gcm_base_tmpl
= {
854 .alloc
= crypto_gcm_base_alloc
,
855 .free
= crypto_gcm_free
,
856 .module
= THIS_MODULE
,
859 static int crypto_rfc4106_setkey(struct crypto_aead
*parent
, const u8
*key
,
862 struct crypto_rfc4106_ctx
*ctx
= crypto_aead_ctx(parent
);
863 struct crypto_aead
*child
= ctx
->child
;
870 memcpy(ctx
->nonce
, key
+ keylen
, 4);
872 crypto_aead_clear_flags(child
, CRYPTO_TFM_REQ_MASK
);
873 crypto_aead_set_flags(child
, crypto_aead_get_flags(parent
) &
874 CRYPTO_TFM_REQ_MASK
);
875 err
= crypto_aead_setkey(child
, key
, keylen
);
876 crypto_aead_set_flags(parent
, crypto_aead_get_flags(child
) &
877 CRYPTO_TFM_RES_MASK
);
882 static int crypto_rfc4106_setauthsize(struct crypto_aead
*parent
,
883 unsigned int authsize
)
885 struct crypto_rfc4106_ctx
*ctx
= crypto_aead_ctx(parent
);
896 return crypto_aead_setauthsize(ctx
->child
, authsize
);
899 static struct aead_request
*crypto_rfc4106_crypt(struct aead_request
*req
)
901 struct aead_request
*subreq
= aead_request_ctx(req
);
902 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
903 struct crypto_rfc4106_ctx
*ctx
= crypto_aead_ctx(aead
);
904 struct crypto_aead
*child
= ctx
->child
;
905 u8
*iv
= PTR_ALIGN((u8
*)(subreq
+ 1) + crypto_aead_reqsize(child
),
906 crypto_aead_alignmask(child
) + 1);
908 memcpy(iv
, ctx
->nonce
, 4);
909 memcpy(iv
+ 4, req
->iv
, 8);
911 aead_request_set_tfm(subreq
, child
);
912 aead_request_set_callback(subreq
, req
->base
.flags
, req
->base
.complete
,
914 aead_request_set_crypt(subreq
, req
->src
, req
->dst
, req
->cryptlen
, iv
);
915 aead_request_set_assoc(subreq
, req
->assoc
, req
->assoclen
);
/* Encrypt via the wrapped GCM child. */
static int crypto_rfc4106_encrypt(struct aead_request *req)
{
	req = crypto_rfc4106_crypt(req);

	return crypto_aead_encrypt(req);
}
/* Decrypt via the wrapped GCM child. */
static int crypto_rfc4106_decrypt(struct aead_request *req)
{
	req = crypto_rfc4106_crypt(req);

	return crypto_aead_decrypt(req);
}
934 static int crypto_rfc4106_init_tfm(struct crypto_tfm
*tfm
)
936 struct crypto_instance
*inst
= (void *)tfm
->__crt_alg
;
937 struct crypto_aead_spawn
*spawn
= crypto_instance_ctx(inst
);
938 struct crypto_rfc4106_ctx
*ctx
= crypto_tfm_ctx(tfm
);
939 struct crypto_aead
*aead
;
942 aead
= crypto_spawn_aead(spawn
);
944 return PTR_ERR(aead
);
948 align
= crypto_aead_alignmask(aead
);
949 align
&= ~(crypto_tfm_ctx_alignment() - 1);
950 tfm
->crt_aead
.reqsize
= sizeof(struct aead_request
) +
951 ALIGN(crypto_aead_reqsize(aead
),
952 crypto_tfm_ctx_alignment()) +
958 static void crypto_rfc4106_exit_tfm(struct crypto_tfm
*tfm
)
960 struct crypto_rfc4106_ctx
*ctx
= crypto_tfm_ctx(tfm
);
962 crypto_free_aead(ctx
->child
);
965 static struct crypto_instance
*crypto_rfc4106_alloc(struct rtattr
**tb
)
967 struct crypto_attr_type
*algt
;
968 struct crypto_instance
*inst
;
969 struct crypto_aead_spawn
*spawn
;
970 struct crypto_alg
*alg
;
971 const char *ccm_name
;
974 algt
= crypto_get_attr_type(tb
);
979 if ((algt
->type
^ CRYPTO_ALG_TYPE_AEAD
) & algt
->mask
)
980 return ERR_PTR(-EINVAL
);
982 ccm_name
= crypto_attr_alg_name(tb
[1]);
983 err
= PTR_ERR(ccm_name
);
984 if (IS_ERR(ccm_name
))
987 inst
= kzalloc(sizeof(*inst
) + sizeof(*spawn
), GFP_KERNEL
);
989 return ERR_PTR(-ENOMEM
);
991 spawn
= crypto_instance_ctx(inst
);
992 crypto_set_aead_spawn(spawn
, inst
);
993 err
= crypto_grab_aead(spawn
, ccm_name
, 0,
994 crypto_requires_sync(algt
->type
, algt
->mask
));
998 alg
= crypto_aead_spawn_alg(spawn
);
1002 /* We only support 16-byte blocks. */
1003 if (alg
->cra_aead
.ivsize
!= 16)
1006 /* Not a stream cipher? */
1007 if (alg
->cra_blocksize
!= 1)
1010 err
= -ENAMETOOLONG
;
1011 if (snprintf(inst
->alg
.cra_name
, CRYPTO_MAX_ALG_NAME
,
1012 "rfc4106(%s)", alg
->cra_name
) >= CRYPTO_MAX_ALG_NAME
||
1013 snprintf(inst
->alg
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
,
1014 "rfc4106(%s)", alg
->cra_driver_name
) >=
1015 CRYPTO_MAX_ALG_NAME
)
1018 inst
->alg
.cra_flags
= CRYPTO_ALG_TYPE_AEAD
;
1019 inst
->alg
.cra_flags
|= alg
->cra_flags
& CRYPTO_ALG_ASYNC
;
1020 inst
->alg
.cra_priority
= alg
->cra_priority
;
1021 inst
->alg
.cra_blocksize
= 1;
1022 inst
->alg
.cra_alignmask
= alg
->cra_alignmask
;
1023 inst
->alg
.cra_type
= &crypto_nivaead_type
;
1025 inst
->alg
.cra_aead
.ivsize
= 8;
1026 inst
->alg
.cra_aead
.maxauthsize
= 16;
1028 inst
->alg
.cra_ctxsize
= sizeof(struct crypto_rfc4106_ctx
);
1030 inst
->alg
.cra_init
= crypto_rfc4106_init_tfm
;
1031 inst
->alg
.cra_exit
= crypto_rfc4106_exit_tfm
;
1033 inst
->alg
.cra_aead
.setkey
= crypto_rfc4106_setkey
;
1034 inst
->alg
.cra_aead
.setauthsize
= crypto_rfc4106_setauthsize
;
1035 inst
->alg
.cra_aead
.encrypt
= crypto_rfc4106_encrypt
;
1036 inst
->alg
.cra_aead
.decrypt
= crypto_rfc4106_decrypt
;
1038 inst
->alg
.cra_aead
.geniv
= "seqiv";
1044 crypto_drop_aead(spawn
);
1047 inst
= ERR_PTR(err
);
/* Tear down an rfc4106 instance. */
static void crypto_rfc4106_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}
1057 static struct crypto_template crypto_rfc4106_tmpl
= {
1059 .alloc
= crypto_rfc4106_alloc
,
1060 .free
= crypto_rfc4106_free
,
1061 .module
= THIS_MODULE
,
1064 static inline struct crypto_rfc4543_req_ctx
*crypto_rfc4543_reqctx(
1065 struct aead_request
*req
)
1067 unsigned long align
= crypto_aead_alignmask(crypto_aead_reqtfm(req
));
1069 return (void *)PTR_ALIGN((u8
*)aead_request_ctx(req
), align
+ 1);
1072 static int crypto_rfc4543_setkey(struct crypto_aead
*parent
, const u8
*key
,
1073 unsigned int keylen
)
1075 struct crypto_rfc4543_ctx
*ctx
= crypto_aead_ctx(parent
);
1076 struct crypto_aead
*child
= ctx
->child
;
1083 memcpy(ctx
->nonce
, key
+ keylen
, 4);
1085 crypto_aead_clear_flags(child
, CRYPTO_TFM_REQ_MASK
);
1086 crypto_aead_set_flags(child
, crypto_aead_get_flags(parent
) &
1087 CRYPTO_TFM_REQ_MASK
);
1088 err
= crypto_aead_setkey(child
, key
, keylen
);
1089 crypto_aead_set_flags(parent
, crypto_aead_get_flags(child
) &
1090 CRYPTO_TFM_RES_MASK
);
1095 static int crypto_rfc4543_setauthsize(struct crypto_aead
*parent
,
1096 unsigned int authsize
)
1098 struct crypto_rfc4543_ctx
*ctx
= crypto_aead_ctx(parent
);
1103 return crypto_aead_setauthsize(ctx
->child
, authsize
);
1106 static struct aead_request
*crypto_rfc4543_crypt(struct aead_request
*req
,
1109 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1110 struct crypto_rfc4543_ctx
*ctx
= crypto_aead_ctx(aead
);
1111 struct crypto_rfc4543_req_ctx
*rctx
= crypto_rfc4543_reqctx(req
);
1112 struct aead_request
*subreq
= &rctx
->subreq
;
1113 struct scatterlist
*dst
= req
->dst
;
1114 struct scatterlist
*cipher
= rctx
->cipher
;
1115 struct scatterlist
*payload
= rctx
->payload
;
1116 struct scatterlist
*assoc
= rctx
->assoc
;
1117 unsigned int authsize
= crypto_aead_authsize(aead
);
1118 unsigned int assoclen
= req
->assoclen
;
1121 u8
*iv
= PTR_ALIGN((u8
*)(rctx
+ 1) + crypto_aead_reqsize(ctx
->child
),
1122 crypto_aead_alignmask(ctx
->child
) + 1);
1124 memcpy(iv
, ctx
->nonce
, 4);
1125 memcpy(iv
+ 4, req
->iv
, 8);
1127 /* construct cipher/plaintext */
1129 memset(rctx
->auth_tag
, 0, authsize
);
1131 scatterwalk_map_and_copy(rctx
->auth_tag
, dst
,
1132 req
->cryptlen
- authsize
,
1135 sg_init_one(cipher
, rctx
->auth_tag
, authsize
);
1137 /* construct the aad */
1138 dstp
= sg_page(dst
);
1139 vdst
= PageHighMem(dstp
) ? NULL
: page_address(dstp
) + dst
->offset
;
1141 sg_init_table(payload
, 2);
1142 sg_set_buf(payload
, req
->iv
, 8);
1143 scatterwalk_crypto_chain(payload
, dst
, vdst
== req
->iv
+ 8, 2);
1144 assoclen
+= 8 + req
->cryptlen
- (enc
? 0 : authsize
);
1146 if (req
->assoc
->length
== req
->assoclen
) {
1147 sg_init_table(assoc
, 2);
1148 sg_set_page(assoc
, sg_page(req
->assoc
), req
->assoc
->length
,
1149 req
->assoc
->offset
);
1151 BUG_ON(req
->assoclen
> sizeof(rctx
->assocbuf
));
1153 scatterwalk_map_and_copy(rctx
->assocbuf
, req
->assoc
, 0,
1156 sg_init_table(assoc
, 2);
1157 sg_set_buf(assoc
, rctx
->assocbuf
, req
->assoclen
);
1159 scatterwalk_crypto_chain(assoc
, payload
, 0, 2);
1161 aead_request_set_tfm(subreq
, ctx
->child
);
1162 aead_request_set_callback(subreq
, req
->base
.flags
, req
->base
.complete
,
1164 aead_request_set_crypt(subreq
, cipher
, cipher
, enc
? 0 : authsize
, iv
);
1165 aead_request_set_assoc(subreq
, assoc
, assoclen
);
1170 static int crypto_rfc4543_encrypt(struct aead_request
*req
)
1172 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1173 struct crypto_rfc4543_req_ctx
*rctx
= crypto_rfc4543_reqctx(req
);
1174 struct aead_request
*subreq
;
1177 subreq
= crypto_rfc4543_crypt(req
, 1);
1178 err
= crypto_aead_encrypt(subreq
);
1182 scatterwalk_map_and_copy(rctx
->auth_tag
, req
->dst
, req
->cryptlen
,
1183 crypto_aead_authsize(aead
), 1);
/* GMAC decrypt/verify via the wrapped GCM child. */
static int crypto_rfc4543_decrypt(struct aead_request *req)
{
	req = crypto_rfc4543_crypt(req, 0);

	return crypto_aead_decrypt(req);
}
1195 static int crypto_rfc4543_init_tfm(struct crypto_tfm
*tfm
)
1197 struct crypto_instance
*inst
= (void *)tfm
->__crt_alg
;
1198 struct crypto_aead_spawn
*spawn
= crypto_instance_ctx(inst
);
1199 struct crypto_rfc4543_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1200 struct crypto_aead
*aead
;
1201 unsigned long align
;
1203 aead
= crypto_spawn_aead(spawn
);
1205 return PTR_ERR(aead
);
1209 align
= crypto_aead_alignmask(aead
);
1210 align
&= ~(crypto_tfm_ctx_alignment() - 1);
1211 tfm
->crt_aead
.reqsize
= sizeof(struct crypto_rfc4543_req_ctx
) +
1212 ALIGN(crypto_aead_reqsize(aead
),
1213 crypto_tfm_ctx_alignment()) +
1219 static void crypto_rfc4543_exit_tfm(struct crypto_tfm
*tfm
)
1221 struct crypto_rfc4543_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1223 crypto_free_aead(ctx
->child
);
1226 static struct crypto_instance
*crypto_rfc4543_alloc(struct rtattr
**tb
)
1228 struct crypto_attr_type
*algt
;
1229 struct crypto_instance
*inst
;
1230 struct crypto_aead_spawn
*spawn
;
1231 struct crypto_alg
*alg
;
1232 const char *ccm_name
;
1235 algt
= crypto_get_attr_type(tb
);
1236 err
= PTR_ERR(algt
);
1238 return ERR_PTR(err
);
1240 if ((algt
->type
^ CRYPTO_ALG_TYPE_AEAD
) & algt
->mask
)
1241 return ERR_PTR(-EINVAL
);
1243 ccm_name
= crypto_attr_alg_name(tb
[1]);
1244 err
= PTR_ERR(ccm_name
);
1245 if (IS_ERR(ccm_name
))
1246 return ERR_PTR(err
);
1248 inst
= kzalloc(sizeof(*inst
) + sizeof(*spawn
), GFP_KERNEL
);
1250 return ERR_PTR(-ENOMEM
);
1252 spawn
= crypto_instance_ctx(inst
);
1253 crypto_set_aead_spawn(spawn
, inst
);
1254 err
= crypto_grab_aead(spawn
, ccm_name
, 0,
1255 crypto_requires_sync(algt
->type
, algt
->mask
));
1259 alg
= crypto_aead_spawn_alg(spawn
);
1263 /* We only support 16-byte blocks. */
1264 if (alg
->cra_aead
.ivsize
!= 16)
1267 /* Not a stream cipher? */
1268 if (alg
->cra_blocksize
!= 1)
1271 err
= -ENAMETOOLONG
;
1272 if (snprintf(inst
->alg
.cra_name
, CRYPTO_MAX_ALG_NAME
,
1273 "rfc4543(%s)", alg
->cra_name
) >= CRYPTO_MAX_ALG_NAME
||
1274 snprintf(inst
->alg
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
,
1275 "rfc4543(%s)", alg
->cra_driver_name
) >=
1276 CRYPTO_MAX_ALG_NAME
)
1279 inst
->alg
.cra_flags
= CRYPTO_ALG_TYPE_AEAD
;
1280 inst
->alg
.cra_flags
|= alg
->cra_flags
& CRYPTO_ALG_ASYNC
;
1281 inst
->alg
.cra_priority
= alg
->cra_priority
;
1282 inst
->alg
.cra_blocksize
= 1;
1283 inst
->alg
.cra_alignmask
= alg
->cra_alignmask
;
1284 inst
->alg
.cra_type
= &crypto_nivaead_type
;
1286 inst
->alg
.cra_aead
.ivsize
= 8;
1287 inst
->alg
.cra_aead
.maxauthsize
= 16;
1289 inst
->alg
.cra_ctxsize
= sizeof(struct crypto_rfc4543_ctx
);
1291 inst
->alg
.cra_init
= crypto_rfc4543_init_tfm
;
1292 inst
->alg
.cra_exit
= crypto_rfc4543_exit_tfm
;
1294 inst
->alg
.cra_aead
.setkey
= crypto_rfc4543_setkey
;
1295 inst
->alg
.cra_aead
.setauthsize
= crypto_rfc4543_setauthsize
;
1296 inst
->alg
.cra_aead
.encrypt
= crypto_rfc4543_encrypt
;
1297 inst
->alg
.cra_aead
.decrypt
= crypto_rfc4543_decrypt
;
1299 inst
->alg
.cra_aead
.geniv
= "seqiv";
1305 crypto_drop_aead(spawn
);
1308 inst
= ERR_PTR(err
);
/* Tear down an rfc4543 instance. */
static void crypto_rfc4543_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}
1318 static struct crypto_template crypto_rfc4543_tmpl
= {
1320 .alloc
= crypto_rfc4543_alloc
,
1321 .free
= crypto_rfc4543_free
,
1322 .module
= THIS_MODULE
,
1325 static int __init
crypto_gcm_module_init(void)
1329 gcm_zeroes
= kzalloc(16, GFP_KERNEL
);
1333 err
= crypto_register_template(&crypto_gcm_base_tmpl
);
1337 err
= crypto_register_template(&crypto_gcm_tmpl
);
1341 err
= crypto_register_template(&crypto_rfc4106_tmpl
);
1345 err
= crypto_register_template(&crypto_rfc4543_tmpl
);
1347 goto out_undo_rfc4106
;
1352 crypto_unregister_template(&crypto_rfc4106_tmpl
);
1354 crypto_unregister_template(&crypto_gcm_tmpl
);
1356 crypto_unregister_template(&crypto_gcm_base_tmpl
);
1362 static void __exit
crypto_gcm_module_exit(void)
1365 crypto_unregister_template(&crypto_rfc4543_tmpl
);
1366 crypto_unregister_template(&crypto_rfc4106_tmpl
);
1367 crypto_unregister_template(&crypto_gcm_tmpl
);
1368 crypto_unregister_template(&crypto_gcm_base_tmpl
);
1371 module_init(crypto_gcm_module_init
);
1372 module_exit(crypto_gcm_module_exit
);
1374 MODULE_LICENSE("GPL");
1375 MODULE_DESCRIPTION("Galois/Counter Mode");
1376 MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
1377 MODULE_ALIAS_CRYPTO("gcm_base");
1378 MODULE_ALIAS_CRYPTO("rfc4106");
1379 MODULE_ALIAS_CRYPTO("rfc4543");
1380 MODULE_ALIAS_CRYPTO("gcm");