// SPDX-License-Identifier: GPL-2.0
/*
 * ESSIV skcipher and aead template for block encryption
 *
 * This template encapsulates the ESSIV IV generation algorithm used by
 * dm-crypt and fscrypt, which converts the initial vector for the skcipher
 * used for block encryption, by encrypting it using the hash of the
 * skcipher key as encryption key. Usually, the input IV is a 64-bit sector
 * number in LE representation zero-padded to the size of the IV, but this
 * is not assumed by this driver.
 *
 * The typical use of this template is to instantiate the skcipher
 * 'essiv(cbc(aes),sha256)', which is the only instantiation used by
 * fscrypt, and the most relevant one for dm-crypt. However, dm-crypt
 * also permits ESSIV to be used in combination with the authenc template,
 * e.g., 'essiv(authenc(hmac(sha256),cbc(aes)),sha256)', in which case
 * we need to instantiate an aead that accepts the same special key format
 * as the authenc template, and deals with the way the encrypted IV is
 * embedded into the AAD area of the aead request. This means the AEAD
 * flavor produced by this template is tightly coupled to the way dm-crypt
 * invokes it.
 *
 * Copyright (c) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * Heavily based on:
 * adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */

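/*
 * Usage sketch (illustrative only, not part of this file): a user such as
 * fscrypt instantiates the template by name and drives it like any other
 * skcipher, passing the plain LE sector number as the IV; the variable
 * names and elided error handling below are assumptions for the example.
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("essiv(cbc(aes),sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 *	// then issue skcipher requests with the sector number as req->iv
 */
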
#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#include "internal.h"

struct essiv_instance_ctx {
	union {
		struct crypto_skcipher_spawn skcipher_spawn;
		struct crypto_aead_spawn aead_spawn;
	} u;
	char essiv_cipher_name[CRYPTO_MAX_ALG_NAME];
	char shash_driver_name[CRYPTO_MAX_ALG_NAME];
};

struct essiv_tfm_ctx {
	union {
		struct crypto_skcipher *skcipher;
		struct crypto_aead *aead;
	} u;
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int ivoffset;
};

struct essiv_aead_request_ctx {
	struct scatterlist sg[4];
	u8 *assoc;
	struct aead_request aead_req;
};

static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	/* propagate the request flags and the key to the wrapped skcipher */
	crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->u.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
	if (err)
		return err;

	/* the ESSIV cipher key is the digest of the bulk cipher key */
	err = crypto_shash_tfm_digest(tctx->hash, key, keylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

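/*
 * In other words, the setkey above implements the classic ESSIV key
 * derivation: salt = Hash(key), with the IV cipher keyed by the full
 * digest. This is why essiv_supported_algorithms() below insists that
 * the digest size be a valid key size for the essiv cipher.
 */
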
static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	struct crypto_authenc_keys keys;
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(tctx->u.aead, key, keylen);
	if (err)
		return err;

	/* the key arrives in the special authenc() key blob format */
	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		return -EINVAL;

	desc->tfm = tctx->hash;
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
	      crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
						    CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

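/*
 * Note that the chained shash calls above compute the salt as
 * Hash(enckey || authkey), i.e., for the authenc case the ESSIV cipher
 * key covers both halves of the authenc key blob, not just the bulk
 * encryption key.
 */
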
static int essiv_aead_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(tctx->u.aead, authsize);
}

static void essiv_skcipher_done(void *data, int err)
{
	struct skcipher_request *req = data;

	skcipher_request_complete(req, err);
}

static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	/* convert the IV in place before handing the request down */
	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	skcipher_request_set_tfm(subreq, tctx->u.skcipher);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      essiv_skcipher_done, req);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int essiv_skcipher_encrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, true);
}

static int essiv_skcipher_decrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, false);
}

static void essiv_aead_done(void *data, int err)
{
	struct aead_request *req = data;
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);

	if (err == -EINPROGRESS)
		goto out;

	kfree(rctx->assoc);

out:
	aead_request_complete(req, err);
}

static int essiv_aead_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->aead_req;
	struct scatterlist *src = req->src;
	int err;

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	/*
	 * dm-crypt embeds the sector number and the IV in the AAD region, so
	 * we have to copy the converted IV into the right scatterlist before
	 * we pass it on.
	 */
	rctx->assoc = NULL;
	if (req->src == req->dst || !enc) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->assoclen - crypto_aead_ivsize(tfm),
					 crypto_aead_ivsize(tfm), 1);
	} else {
		u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
		int ivsize = crypto_aead_ivsize(tfm);
		int ssize = req->assoclen - ivsize;
		struct scatterlist *sg;
		int nents;

		if (ssize < 0)
			return -EINVAL;

		nents = sg_nents_for_len(req->src, ssize);
		if (nents < 0)
			return -EINVAL;

		memcpy(iv, req->iv, ivsize);
		sg_init_table(rctx->sg, 4);

		if (unlikely(nents > 1)) {
			/*
			 * This is a case that rarely occurs in practice, but
			 * for correctness, we have to deal with it nonetheless.
			 */
			rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
			if (!rctx->assoc)
				return -ENOMEM;

			scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
						 ssize, 0);
			sg_set_buf(rctx->sg, rctx->assoc, ssize);
		} else {
			sg_set_page(rctx->sg, sg_page(req->src), ssize,
				    req->src->offset);
		}

		sg_set_buf(rctx->sg + 1, iv, ivsize);
		sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
		if (sg != rctx->sg + 2)
			sg_chain(rctx->sg, 3, sg);

		src = rctx->sg;
	}

	aead_request_set_tfm(subreq, tctx->u.aead);
	aead_request_set_ad(subreq, req->assoclen);
	aead_request_set_callback(subreq, aead_request_flags(req),
				  essiv_aead_done, req);
	aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);

	err = enc ? crypto_aead_encrypt(subreq) :
		    crypto_aead_decrypt(subreq);

	/* on synchronous completion, free the assoc copy here, not in the callback */
	if (rctx->assoc && err != -EINPROGRESS && err != -EBUSY)
		kfree(rctx->assoc);

	return err;
}

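/*
 * Illustrative layout of a dm-crypt AEAD request as handled above (field
 * widths are examples, not assumptions made by this code):
 *
 *	|<------- assoclen ------->|<------ cryptlen ------>|
 *	| sector number | orig. IV |         payload        |
 *	                 \-- overwritten with the encrypted IV
 *
 * On in-place or decryption requests the encrypted IV is written straight
 * into req->dst; otherwise, a shadow scatterlist rctx->sg[] is assembled
 * around the copied IV so req->src is left untouched.
 */
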
static int essiv_aead_encrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, true);
}

static int essiv_aead_decrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, false);
}

static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
			  struct essiv_tfm_ctx *tctx)
{
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int err;

	essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
	if (IS_ERR(essiv_cipher))
		return PTR_ERR(essiv_cipher);

	hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_essiv_cipher;
	}

	tctx->essiv_cipher = essiv_cipher;
	tctx->hash = hash;

	return 0;

err_free_essiv_cipher:
	crypto_free_cipher(essiv_cipher);
	return err;
}

static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *skcipher;
	int err;

	skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
	if (IS_ERR(skcipher))
		return PTR_ERR(skcipher);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
					 crypto_skcipher_reqsize(skcipher));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_skcipher(skcipher);
		return err;
	}

	tctx->u.skcipher = skcipher;
	return 0;
}

static int essiv_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned int subreq_size;
	int err;

	BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
		     sizeof(struct essiv_aead_request_ctx));

	aead = crypto_spawn_aead(&ictx->u.aead_spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	subreq_size = sizeof_field(struct essiv_aead_request_ctx, aead_req) +
		      crypto_aead_reqsize(aead);

	tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
			 subreq_size;
	crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));

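	/*
	 * Resulting per-request memory layout (sketch): the inner
	 * aead_request plus its own request ctx sit at the tail of
	 * essiv_aead_request_ctx, and the converted IV is stashed in the
	 * ivsize bytes that follow:
	 *
	 *	[ sg[4] | assoc | aead_req + inner reqctx | IV ]
	 *	                                            ^ tctx->ivoffset
	 */
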
	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_aead(aead);
		return err;
	}

	tctx->u.aead = aead;
	return 0;
}

static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->u.skcipher);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	crypto_free_aead(tctx->u.aead);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
{
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	kfree(inst);
}

static void essiv_aead_free_instance(struct aead_instance *inst)
{
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);

	crypto_drop_aead(&ictx->u.aead_spawn);
	kfree(inst);
}

static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
{
	const char *p, *q;
	int len;

	/* find the last opening parens */
	p = strrchr(cra_name, '(');
	if (!p++)
		return false;

	/* find the first closing parens in the tail of the string */
	q = strchr(p, ')');
	if (!q)
		return false;

	len = q - p;
	if (len >= CRYPTO_MAX_ALG_NAME)
		return false;

	memcpy(essiv_cipher_name, p, len);
	essiv_cipher_name[len] = '\0';
	return true;
}

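/*
 * Worked example (follows from the rules above, no additional behavior):
 * "cbc(aes)" yields "aes", and "authenc(hmac(sha256),cbc(aes))" also
 * yields "aes", since the last '(' precedes the bare cipher name and the
 * first ')' after it terminates it.
 */
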
static bool essiv_supported_algorithms(const char *essiv_cipher_name,
				       struct shash_alg *hash_alg,
				       int ivsize)
{
	struct crypto_alg *alg;
	bool ret = false;

	alg = crypto_alg_mod_lookup(essiv_cipher_name,
				    CRYPTO_ALG_TYPE_CIPHER,
				    CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return false;

	/* the digest of the bulk key must be a valid essiv cipher key */
	if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
	    hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
		goto out;

	/* the IV is encrypted as a single block, so the sizes must match */
	if (ivsize != alg->cra_blocksize)
		goto out;

	if (crypto_shash_alg_needs_key(hash_alg))
		goto out;

	ret = true;

out:
	crypto_mod_put(alg);
	return ret;
}

static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_alg_common *skcipher_alg = NULL;
	struct crypto_attr_type *algt;
	const char *inner_cipher_name;
	const char *shash_name;
	struct skcipher_instance *skcipher_inst = NULL;
	struct aead_instance *aead_inst = NULL;
	struct crypto_instance *inst;
	struct crypto_alg *base, *block_base;
	struct essiv_instance_ctx *ictx;
	struct aead_alg *aead_alg = NULL;
	struct crypto_alg *_hash_alg;
	struct shash_alg *hash_alg;
	int ivsize;
	u32 type;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	inner_cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(inner_cipher_name))
		return PTR_ERR(inner_cipher_name);

	shash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(shash_name))
		return PTR_ERR(shash_name);

	type = algt->type & algt->mask;
	mask = crypto_algt_inherited_mask(algt);

	switch (type) {
	case CRYPTO_ALG_TYPE_LSKCIPHER:
		skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
					sizeof(*ictx), GFP_KERNEL);
		if (!skcipher_inst)
			return -ENOMEM;
		inst = skcipher_crypto_instance(skcipher_inst);
		base = &skcipher_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* Symmetric cipher, e.g., "cbc(aes)" */
		err = crypto_grab_skcipher(&ictx->u.skcipher_spawn, inst,
					   inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		skcipher_alg = crypto_spawn_skcipher_alg_common(
			&ictx->u.skcipher_spawn);
		block_base = &skcipher_alg->base;
		ivsize = skcipher_alg->ivsize;
		break;

	case CRYPTO_ALG_TYPE_AEAD:
		aead_inst = kzalloc(sizeof(*aead_inst) +
				    sizeof(*ictx), GFP_KERNEL);
		if (!aead_inst)
			return -ENOMEM;
		inst = aead_crypto_instance(aead_inst);
		base = &aead_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* AEAD cipher, e.g., "authenc(hmac(sha256),cbc(aes))" */
		err = crypto_grab_aead(&ictx->u.aead_spawn, inst,
				       inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
		block_base = &aead_alg->base;
		if (!strstarts(block_base->cra_name, "authenc(")) {
			pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
			err = -EINVAL;
			goto out_drop_skcipher;
		}
		ivsize = aead_alg->ivsize;
		break;

	default:
		return -EINVAL;
	}

	if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
		pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
		err = -EINVAL;
		goto out_drop_skcipher;
	}

	/* Synchronous hash, e.g., "sha256" */
	_hash_alg = crypto_alg_mod_lookup(shash_name,
					  CRYPTO_ALG_TYPE_SHASH,
					  CRYPTO_ALG_TYPE_MASK | mask);
	if (IS_ERR(_hash_alg)) {
		err = PTR_ERR(_hash_alg);
		goto out_drop_skcipher;
	}
	hash_alg = __crypto_shash_alg(_hash_alg);

	/* Check the set of algorithms */
	if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
					ivsize)) {
		pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
			block_base->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto out_free_hash;
	}

	/* record the driver name so we can instantiate this exact algo later */
	strscpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name,
		CRYPTO_MAX_ALG_NAME);

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_name,
		     hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;
	if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;

	/*
	 * hash_alg wasn't gotten via crypto_grab*(), so we need to inherit its
	 * flags manually.
	 */
	base->cra_flags |= (hash_alg->base.cra_flags &
			    CRYPTO_ALG_INHERITED_FLAGS);
	base->cra_blocksize = block_base->cra_blocksize;
	base->cra_ctxsize = sizeof(struct essiv_tfm_ctx);
	base->cra_alignmask = block_base->cra_alignmask;
	base->cra_priority = block_base->cra_priority;

	if (type == CRYPTO_ALG_TYPE_LSKCIPHER) {
		skcipher_inst->alg.setkey = essiv_skcipher_setkey;
		skcipher_inst->alg.encrypt = essiv_skcipher_encrypt;
		skcipher_inst->alg.decrypt = essiv_skcipher_decrypt;
		skcipher_inst->alg.init = essiv_skcipher_init_tfm;
		skcipher_inst->alg.exit = essiv_skcipher_exit_tfm;

		skcipher_inst->alg.min_keysize = skcipher_alg->min_keysize;
		skcipher_inst->alg.max_keysize = skcipher_alg->max_keysize;
		skcipher_inst->alg.ivsize = ivsize;
		skcipher_inst->alg.chunksize = skcipher_alg->chunksize;

		skcipher_inst->free = essiv_skcipher_free_instance;

		err = skcipher_register_instance(tmpl, skcipher_inst);
	} else {
		aead_inst->alg.setkey = essiv_aead_setkey;
		aead_inst->alg.setauthsize = essiv_aead_setauthsize;
		aead_inst->alg.encrypt = essiv_aead_encrypt;
		aead_inst->alg.decrypt = essiv_aead_decrypt;
		aead_inst->alg.init = essiv_aead_init_tfm;
		aead_inst->alg.exit = essiv_aead_exit_tfm;

		aead_inst->alg.ivsize = ivsize;
		aead_inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(aead_alg);
		aead_inst->alg.chunksize = crypto_aead_alg_chunksize(aead_alg);

		aead_inst->free = essiv_aead_free_instance;

		err = aead_register_instance(tmpl, aead_inst);
	}

	if (err)
		goto out_free_hash;

	crypto_mod_put(_hash_alg);
	return 0;

out_free_hash:
	crypto_mod_put(_hash_alg);
out_drop_skcipher:
	if (type == CRYPTO_ALG_TYPE_LSKCIPHER)
		crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	else
		crypto_drop_aead(&ictx->u.aead_spawn);
out_free_inst:
	kfree(skcipher_inst);
	kfree(aead_inst);
	return err;
}

/* essiv(cipher_name, shash_name) */
static struct crypto_template essiv_tmpl = {
	.name	= "essiv",
	.create	= essiv_create,
	.module	= THIS_MODULE,
};

static int __init essiv_module_init(void)
{
	return crypto_register_template(&essiv_tmpl);
}

static void __exit essiv_module_exit(void)
{
	crypto_unregister_template(&essiv_tmpl);
}

subsys_initcall(essiv_module_init);
module_exit(essiv_module_exit);

MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("essiv");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);