// SPDX-License-Identifier: GPL-2.0
/*
 * HCTR2 length-preserving encryption mode
 *
 * Copyright 2021 Google LLC
 */

/*
 * HCTR2 is a length-preserving encryption mode that is efficient on
 * processors with instructions to accelerate AES and carryless
 * multiplication, e.g. x86 processors with AES-NI and CLMUL, and ARM
 * processors with the ARMv8 crypto extensions.
 *
 * For more details, see the paper: "Length-preserving encryption with HCTR2"
 * (https://eprint.iacr.org/2021/1441.pdf)
 */
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/polyval.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#define BLOCKCIPHER_BLOCK_SIZE		16
/*
 * The specification allows variable-length tweaks, but Linux's crypto API
 * currently only allows algorithms to support a single length.  The "natural"
 * tweak length for HCTR2 is 16, since that fits into one POLYVAL block for
 * the best performance.  But longer tweaks are useful for fscrypt, to avoid
 * needing to derive per-file keys.  So instead we use two blocks, or 32 bytes.
 */
#define TWEAK_SIZE		32
struct hctr2_instance_ctx {
	struct crypto_cipher_spawn blockcipher_spawn;
	struct crypto_skcipher_spawn xctr_spawn;
	struct crypto_shash_spawn polyval_spawn;
};
struct hctr2_tfm_ctx {
	struct crypto_cipher *blockcipher;
	struct crypto_skcipher *xctr;
	struct crypto_shash *polyval;
	u8 L[BLOCKCIPHER_BLOCK_SIZE];
	int hashed_tweak_offset;
	/*
	 * This struct is allocated with extra space for two exported hash
	 * states.  Since the hash state size is not known at compile-time, we
	 * can't add these to the struct directly.
	 *
	 * hashed_tweaklen_divisible;
	 * hashed_tweaklen_remainder;
	 */
};
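/*
 * Illustrative layout of the full tfm context allocation (the trailing states
 * are not declared members; cra_ctxsize reserves room for them):
 *
 *	struct hctr2_tfm_ctx
 *	exported POLYVAL state for the tweak length, message divisible
 *	exported POLYVAL state for the tweak length, message with remainder
 *
 * hctr2_hashed_tweaklen() below returns a pointer into this trailing area.
 */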
struct hctr2_request_ctx {
	u8 first_block[BLOCKCIPHER_BLOCK_SIZE];
	u8 xctr_iv[BLOCKCIPHER_BLOCK_SIZE];
	struct scatterlist *bulk_part_dst;
	struct scatterlist *bulk_part_src;
	struct scatterlist sg_src[2];
	struct scatterlist sg_dst[2];
	/*
	 * Sub-request sizes are unknown at compile-time, so they need to go
	 * after the members with known sizes.
	 */
	union {
		struct shash_desc hash_desc;
		struct skcipher_request xctr_req;
	} u;
	/*
	 * This struct is allocated with extra space for one exported hash
	 * state.  Since the hash state size is not known at compile-time, we
	 * can't add it to the struct directly.
	 *
	 * hashed_tweak;
	 */
};
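/*
 * Illustrative layout of the full request context (the sub-request body and
 * the trailing hash state are not declared members; the request size set in
 * hctr2_init_tfm() reserves room for them):
 *
 *	struct hctr2_request_ctx
 *	variable-length part of the shash descriptor or xctr sub-request
 *	exported POLYVAL state for the hashed tweak
 *
 * hctr2_hashed_tweak() below returns a pointer to that final state.
 */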
static inline u8 *hctr2_hashed_tweaklen(const struct hctr2_tfm_ctx *tctx,
					bool has_remainder)
{
	u8 *p = (u8 *)tctx + sizeof(*tctx);

	if (has_remainder) /* For messages not a multiple of block length */
		p += crypto_shash_statesize(tctx->polyval);
	return p;
}
static inline u8 *hctr2_hashed_tweak(const struct hctr2_tfm_ctx *tctx,
				     struct hctr2_request_ctx *rctx)
{
	return (u8 *)rctx + tctx->hashed_tweak_offset;
}
/*
 * The input data for each HCTR2 hash step begins with a 16-byte block that
 * contains the tweak length and a flag that indicates whether the input is
 * evenly divisible into blocks.  Since this implementation only supports one
 * tweak length, we precompute the two hash states resulting from hashing the
 * two possible values of this initial block.  This reduces by one block the
 * amount of data that needs to be hashed for each encryption/decryption.
 *
 * These precomputed hashes are stored in hctr2_tfm_ctx.
 */
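/*
 * For example, with TWEAK_SIZE == 32 the first 8 bytes of this initial block
 * hold the little-endian value 32 * 8 * 2 + 2 = 514 when the message is a
 * whole number of blocks, or 515 when it has a partial final block; the
 * remaining 8 bytes stay zero.
 */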
static int hctr2_hash_tweaklen(struct hctr2_tfm_ctx *tctx, bool has_remainder)
{
	SHASH_DESC_ON_STACK(shash, tctx->polyval);
	__le64 tweak_length_block[2];
	int err;

	shash->tfm = tctx->polyval;
	memset(tweak_length_block, 0, sizeof(tweak_length_block));

	tweak_length_block[0] = cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2 + has_remainder);
	err = crypto_shash_init(shash);
	if (err)
		return err;
	err = crypto_shash_update(shash, (u8 *)tweak_length_block,
				  sizeof(tweak_length_block));
	if (err)
		return err;
	return crypto_shash_export(shash, hctr2_hashed_tweaklen(tctx, has_remainder));
}
static int hctr2_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 hbar[BLOCKCIPHER_BLOCK_SIZE];
	int err;

	crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->blockcipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->blockcipher, key, keylen);
	if (err)
		return err;

	crypto_skcipher_clear_flags(tctx->xctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->xctr,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->xctr, key, keylen);
	if (err)
		return err;

	/* hbar = E_K(0^128): used below as the POLYVAL key */
	memset(hbar, 0, sizeof(hbar));
	crypto_cipher_encrypt_one(tctx->blockcipher, hbar, hbar);

	/* L = E_K(bin(1)): XORed into the XCTR IV in hctr2_crypt() */
	memset(tctx->L, 0, sizeof(tctx->L));
	tctx->L[0] = 0x01;
	crypto_cipher_encrypt_one(tctx->blockcipher, tctx->L, tctx->L);

	crypto_shash_clear_flags(tctx->polyval, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(tctx->polyval, crypto_skcipher_get_flags(tfm) &
			       CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(tctx->polyval, hbar, BLOCKCIPHER_BLOCK_SIZE);
	if (err)
		return err;
	memzero_explicit(hbar, sizeof(hbar));

	return hctr2_hash_tweaklen(tctx, true) ?: hctr2_hash_tweaklen(tctx, false);
}
static int hctr2_hash_tweak(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	int err;
	bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE;

	hash_desc->tfm = tctx->polyval;
	err = crypto_shash_import(hash_desc, hctr2_hashed_tweaklen(tctx, has_remainder));
	if (err)
		return err;
	err = crypto_shash_update(hash_desc, req->iv, TWEAK_SIZE);
	if (err)
		return err;

	// Store the hashed tweak, since we need it when computing both
	// H(T || N) and H(T || V).
	return crypto_shash_export(hash_desc, hctr2_hashed_tweak(tctx, rctx));
}
static int hctr2_hash_message(struct skcipher_request *req,
			      struct scatterlist *sgl,
			      u8 digest[POLYVAL_DIGEST_SIZE])
{
	static const u8 padding[BLOCKCIPHER_BLOCK_SIZE] = { 0x1 };
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	struct sg_mapping_iter miter;
	unsigned int remainder = bulk_len % BLOCKCIPHER_BLOCK_SIZE;
	unsigned int i, n;
	int err = 0;

	sg_miter_start(&miter, sgl, sg_nents(sgl),
		       SG_MITER_FROM_SG | SG_MITER_ATOMIC);
	for (i = 0; i < bulk_len; i += n) {
		sg_miter_next(&miter);
		n = min_t(unsigned int, miter.length, bulk_len - i);
		err = crypto_shash_update(hash_desc, miter.addr, n);
		if (err)
			break;
	}
	sg_miter_stop(&miter);

	if (err)
		return err;

	if (remainder) {
		/* Pad the final partial block with 0x01 followed by zeroes */
		err = crypto_shash_update(hash_desc, padding,
					  BLOCKCIPHER_BLOCK_SIZE - remainder);
		if (err)
			return err;
	}
	return crypto_shash_final(hash_desc, digest);
}
static int hctr2_finish(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	u8 digest[POLYVAL_DIGEST_SIZE];
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	int err;

	// U = UU ^ H(T || V)
	// or M = MM ^ H(T || N)
	hash_desc->tfm = tctx->polyval;
	err = crypto_shash_import(hash_desc, hctr2_hashed_tweak(tctx, rctx));
	if (err)
		return err;
	err = hctr2_hash_message(req, rctx->bulk_part_dst, digest);
	if (err)
		return err;
	crypto_xor(rctx->first_block, digest, BLOCKCIPHER_BLOCK_SIZE);

	// Copy U (or M) into dst scatterlist
	scatterwalk_map_and_copy(rctx->first_block, req->dst,
				 0, BLOCKCIPHER_BLOCK_SIZE, 1);
	return 0;
}
static void hctr2_xctr_done(void *data, int err)
{
	struct skcipher_request *req = data;

	if (!err)
		err = hctr2_finish(req);

	skcipher_request_complete(req, err);
}
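/*
 * Overview of the flow implemented below, in the paper's notation: split the
 * message into the first 16-byte block M and the bulk N; MM = M ^ H(T || N);
 * UU = E(MM); S = MM ^ UU ^ L; V = XCTR_S(N); U = UU ^ H(T || V).  Decryption
 * runs the same steps with D() in place of E() and the roles of (M, N) and
 * (U, V) swapped.
 */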
static int hctr2_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	u8 digest[POLYVAL_DIGEST_SIZE];
	int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	int err;

	// Requests must be at least one block
	if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)
		return -EINVAL;

	// Copy M (or U) into a temporary buffer
	scatterwalk_map_and_copy(rctx->first_block, req->src,
				 0, BLOCKCIPHER_BLOCK_SIZE, 0);

	// Create scatterlists for N and V
	rctx->bulk_part_src = scatterwalk_ffwd(rctx->sg_src, req->src,
					       BLOCKCIPHER_BLOCK_SIZE);
	rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
					       BLOCKCIPHER_BLOCK_SIZE);

	// MM = M ^ H(T || N)
	// or UU = U ^ H(T || V)
	err = hctr2_hash_tweak(req);
	if (err)
		return err;
	err = hctr2_hash_message(req, rctx->bulk_part_src, digest);
	if (err)
		return err;
	crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);

	// UU = E(MM)
	// or MM = D(UU)
	if (enc)
		crypto_cipher_encrypt_one(tctx->blockcipher, rctx->first_block,
					  digest);
	else
		crypto_cipher_decrypt_one(tctx->blockcipher, rctx->first_block,
					  digest);

	// S = MM ^ UU ^ L
	crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);
	crypto_xor_cpy(rctx->xctr_iv, digest, tctx->L, BLOCKCIPHER_BLOCK_SIZE);

	// V = XCTR(S, N)
	// or N = XCTR(S, V)
	skcipher_request_set_tfm(&rctx->u.xctr_req, tctx->xctr);
	skcipher_request_set_crypt(&rctx->u.xctr_req, rctx->bulk_part_src,
				   rctx->bulk_part_dst, bulk_len,
				   rctx->xctr_iv);
	skcipher_request_set_callback(&rctx->u.xctr_req,
				      req->base.flags,
				      hctr2_xctr_done, req);
	return crypto_skcipher_encrypt(&rctx->u.xctr_req) ?:
		hctr2_finish(req);
}
static int hctr2_encrypt(struct skcipher_request *req)
{
	return hctr2_crypt(req, true);
}

static int hctr2_decrypt(struct skcipher_request *req)
{
	return hctr2_crypt(req, false);
}
static int hctr2_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *xctr;
	struct crypto_cipher *blockcipher;
	struct crypto_shash *polyval;
	unsigned int subreq_size;
	int err;

	xctr = crypto_spawn_skcipher(&ictx->xctr_spawn);
	if (IS_ERR(xctr))
		return PTR_ERR(xctr);

	blockcipher = crypto_spawn_cipher(&ictx->blockcipher_spawn);
	if (IS_ERR(blockcipher)) {
		err = PTR_ERR(blockcipher);
		goto err_free_xctr;
	}

	polyval = crypto_spawn_shash(&ictx->polyval_spawn);
	if (IS_ERR(polyval)) {
		err = PTR_ERR(polyval);
		goto err_free_blockcipher;
	}

	tctx->xctr = xctr;
	tctx->blockcipher = blockcipher;
	tctx->polyval = polyval;

	BUILD_BUG_ON(offsetofend(struct hctr2_request_ctx, u) !=
		     sizeof(struct hctr2_request_ctx));
	subreq_size = max(sizeof_field(struct hctr2_request_ctx, u.hash_desc) +
			  crypto_shash_descsize(polyval),
			  sizeof_field(struct hctr2_request_ctx, u.xctr_req) +
			  crypto_skcipher_reqsize(xctr));

	tctx->hashed_tweak_offset = offsetof(struct hctr2_request_ctx, u) +
				    subreq_size;
	crypto_skcipher_set_reqsize(tfm, tctx->hashed_tweak_offset +
				    crypto_shash_statesize(polyval));
	return 0;

err_free_blockcipher:
	crypto_free_cipher(blockcipher);
err_free_xctr:
	crypto_free_skcipher(xctr);
	return err;
}
static void hctr2_exit_tfm(struct crypto_skcipher *tfm)
{
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(tctx->blockcipher);
	crypto_free_skcipher(tctx->xctr);
	crypto_free_shash(tctx->polyval);
}
static void hctr2_free_instance(struct skcipher_instance *inst)
{
	struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_cipher(&ictx->blockcipher_spawn);
	crypto_drop_skcipher(&ictx->xctr_spawn);
	crypto_drop_shash(&ictx->polyval_spawn);
	kfree(inst);
}
static int hctr2_create_common(struct crypto_template *tmpl,
			       struct rtattr **tb,
			       const char *xctr_name,
			       const char *polyval_name)
{
	struct skcipher_alg_common *xctr_alg;
	u32 mask;
	struct skcipher_instance *inst;
	struct hctr2_instance_ctx *ictx;
	struct crypto_alg *blockcipher_alg;
	struct shash_alg *polyval_alg;
	char blockcipher_name[CRYPTO_MAX_ALG_NAME];
	int len;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ictx = skcipher_instance_ctx(inst);

	/* Stream cipher, xctr(block_cipher) */
	err = crypto_grab_skcipher(&ictx->xctr_spawn,
				   skcipher_crypto_instance(inst),
				   xctr_name, 0, mask);
	if (err)
		goto err_free_inst;
	xctr_alg = crypto_spawn_skcipher_alg_common(&ictx->xctr_spawn);

	err = -EINVAL;
	if (strncmp(xctr_alg->base.cra_name, "xctr(", 5))
		goto err_free_inst;
	len = strscpy(blockcipher_name, xctr_alg->base.cra_name + 5,
		      sizeof(blockcipher_name));
	if (len < 1)
		goto err_free_inst;
	if (blockcipher_name[len - 1] != ')')
		goto err_free_inst;
	blockcipher_name[len - 1] = 0;

	/* Block cipher, e.g. "aes" */
	err = crypto_grab_cipher(&ictx->blockcipher_spawn,
				 skcipher_crypto_instance(inst),
				 blockcipher_name, 0, mask);
	if (err)
		goto err_free_inst;
	blockcipher_alg = crypto_spawn_cipher_alg(&ictx->blockcipher_spawn);

	/* Require blocksize of 16 bytes */
	err = -EINVAL;
	if (blockcipher_alg->cra_blocksize != BLOCKCIPHER_BLOCK_SIZE)
		goto err_free_inst;

	/* Polyval ε-∆U hash function */
	err = crypto_grab_shash(&ictx->polyval_spawn,
				skcipher_crypto_instance(inst),
				polyval_name, 0, mask);
	if (err)
		goto err_free_inst;
	polyval_alg = crypto_spawn_shash_alg(&ictx->polyval_spawn);

	/* Ensure Polyval is being used */
	err = -EINVAL;
	if (strcmp(polyval_alg->base.cra_name, "polyval") != 0)
		goto err_free_inst;

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, "hctr2(%s)",
		     blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "hctr2_base(%s,%s)",
		     xctr_alg->base.cra_driver_name,
		     polyval_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE;
	inst->alg.base.cra_ctxsize = sizeof(struct hctr2_tfm_ctx) +
				     polyval_alg->statesize * 2;
	inst->alg.base.cra_alignmask = xctr_alg->base.cra_alignmask;
	/*
	 * The hash function is called twice, so it is weighted higher than the
	 * xctr and blockcipher.
	 */
	inst->alg.base.cra_priority = (2 * xctr_alg->base.cra_priority +
				       4 * polyval_alg->base.cra_priority +
				       blockcipher_alg->cra_priority) / 7;

	inst->alg.setkey = hctr2_setkey;
	inst->alg.encrypt = hctr2_encrypt;
	inst->alg.decrypt = hctr2_decrypt;
	inst->alg.init = hctr2_init_tfm;
	inst->alg.exit = hctr2_exit_tfm;
	inst->alg.min_keysize = xctr_alg->min_keysize;
	inst->alg.max_keysize = xctr_alg->max_keysize;
	inst->alg.ivsize = TWEAK_SIZE;

	inst->free = hctr2_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		hctr2_free_instance(inst);
	}
	return err;
}
static int hctr2_create_base(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *xctr_name;
	const char *polyval_name;

	xctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(xctr_name))
		return PTR_ERR(xctr_name);

	polyval_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(polyval_name))
		return PTR_ERR(polyval_name);

	return hctr2_create_common(tmpl, tb, xctr_name, polyval_name);
}
static int hctr2_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *blockcipher_name;
	char xctr_name[CRYPTO_MAX_ALG_NAME];

	blockcipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(blockcipher_name))
		return PTR_ERR(blockcipher_name);

	if (snprintf(xctr_name, CRYPTO_MAX_ALG_NAME, "xctr(%s)",
		     blockcipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return hctr2_create_common(tmpl, tb, xctr_name, "polyval");
}
static struct crypto_template hctr2_tmpls[] = {
	{
		/* hctr2_base(xctr_name, polyval_name) */
		.name = "hctr2_base",
		.create = hctr2_create_base,
		.module = THIS_MODULE,
	}, {
		/* hctr2(blockcipher_name) */
		.name = "hctr2",
		.create = hctr2_create,
		.module = THIS_MODULE,
	},
};
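/*
 * Example instantiation by a crypto API user (illustrative, not part of this
 * module): allocating "hctr2(aes)" via
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("hctr2(aes)", 0, 0);
 *
 * goes through hctr2_create(), which expands it into hctr2_create_common()
 * with "xctr(aes)" and "polyval".
 */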
static int __init hctr2_module_init(void)
{
	return crypto_register_templates(hctr2_tmpls, ARRAY_SIZE(hctr2_tmpls));
}
static void __exit hctr2_module_exit(void)
{
	crypto_unregister_templates(hctr2_tmpls,
				    ARRAY_SIZE(hctr2_tmpls));
}

subsys_initcall(hctr2_module_init);
module_exit(hctr2_module_exit);
MODULE_DESCRIPTION("HCTR2 length-preserving encryption mode");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("hctr2");
MODULE_IMPORT_NS("CRYPTO_INTERNAL");