// SPDX-License-Identifier: GPL-2.0-or-later
/* XTS: as defined in IEEE1619/D16
 *	http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
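
/* Per-transform context: the inner ECB skcipher ('child') handles the bulk
 * data blocks with Key1, while the raw block cipher ('tweak') is used only
 * to encrypt the IV into the initial tweak value with Key2. */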
struct xts_tfm_ctx {
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
};

struct xts_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	char name[CRYPTO_MAX_ALG_NAME];
};
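
/* Per-request state: 't' holds the current tweak mask, 'tail'/'sg' track
 * the final blocks for ciphertext stealing, and 'subreq' is the request
 * forwarded to the inner ecb(..) instance. */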
struct xts_request_ctx {
	le128 t;
	struct scatterlist *tail;
	struct scatterlist sg[2];
	struct skcipher_request subreq;
};
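
/* An XTS key is the concatenation of two keys of equal size: Key1 for the
 * data cipher and Key2 for the tweak cipher. xts_verify_key() rejects odd
 * key lengths (and, in FIPS mode, keys whose two halves are identical). */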
static int xts_setkey(struct crypto_skcipher *parent, const u8 *key,
		      unsigned int keylen)
{
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
	int err;

	err = xts_verify_key(parent, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* we need two cipher instances: one to compute the initial 'tweak'
	 * by encrypting the IV (usually the 'plain' iv) and the other
	 * one to encrypt and decrypt the data */

	/* tweak cipher, uses Key2 i.e. the second half of *key */
	tweak = ctx->tweak;
	crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tweak, key + keylen, keylen);
	if (err)
		return err;

	/* data cipher, uses Key1 i.e. the first half of *key */
	child = ctx->child;
	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}

/*
 * We compute the tweak masks twice (both before and after the ECB encryption or
 * decryption) to avoid having to allocate a temporary buffer and/or make
 * multiple calls to the 'ecb(..)' instance, which usually would be slower than
 * just doing the gf128mul_x_ble() calls again.
 */
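/*
 * Successive tweak masks follow T_{i+1} = T_i * x in GF(2^128); that is
 * exactly what gf128mul_x_ble() computes on the little-endian block
 * representation, so the sequence is cheap to regenerate on each pass.
 */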
static int xts_xor_tweak(struct skcipher_request *req, bool second_pass,
			 bool enc)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
	const int bs = XTS_BLOCK_SIZE;
	struct skcipher_walk w;
	le128 t = rctx->t;
	int err;

	if (second_pass) {
		req = &rctx->subreq;
		/* set to our TFM to enforce correct alignment: */
		skcipher_request_set_tfm(req, tfm);
	}
	err = skcipher_walk_virt(&w, req, false);

	while (w.nbytes) {
		unsigned int avail = w.nbytes;
		le128 *wsrc;
		le128 *wdst;

		wsrc = w.src.virt.addr;
		wdst = w.dst.virt.addr;

		do {
			if (unlikely(cts) &&
			    w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) {
				if (!enc) {
					if (second_pass)
						rctx->t = t;
					gf128mul_x_ble(&t, &t);
				}
				le128_xor(wdst, &t, wsrc);
				if (enc && second_pass)
					gf128mul_x_ble(&rctx->t, &t);

				skcipher_walk_done(&w, avail - bs);
				return 0;
			}

			le128_xor(wdst++, &t, wsrc++);
			gf128mul_x_ble(&t, &t);
		} while ((avail -= bs) >= bs);

		err = skcipher_walk_done(&w, avail);
	}

	return err;
}

static int xts_xor_tweak_pre(struct skcipher_request *req, bool enc)
{
	return xts_xor_tweak(req, false, enc);
}

static int xts_xor_tweak_post(struct skcipher_request *req, bool enc)
{
	return xts_xor_tweak(req, true, enc);
}
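
/* Completion callback for the ciphertext-stealing subrequest issued by
 * xts_cts_final(): once the inner encryption of the stolen block finishes,
 * the final tweak mask still has to be xor'ed into the result. */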
static void xts_cts_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;
	le128 b;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
		le128_xor(&b, &rctx->t, &b);
		scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
	}

	skcipher_request_complete(req, err);
}
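
/* Ciphertext stealing (IEEE 1619) for messages that are not a multiple of
 * the block size: the partial tail is padded with ciphertext stolen from
 * the last full block, that combined block is passed through the cipher
 * once more, and the two final blocks end up swapped in the output. */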
static int xts_cts_final(struct skcipher_request *req,
			 int (*crypt)(struct skcipher_request *req))
{
	const struct xts_tfm_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int tail = req->cryptlen % XTS_BLOCK_SIZE;
	le128 b[2];
	int err;

	rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
				      offset - XTS_BLOCK_SIZE);

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
	b[1] = b[0];
	scatterwalk_map_and_copy(b, req->src, offset, tail, 0);

	le128_xor(b, &rctx->t, b);

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, xts_cts_done,
				      req);
	skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,
				   XTS_BLOCK_SIZE, NULL);

	err = crypt(subreq);
	if (err)
		return err;

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
	le128_xor(b, &rctx->t, b);
	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);

	return 0;
}
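
/* Async completion handlers: finish the second xor-tweak pass once the
 * inner ECB request completes, then run the ciphertext-stealing step if
 * the request length is not block-aligned. */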
static void xts_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
		err = xts_xor_tweak_post(req, true);

		if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
			err = xts_cts_final(req, crypto_skcipher_encrypt);
			if (err == -EINPROGRESS)
				return;
		}
	}

	skcipher_request_complete(req, err);
}

static void xts_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
		err = xts_xor_tweak_post(req, false);

		if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
			err = xts_cts_final(req, crypto_skcipher_decrypt);
			if (err == -EINPROGRESS)
				return;
		}
	}

	skcipher_request_complete(req, err);
}
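
/* Common setup for both directions: point the subrequest at the inner
 * cipher over the block-aligned prefix of the data, and derive the first
 * tweak by encrypting the IV with the tweak cipher. */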
static int xts_init_crypt(struct skcipher_request *req,
			  crypto_completion_t compl)
{
	const struct xts_tfm_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, compl, req);
	skcipher_request_set_crypt(subreq, req->dst, req->dst,
				   req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);

	/* calculate first value of T */
	crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);

	return 0;
}
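
/* XTS is xor-encrypt-xor: mask the data with the tweak sequence, run it
 * through the inner ECB instance, then mask again. The ?: chains below
 * short-circuit on the first non-zero error code. */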
static int xts_encrypt(struct skcipher_request *req)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int err;

	err = xts_init_crypt(req, xts_encrypt_done) ?:
	      xts_xor_tweak_pre(req, true) ?:
	      crypto_skcipher_encrypt(subreq) ?:
	      xts_xor_tweak_post(req, true);

	if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
		return err;

	return xts_cts_final(req, crypto_skcipher_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int err;

	err = xts_init_crypt(req, xts_decrypt_done) ?:
	      xts_xor_tweak_pre(req, false) ?:
	      crypto_skcipher_decrypt(subreq) ?:
	      xts_xor_tweak_post(req, false);

	if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
		return err;

	return xts_cts_final(req, crypto_skcipher_decrypt);
}
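
/* Instantiate the inner ecb(..) skcipher from the spawn and the bare tweak
 * cipher by name, and reserve request space for our context on top of the
 * child's. */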
static int xts_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;

	child = crypto_spawn_skcipher(&ictx->spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;

	tweak = crypto_alloc_cipher(ictx->name, 0, 0);
	if (IS_ERR(tweak)) {
		crypto_free_skcipher(ctx->child);
		return PTR_ERR(tweak);
	}

	ctx->tweak = tweak;

	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
					 sizeof(struct xts_request_ctx));

	return 0;
}

static void xts_exit_tfm(struct crypto_skcipher *tfm)
{
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
	crypto_free_cipher(ctx->tweak);
}

static void xts_free_instance(struct skcipher_instance *inst)
{
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->spawn);
	kfree(inst);
}
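
/* Template instantiation: the named algorithm is grabbed as an skcipher,
 * falling back to "ecb(<name>)" if the bare name does not resolve. The
 * resolved algorithm must be an ecb(..) wrapper; its inner cipher name is
 * recovered to allocate the tweak cipher and to build the "xts(..)"
 * instance name. */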
static int xts_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct xts_instance_ctx *ctx;
	struct skcipher_alg *alg;
	const char *cipher_name;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   cipher_name, 0, mask);
	if (err == -ENOENT) {
		err = -ENAMETOOLONG;
		if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
			     cipher_name) >= CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		err = crypto_grab_skcipher(&ctx->spawn,
					   skcipher_crypto_instance(inst),
					   ctx->name, 0, mask);
	}

	if (err)
		goto err_free_inst;

	alg = crypto_skcipher_spawn_alg(&ctx->spawn);

	err = -EINVAL;
	if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
		goto err_free_inst;

	if (crypto_skcipher_alg_ivsize(alg))
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
				  &alg->base);
	if (err)
		goto err_free_inst;

	err = -EINVAL;
	cipher_name = alg->base.cra_name;

	/* Alas we screwed up the naming so we have to mangle the
	 * cipher name.
	 */
	if (!strncmp(cipher_name, "ecb(", 4)) {
		unsigned len;

		len = strlcpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
		if (len < 2 || len >= sizeof(ctx->name))
			goto err_free_inst;

		if (ctx->name[len - 1] != ')')
			goto err_free_inst;

		ctx->name[len - 1] = 0;

		if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
			     "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) {
			err = -ENAMETOOLONG;
			goto err_free_inst;
		}
	} else
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
				       (__alignof__(u64) - 1);

	inst->alg.ivsize = XTS_BLOCK_SIZE;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) * 2;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) * 2;

	inst->alg.base.cra_ctxsize = sizeof(struct xts_tfm_ctx);

	inst->alg.init = xts_init_tfm;
	inst->alg.exit = xts_exit_tfm;

	inst->alg.setkey = xts_setkey;
	inst->alg.encrypt = xts_encrypt;
	inst->alg.decrypt = xts_decrypt;

	inst->free = xts_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		xts_free_instance(inst);
	}
	return err;
}

static struct crypto_template xts_tmpl = {
	.name = "xts",
	.create = xts_create,
	.module = THIS_MODULE,
};

static int __init xts_module_init(void)
{
	return crypto_register_template(&xts_tmpl);
}

static void __exit xts_module_exit(void)
{
	crypto_unregister_template(&xts_tmpl);
}

subsys_initcall(xts_module_init);
module_exit(xts_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");
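
/*
 * Illustrative usage sketch: how a kernel caller might drive an "xts(aes)"
 * instance through the skcipher API with a synchronous wait. Buffer and key
 * names are hypothetical and error handling is omitted.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	u8 iv[XTS_BLOCK_SIZE] = { 0 };	// e.g. sector number, little-endian
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, 64);	// Key1 || Key2, AES-256 each
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, data, 512);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, 512, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */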