// SPDX-License-Identifier: GPL-2.0-or-later
/* XTS: as defined in IEEE1619/D16
 * http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
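
/*
 * For orientation, a sketch of the XTS construction this file implements
 * (IEEE 1619 is the authoritative reference).  With the key split as
 * K = Key1 || Key2 and 128-bit blocks:
 *
 *	T(0)   = E_Key2(IV)
 *	T(j+1) = T(j) * x	(doubling in GF(2^128), gf128mul_x_ble())
 *	C(j)   = E_Key1(P(j) ^ T(j)) ^ T(j)
 *
 * Messages that are not a multiple of the block size are finished with
 * ciphertext stealing (CTS) over the last two blocks.
 */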

#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

struct xts_tfm_ctx {
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
};

struct xts_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct crypto_cipher_spawn tweak_spawn;
};
struct xts_request_ctx {
	le128 t;
	struct scatterlist *tail;
	struct scatterlist sg[2];
	struct skcipher_request subreq;
};

static int xts_setkey(struct crypto_skcipher *parent, const u8 *key,
		      unsigned int keylen)
{
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
	int err;

	err = xts_verify_key(parent, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* we need two cipher instances: one to compute the initial 'tweak'
	 * by encrypting the IV (usually the 'plain' iv) and the other
	 * one to encrypt and decrypt the data */

	/* tweak cipher, uses Key2 i.e. the second half of *key */
	tweak = ctx->tweak;
	crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tweak, key + keylen, keylen);
	if (err)
		return err;

	/* data cipher, uses Key1 i.e. the first half of *key */
	child = ctx->child;
	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}
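
/*
 * For example, "xts(aes)" with a 64-byte key uses bytes 0..31 as Key1
 * (data cipher) and bytes 32..63 as Key2 (tweak cipher).  Note that
 * xts_verify_key() rejects keys of odd length and, when weak keys are
 * forbidden (e.g. in FIPS mode), keys whose two halves are identical.
 */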

/*
 * We compute the tweak masks twice (both before and after the ECB encryption or
 * decryption) to avoid having to allocate a temporary buffer and/or make
 * multiple calls to the 'ecb(..)' instance, which usually would be slower than
 * just doing the gf128mul_x_ble() calls again.
 */
static int xts_xor_tweak(struct skcipher_request *req, bool second_pass,
			 bool enc)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
	const int bs = XTS_BLOCK_SIZE;
	struct skcipher_walk w;
	le128 t = rctx->t;
	int err;

	if (second_pass) {
		req = &rctx->subreq;
		/* set to our TFM to enforce correct alignment: */
		skcipher_request_set_tfm(req, tfm);
	}
	err = skcipher_walk_virt(&w, req, false);

	while (w.nbytes) {
		unsigned int avail = w.nbytes;
		le128 *wsrc;
		le128 *wdst;

		wsrc = w.src.virt.addr;
		wdst = w.dst.virt.addr;

		do {
			if (unlikely(cts) &&
			    w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) {
				if (!enc) {
					if (second_pass)
						rctx->t = t;
					gf128mul_x_ble(&t, &t);
				}
				le128_xor(wdst, &t, wsrc);
				if (enc && second_pass)
					gf128mul_x_ble(&rctx->t, &t);
				skcipher_walk_done(&w, avail - bs);
				return 0;
			}

			le128_xor(wdst++, &t, wsrc++);
			gf128mul_x_ble(&t, &t);
		} while ((avail -= bs) >= bs);

		err = skcipher_walk_done(&w, avail);
	}

	return err;
}

static int xts_xor_tweak_pre(struct skcipher_request *req, bool enc)
{
	return xts_xor_tweak(req, false, enc);
}

static int xts_xor_tweak_post(struct skcipher_request *req, bool enc)
{
	return xts_xor_tweak(req, true, enc);
}
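
/*
 * Ciphertext stealing: when cryptlen is not a multiple of the block size,
 * the bulk pass above handles everything up to the last full block, and
 * xts_cts_final() then reprocesses that last full block together with the
 * partial tail, stealing the ciphertext bytes it needs from it.
 */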
static void xts_cts_done(void *data, int err)
{
	struct skcipher_request *req = data;
	le128 b;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
		le128_xor(&b, &rctx->t, &b);
		scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
	}

	skcipher_request_complete(req, err);
}
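
/*
 * Roughly, for the encrypt direction: b[0] starts out as the tail bytes of
 * plaintext padded with the stolen trailing bytes of the last full
 * ciphertext block, while b[1] is a copy of that last full block, which
 * becomes the final (truncated) partial ciphertext block.  The pair is
 * written back over the tail, and the single block at rctx->tail is then
 * run through the child cipher with the usual XOR-encrypt-XOR tweak
 * handling around it.  Decryption is the mirror image of this.
 */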
static int xts_cts_final(struct skcipher_request *req,
			 int (*crypt)(struct skcipher_request *req))
{
	const struct xts_tfm_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int tail = req->cryptlen % XTS_BLOCK_SIZE;
	le128 b[2];
	int err;

	rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
				      offset - XTS_BLOCK_SIZE);

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
	b[1] = b[0];
	scatterwalk_map_and_copy(b, req->src, offset, tail, 0);

	le128_xor(b, &rctx->t, b);

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, xts_cts_done,
				      req);
	skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,
				   XTS_BLOCK_SIZE, NULL);

	err = crypt(subreq);
	if (err)
		return err;

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
	le128_xor(b, &rctx->t, b);
	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);

	return 0;
}
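
/*
 * Completion callbacks for when the child ECB request finishes
 * asynchronously: redo the second tweak pass and, for CTS-sized requests,
 * kick off the tail processing from here.  The subreq flags are masked
 * down to MAY_BACKLOG because the callback may run in atomic context,
 * where sleeping is not allowed.
 */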
static void xts_encrypt_done(void *data, int err)
{
	struct skcipher_request *req = data;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		rctx->subreq.base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;
		err = xts_xor_tweak_post(req, true);

		if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
			err = xts_cts_final(req, crypto_skcipher_encrypt);
			if (err == -EINPROGRESS || err == -EBUSY)
				return;
		}
	}

	skcipher_request_complete(req, err);
}

static void xts_decrypt_done(void *data, int err)
{
	struct skcipher_request *req = data;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		rctx->subreq.base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;
		err = xts_xor_tweak_post(req, false);

		if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
			err = xts_cts_final(req, crypto_skcipher_decrypt);
			if (err == -EINPROGRESS || err == -EBUSY)
				return;
		}
	}

	skcipher_request_complete(req, err);
}

static int xts_init_crypt(struct skcipher_request *req,
			  crypto_completion_t compl)
{
	const struct xts_tfm_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, compl, req);
	skcipher_request_set_crypt(subreq, req->dst, req->dst,
				   req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);

	/* calculate first value of T */
	crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);

	return 0;
}
static int xts_encrypt(struct skcipher_request *req)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int err;

	err = xts_init_crypt(req, xts_encrypt_done) ?:
	      xts_xor_tweak_pre(req, true) ?:
	      crypto_skcipher_encrypt(subreq) ?:
	      xts_xor_tweak_post(req, true);

	if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
		return err;

	return xts_cts_final(req, crypto_skcipher_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int err;

	err = xts_init_crypt(req, xts_decrypt_done) ?:
	      xts_xor_tweak_pre(req, false) ?:
	      crypto_skcipher_decrypt(subreq) ?:
	      xts_xor_tweak_post(req, false);

	if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
		return err;

	return xts_cts_final(req, crypto_skcipher_decrypt);
}

static int xts_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;

	child = crypto_spawn_skcipher(&ictx->spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;

	tweak = crypto_spawn_cipher(&ictx->tweak_spawn);
	if (IS_ERR(tweak)) {
		crypto_free_skcipher(ctx->child);
		return PTR_ERR(tweak);
	}

	ctx->tweak = tweak;

	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
					 sizeof(struct xts_request_ctx));

	return 0;
}

static void xts_exit_tfm(struct crypto_skcipher *tfm)
{
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
	crypto_free_cipher(ctx->tweak);
}

static void xts_free_instance(struct skcipher_instance *inst)
{
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->spawn);
	crypto_drop_cipher(&ictx->tweak_spawn);
	kfree(inst);
}
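
/*
 * Template constructor for "xts(X)": grab X as an skcipher, falling back
 * to "ecb(X)" if X alone does not resolve.  The resolved inner algorithm
 * must end up being an "ecb(...)" instance; its inner cipher name is then
 * used both for the instance name "xts(...)" and for grabbing the bare
 * cipher a second time as the tweak cipher.
 */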
static int xts_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_alg_common *alg;
	char name[CRYPTO_MAX_ALG_NAME];
	struct skcipher_instance *inst;
	struct xts_instance_ctx *ctx;
	const char *cipher_name;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   cipher_name, 0, mask);
	if (err == -ENOENT) {
		err = -ENAMETOOLONG;
		if (snprintf(name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
			     cipher_name) >= CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		err = crypto_grab_skcipher(&ctx->spawn,
					   skcipher_crypto_instance(inst),
					   name, 0, mask);
	}

	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg_common(&ctx->spawn);

	err = -EINVAL;
	if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
		goto err_free_inst;

	if (alg->ivsize)
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
				  &alg->base);
	if (err)
		goto err_free_inst;

	err = -EINVAL;
	cipher_name = alg->base.cra_name;

	/* Alas we screwed up the naming so we have to mangle the
	 * cipher name.
	 */
	if (!strncmp(cipher_name, "ecb(", 4)) {
		int len;

		len = strscpy(name, cipher_name + 4, sizeof(name));
		if (len < 2)
			goto err_free_inst;

		if (name[len - 1] != ')')
			goto err_free_inst;

		name[len - 1] = 0;

		if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
			     "xts(%s)", name) >= CRYPTO_MAX_ALG_NAME) {
			err = -ENAMETOOLONG;
			goto err_free_inst;
		}
	} else
		goto err_free_inst;

	err = crypto_grab_cipher(&ctx->tweak_spawn,
				 skcipher_crypto_instance(inst), name, 0, mask);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
				       (__alignof__(u64) - 1);

	inst->alg.ivsize = XTS_BLOCK_SIZE;
	inst->alg.min_keysize = alg->min_keysize * 2;
	inst->alg.max_keysize = alg->max_keysize * 2;

	inst->alg.base.cra_ctxsize = sizeof(struct xts_tfm_ctx);

	inst->alg.init = xts_init_tfm;
	inst->alg.exit = xts_exit_tfm;

	inst->alg.setkey = xts_setkey;
	inst->alg.encrypt = xts_encrypt;
	inst->alg.decrypt = xts_decrypt;

	inst->free = xts_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		xts_free_instance(inst);
	}
	return err;
}
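
/*
 * Example use from other kernel code (a sketch; error handling omitted):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *	err = crypto_skcipher_setkey(tfm, key, 64);	// two 256-bit AES keys
 *
 * The template resolves the inner cipher via "ecb(aes)", so the ecb
 * module must be available (hence the MODULE_SOFTDEP below).
 */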
static struct crypto_template xts_tmpl = {
	.name = "xts",
	.create = xts_create,
	.module = THIS_MODULE,
};

static int __init xts_module_init(void)
{
	return crypto_register_template(&xts_tmpl);
}

static void __exit xts_module_exit(void)
{
	crypto_unregister_template(&xts_tmpl);
}

subsys_initcall(xts_module_init);
module_exit(xts_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");
MODULE_IMPORT_NS("CRYPTO_INTERNAL");
MODULE_SOFTDEP("pre: ecb");