// SPDX-License-Identifier: GPL-2.0-or-later
/* XTS: as defined in IEEE1619/D16
 * http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
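
/* Per-transform context: the child skcipher that does the bulk (ECB)
 * work and a single-block cipher used only to turn the IV into the
 * initial tweak.
 */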
struct priv {
        struct crypto_skcipher *child;
        struct crypto_cipher *tweak;
};
struct xts_instance_ctx {
        struct crypto_skcipher_spawn spawn;
        char name[CRYPTO_MAX_ALG_NAME];
};
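
/* Per-request context: the running tweak 't', scratch scatterlist
 * entries for the ciphertext-stealing tail, and the subrequest that is
 * handed to the child skcipher.
 */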
struct rctx {
        le128 t;
        struct scatterlist *tail;
        struct scatterlist sg[2];
        struct skcipher_request subreq;
};
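
/* An XTS key is the concatenation of Key1 (data) and Key2 (tweak), so it
 * is twice the size of the underlying cipher key; xts_verify_key() checks
 * the length (and, in FIPS mode, that the two halves differ).
 */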
static int setkey(struct crypto_skcipher *parent, const u8 *key,
                  unsigned int keylen)
{
        struct priv *ctx = crypto_skcipher_ctx(parent);
        struct crypto_skcipher *child;
        struct crypto_cipher *tweak;
        int err;

        err = xts_verify_key(parent, key, keylen);
        if (err)
                return err;

        keylen /= 2;
        /* we need two cipher instances: one to compute the initial 'tweak'
         * by encrypting the IV (usually the 'plain' iv) and the other
         * one to encrypt and decrypt the data */
        /* tweak cipher, uses Key2 i.e. the second half of *key */
        tweak = ctx->tweak;
        crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(tweak, key + keylen, keylen);
        if (err)
                return err;
        /* data cipher, uses Key1 i.e. the first half of *key */
        child = ctx->child;
        crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
                                         CRYPTO_TFM_REQ_MASK);
        return crypto_skcipher_setkey(child, key, keylen);
}
/*
 * We compute the tweak masks twice (both before and after the ECB encryption or
 * decryption) to avoid having to allocate a temporary buffer and/or make
 * multiple calls to the 'ecb(..)' instance, which usually would be slower than
 * just doing the gf128mul_x_ble() calls again.
 */
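/*
 * The tweak for block j is T_j = T_0 * alpha^j in GF(2^128), where T_0 is
 * the encrypted IV: each gf128mul_x_ble() call multiplies the running
 * tweak by alpha, so walking the request in order produces the sequence.
 */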
static int xor_tweak(struct skcipher_request *req, bool second_pass, bool enc)
{
        struct rctx *rctx = skcipher_request_ctx(req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
        const int bs = XTS_BLOCK_SIZE;
        struct skcipher_walk w;
        le128 t = rctx->t;
        int err;
        if (second_pass) {
                req = &rctx->subreq;
                /* set to our TFM to enforce correct alignment: */
                skcipher_request_set_tfm(req, tfm);
        }
        err = skcipher_walk_virt(&w, req, false);
        while (w.nbytes) {
                unsigned int avail = w.nbytes;
                le128 *wsrc;
                le128 *wdst;

                wsrc = w.src.virt.addr;
                wdst = w.dst.virt.addr;
                do {
                        if (unlikely(cts) &&
                            w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) {
                                if (!enc) {
                                        if (second_pass)
                                                rctx->t = t;
                                        gf128mul_x_ble(&t, &t);
                                }
                                le128_xor(wdst, &t, wsrc);
                                if (enc && second_pass)
                                        gf128mul_x_ble(&rctx->t, &t);
                                skcipher_walk_done(&w, avail - bs);
                                return 0;
                        }
                        le128_xor(wdst++, &t, wsrc++);
                        gf128mul_x_ble(&t, &t);
                } while ((avail -= bs) >= bs);

                err = skcipher_walk_done(&w, avail);
        }

        return err;
}
static int xor_tweak_pre(struct skcipher_request *req, bool enc)
{
        return xor_tweak(req, false, enc);
}
static int xor_tweak_post(struct skcipher_request *req, bool enc)
{
        return xor_tweak(req, true, enc);
}
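
/*
 * Ciphertext stealing (IEEE 1619): when the request length is not a
 * multiple of XTS_BLOCK_SIZE, the final partial block borrows ciphertext
 * from the last full block, which is then encrypted once more to cover
 * the tail. cts_done() applies the deferred tweak XOR after an
 * asynchronous child completes.
 */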
static void cts_done(struct crypto_async_request *areq, int err)
{
        struct skcipher_request *req = areq->data;
        le128 b;

        if (!err) {
                struct rctx *rctx = skcipher_request_ctx(req);

                scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
                le128_xor(&b, &rctx->t, &b);
                scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
        }

        skcipher_request_complete(req, err);
}
static int cts_final(struct skcipher_request *req,
                     int (*crypt)(struct skcipher_request *req))
{
        struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
        struct rctx *rctx = skcipher_request_ctx(req);
        struct skcipher_request *subreq = &rctx->subreq;
        int tail = req->cryptlen % XTS_BLOCK_SIZE;
        le128 b[2];
        int err;

        rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
                                      offset - XTS_BLOCK_SIZE);

        scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
        memcpy(b + 1, b, tail);
        scatterwalk_map_and_copy(b, req->src, offset, tail, 0);

        le128_xor(b, &rctx->t, b);

        scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);

        skcipher_request_set_tfm(subreq, ctx->child);
        skcipher_request_set_callback(subreq, req->base.flags, cts_done, req);
        skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,
                                   XTS_BLOCK_SIZE, NULL);

        err = crypt(subreq);
        if (err)
                return err;

        scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
        le128_xor(b, &rctx->t, b);
        scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);

        return 0;
}
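
/* Completion callbacks for the asynchronous case: redo the post-pass tweak
 * XOR and, for lengths that are not block-aligned, chain into the
 * ciphertext-stealing step.
 */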
static void encrypt_done(struct crypto_async_request *areq, int err)
{
        struct skcipher_request *req = areq->data;

        if (!err) {
                struct rctx *rctx = skcipher_request_ctx(req);

                rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
                err = xor_tweak_post(req, true);

                if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
                        err = cts_final(req, crypto_skcipher_encrypt);
                        if (err == -EINPROGRESS)
                                return;
                }
        }

        skcipher_request_complete(req, err);
}
static void decrypt_done(struct crypto_async_request *areq, int err)
{
        struct skcipher_request *req = areq->data;

        if (!err) {
                struct rctx *rctx = skcipher_request_ctx(req);

                rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
                err = xor_tweak_post(req, false);

                if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
                        err = cts_final(req, crypto_skcipher_decrypt);
                        if (err == -EINPROGRESS)
                                return;
                }
        }

        skcipher_request_complete(req, err);
}
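
/* Set up the child ECB subrequest over the block-aligned prefix of the
 * request and derive the initial tweak by encrypting the IV with Key2.
 */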
static int init_crypt(struct skcipher_request *req, crypto_completion_t compl)
{
        struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        struct rctx *rctx = skcipher_request_ctx(req);
        struct skcipher_request *subreq = &rctx->subreq;

        if (req->cryptlen < XTS_BLOCK_SIZE)
                return -EINVAL;

        skcipher_request_set_tfm(subreq, ctx->child);
        skcipher_request_set_callback(subreq, req->base.flags, compl, req);
        skcipher_request_set_crypt(subreq, req->dst, req->dst,
                                   req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);

        /* calculate first value of T */
        crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);

        return 0;
}
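
/* XTS proper is xor_tweak_pre -> ecb(child) -> xor_tweak_post; the '?:'
 * chain stops at the first step that returns non-zero.
 */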
static int encrypt(struct skcipher_request *req)
{
        struct rctx *rctx = skcipher_request_ctx(req);
        struct skcipher_request *subreq = &rctx->subreq;
        int err;

        err = init_crypt(req, encrypt_done) ?:
              xor_tweak_pre(req, true) ?:
              crypto_skcipher_encrypt(subreq) ?:
              xor_tweak_post(req, true);

        if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
                return err;

        return cts_final(req, crypto_skcipher_encrypt);
}
static int decrypt(struct skcipher_request *req)
{
        struct rctx *rctx = skcipher_request_ctx(req);
        struct skcipher_request *subreq = &rctx->subreq;
        int err;

        err = init_crypt(req, decrypt_done) ?:
              xor_tweak_pre(req, false) ?:
              crypto_skcipher_decrypt(subreq) ?:
              xor_tweak_post(req, false);

        if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
                return err;

        return cts_final(req, crypto_skcipher_decrypt);
}
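
/* Instantiate both child transforms and reserve enough request space for
 * the subrequest plus the child's own request context.
 */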
static int init_tfm(struct crypto_skcipher *tfm)
{
        struct skcipher_instance *inst = skcipher_alg_instance(tfm);
        struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
        struct priv *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *child;
        struct crypto_cipher *tweak;

        child = crypto_spawn_skcipher(&ictx->spawn);
        if (IS_ERR(child))
                return PTR_ERR(child);

        ctx->child = child;

        tweak = crypto_alloc_cipher(ictx->name, 0, 0);
        if (IS_ERR(tweak)) {
                crypto_free_skcipher(ctx->child);
                return PTR_ERR(tweak);
        }

        ctx->tweak = tweak;

        crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
                                         sizeof(struct rctx));

        return 0;
}
static void exit_tfm(struct crypto_skcipher *tfm)
{
        struct priv *ctx = crypto_skcipher_ctx(tfm);

        crypto_free_skcipher(ctx->child);
        crypto_free_cipher(ctx->tweak);
}
static void free_inst(struct skcipher_instance *inst)
{
        crypto_drop_skcipher(skcipher_instance_ctx(inst));
        kfree(inst);
}
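
/* Template instantiation: "xts(foo)" first tries to grab "foo" as an
 * skcipher directly and, on -ENOENT, retries with "ecb(foo)".
 */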
static int create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct skcipher_instance *inst;
        struct crypto_attr_type *algt;
        struct xts_instance_ctx *ctx;
        struct skcipher_alg *alg;
        const char *cipher_name;
        u32 mask;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
                return -EINVAL;

        cipher_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(cipher_name))
                return PTR_ERR(cipher_name);
        inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        ctx = skcipher_instance_ctx(inst);
        mask = crypto_requires_off(algt->type, algt->mask,
                                   CRYPTO_ALG_NEED_FALLBACK |
                                   CRYPTO_ALG_ASYNC);
        err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
                                   cipher_name, 0, mask);
        if (err == -ENOENT) {
                err = -ENAMETOOLONG;
                if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
                             cipher_name) >= CRYPTO_MAX_ALG_NAME)
                        goto err_free_inst;

                err = crypto_grab_skcipher(&ctx->spawn,
                                           skcipher_crypto_instance(inst),
                                           ctx->name, 0, mask);
        }

        if (err)
                goto err_free_inst;
        alg = crypto_skcipher_spawn_alg(&ctx->spawn);

        err = -EINVAL;
        if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
                goto err_free_inst;

        if (crypto_skcipher_alg_ivsize(alg))
                goto err_free_inst;

        err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
                                  &alg->base);
        if (err)
                goto err_free_inst;

        err = -EINVAL;
        cipher_name = alg->base.cra_name;
        /* Alas we screwed up the naming so we have to mangle the
         * cipher name.
         */
        if (!strncmp(cipher_name, "ecb(", 4)) {
                unsigned len;

                len = strlcpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
                if (len < 2 || len >= sizeof(ctx->name))
                        goto err_free_inst;

                if (ctx->name[len - 1] != ')')
                        goto err_free_inst;

                ctx->name[len - 1] = 0;
                if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
                             "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) {
                        err = -ENAMETOOLONG;
                        goto err_free_inst;
                }
        } else
                goto err_free_inst;
        inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.base.cra_priority = alg->base.cra_priority;
        inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
        inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
                                       (__alignof__(u64) - 1);

        inst->alg.ivsize = XTS_BLOCK_SIZE;
        inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) * 2;
        inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) * 2;

        inst->alg.base.cra_ctxsize = sizeof(struct priv);

        inst->alg.init = init_tfm;
        inst->alg.exit = exit_tfm;

        inst->alg.setkey = setkey;
        inst->alg.encrypt = encrypt;
        inst->alg.decrypt = decrypt;

        inst->free = free_inst;
        err = skcipher_register_instance(tmpl, inst);
        if (err) {
err_free_inst:
                free_inst(inst);
        }
        return err;
}
static struct crypto_template crypto_tmpl = {
        .name = "xts",
        .create = create,
        .module = THIS_MODULE,
};
static int __init crypto_module_init(void)
{
        return crypto_register_template(&crypto_tmpl);
}
static void __exit crypto_module_exit(void)
{
        crypto_unregister_template(&crypto_tmpl);
}
subsys_initcall(crypto_module_init);
module_exit(crypto_module_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");