// SPDX-License-Identifier: GPL-2.0-or-later
/* XTS: as defined in IEEE1619/D16
 *	http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

struct priv {
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
};

struct xts_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	char name[CRYPTO_MAX_ALG_NAME];
};

struct rctx {
	le128 t;
	struct scatterlist *tail;
	struct scatterlist sg[2];
	struct skcipher_request subreq;
};

static int setkey(struct crypto_skcipher *parent, const u8 *key,
		  unsigned int keylen)
{
	struct priv *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
	int err;

	err = xts_verify_key(parent, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* we need two cipher instances: one to compute the initial 'tweak'
	 * by encrypting the IV (usually the 'plain' iv) and the other
	 * one to encrypt and decrypt the data */

	/* tweak cipher, uses Key2 i.e. the second half of *key */
	tweak = ctx->tweak;
	crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tweak, key + keylen, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(tweak) &
					  CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	/* data cipher, uses Key1 i.e. the first half of *key */
	child = ctx->child;
	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);

	return err;
}

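/*
 * Illustrative note (not part of the original code): for "xts(aes)" with a
 * 64-byte key, xts_verify_key() rejects keys of odd length (and, in FIPS
 * mode or when weak keys are forbidden, keys whose two halves are
 * identical); after keylen /= 2 the halves are used as
 *
 *	key[0..31]  -> Key1, programmed into the ECB data cipher
 *	key[32..63] -> Key2, programmed into the bare tweak cipher
 */
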
/*
 * We compute the tweak masks twice (both before and after the ECB encryption
 * or decryption) to avoid having to allocate a temporary buffer and/or make
 * multiple calls to the 'ecb(..)' instance, which usually would be slower than
 * just doing the gf128mul_x_ble() calls again.
 */

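/*
 * For reference: gf128mul_x_ble() multiplies the running tweak by x in
 * GF(2^128) modulo the XTS polynomial x^128 + x^7 + x^2 + x + 1, using the
 * little-endian block convention, so the tweak for block i + 1 is derived
 * from the tweak for block i as
 *
 *	T_{i+1} = T_i * x	(a one-bit left shift, XORing 0x87 into the
 *				 low byte when a carry falls out of bit 127)
 */
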
static int xor_tweak(struct skcipher_request *req, bool second_pass, bool enc)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
	const int bs = XTS_BLOCK_SIZE;
	struct skcipher_walk w;
	le128 t = rctx->t;
	int err;

	if (second_pass) {
		req = &rctx->subreq;
		/* set to our TFM to enforce correct alignment: */
		skcipher_request_set_tfm(req, tfm);
	}
	err = skcipher_walk_virt(&w, req, false);

	while (w.nbytes) {
		unsigned int avail = w.nbytes;
		le128 *wsrc;
		le128 *wdst;

		wsrc = w.src.virt.addr;
		wdst = w.dst.virt.addr;

		do {
			if (unlikely(cts) &&
			    w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) {
				if (!enc) {
					if (second_pass)
						rctx->t = t;
					gf128mul_x_ble(&t, &t);
				}
				le128_xor(wdst, &t, wsrc);
				if (enc && second_pass)
					gf128mul_x_ble(&rctx->t, &t);
				skcipher_walk_done(&w, avail - bs);
				return 0;
			}

			le128_xor(wdst++, &t, wsrc++);
			gf128mul_x_ble(&t, &t);
		} while ((avail -= bs) >= bs);

		err = skcipher_walk_done(&w, avail);
	}

	return err;
}

static int xor_tweak_pre(struct skcipher_request *req, bool enc)
{
	return xor_tweak(req, false, enc);
}

static int xor_tweak_post(struct skcipher_request *req, bool enc)
{
	return xor_tweak(req, true, enc);
}

static void cts_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;
	le128 b;

	if (!err) {
		struct rctx *rctx = skcipher_request_ctx(req);

		scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
		le128_xor(&b, &rctx->t, &b);
		scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
	}

	skcipher_request_complete(req, err);
}

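/*
 * Ciphertext stealing, as specified by IEEE 1619 for messages that are not
 * a multiple of the block size: the final partial plaintext block is padded
 * with the trailing bytes of the last full ciphertext block and encrypted
 * with the final tweak, while the leading bytes of that last full ciphertext
 * block become the (shortened) final ciphertext block.  cts_final() below
 * implements this on top of the already-computed ECB output.
 */
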
static int cts_final(struct skcipher_request *req,
		     int (*crypt)(struct skcipher_request *req))
{
	struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int tail = req->cryptlen % XTS_BLOCK_SIZE;
	le128 b[2];
	int err;

	rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
				      offset - XTS_BLOCK_SIZE);

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
	memcpy(b + 1, b, tail);
	scatterwalk_map_and_copy(b, req->src, offset, tail, 0);

	le128_xor(b, &rctx->t, b);

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, cts_done, req);
	skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,
				   XTS_BLOCK_SIZE, NULL);

	err = crypt(subreq);
	if (err)
		return err;

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
	le128_xor(b, &rctx->t, b);
	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);

	return 0;
}

static void encrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (!err) {
		struct rctx *rctx = skcipher_request_ctx(req);

		rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
		err = xor_tweak_post(req, true);

		if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
			err = cts_final(req, crypto_skcipher_encrypt);
			if (err == -EINPROGRESS)
				return;
		}
	}

	skcipher_request_complete(req, err);
}

static void decrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (!err) {
		struct rctx *rctx = skcipher_request_ctx(req);

		rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
		err = xor_tweak_post(req, false);

		if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
			err = cts_final(req, crypto_skcipher_decrypt);
			if (err == -EINPROGRESS)
				return;
		}
	}

	skcipher_request_complete(req, err);
}

static int init_crypt(struct skcipher_request *req, crypto_completion_t compl)
{
	struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, compl, req);
	skcipher_request_set_crypt(subreq, req->dst, req->dst,
				   req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);

	/* calculate first value of T */
	crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);

	return 0;
}

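/*
 * The initial tweak computed above is T_0 = E_{Key2}(IV); for disk
 * encryption the IV is typically the little-endian sector number (e.g.
 * dm-crypt's "plain64" IV mode).  Subsequent tweaks follow by repeated
 * multiplication by x, as done in xor_tweak().
 */
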
static int encrypt(struct skcipher_request *req)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int err;

	err = init_crypt(req, encrypt_done) ?:
	      xor_tweak_pre(req, true) ?:
	      crypto_skcipher_encrypt(subreq) ?:
	      xor_tweak_post(req, true);

	if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
		return err;

	return cts_final(req, crypto_skcipher_encrypt);
}

static int decrypt(struct skcipher_request *req)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int err;

	err = init_crypt(req, decrypt_done) ?:
	      xor_tweak_pre(req, false) ?:
	      crypto_skcipher_decrypt(subreq) ?:
	      xor_tweak_post(req, false);

	if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
		return err;

	return cts_final(req, crypto_skcipher_decrypt);
}

static int init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct priv *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;

	child = crypto_spawn_skcipher(&ictx->spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;

	tweak = crypto_alloc_cipher(ictx->name, 0, 0);
	if (IS_ERR(tweak)) {
		crypto_free_skcipher(ctx->child);
		return PTR_ERR(tweak);
	}

	ctx->tweak = tweak;

	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
					 sizeof(struct rctx));

	return 0;
}

static void exit_tfm(struct crypto_skcipher *tfm)
{
	struct priv *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
	crypto_free_cipher(ctx->tweak);
}

static void free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}

static int create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct xts_instance_ctx *ctx;
	struct skcipher_alg *alg;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));

	mask = crypto_requires_off(algt->type, algt->mask,
				   CRYPTO_ALG_NEED_FALLBACK |
				   CRYPTO_ALG_ASYNC);

	err = crypto_grab_skcipher(&ctx->spawn, cipher_name, 0, mask);
	if (err == -ENOENT) {
		err = -ENAMETOOLONG;
		if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
			     cipher_name) >= CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		err = crypto_grab_skcipher(&ctx->spawn, ctx->name, 0, mask);
	}

	if (err)
		goto err_free_inst;

	alg = crypto_skcipher_spawn_alg(&ctx->spawn);

	err = -EINVAL;
	if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
		goto err_drop_spawn;

	if (crypto_skcipher_alg_ivsize(alg))
		goto err_drop_spawn;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
				  &alg->base);
	if (err)
		goto err_drop_spawn;

	err = -EINVAL;
	cipher_name = alg->base.cra_name;

	/* Alas we screwed up the naming so we have to mangle the
	 * cipher name.
	 */
	if (!strncmp(cipher_name, "ecb(", 4)) {
		unsigned len;

		len = strlcpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
		if (len < 2 || len >= sizeof(ctx->name))
			goto err_drop_spawn;

		if (ctx->name[len - 1] != ')')
			goto err_drop_spawn;

		ctx->name[len - 1] = 0;

		if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
			     "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) {
			err = -ENAMETOOLONG;
			goto err_drop_spawn;
		}
	} else
		goto err_drop_spawn;

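	/*
	 * For instance, a request for "xts(aes)" is satisfied by grabbing
	 * "ecb(aes)", so alg->base.cra_name is "ecb(aes)" and the setname
	 * above would yield "xts(ecb(aes))"; the mangling strips the
	 * "ecb(...)" wrapper so the instance registers as "xts(aes)".
	 */
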
	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
				       (__alignof__(u64) - 1);

	inst->alg.ivsize = XTS_BLOCK_SIZE;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) * 2;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) * 2;

	inst->alg.base.cra_ctxsize = sizeof(struct priv);

	inst->alg.init = init_tfm;
	inst->alg.exit = exit_tfm;

	inst->alg.setkey = setkey;
	inst->alg.encrypt = encrypt;
	inst->alg.decrypt = decrypt;

	inst->free = free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(&ctx->spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_tmpl = {
	.name = "xts",
	.create = create,
	.module = THIS_MODULE,
};

static int __init crypto_module_init(void)
{
	return crypto_register_template(&crypto_tmpl);
}

static void __exit crypto_module_exit(void)
{
	crypto_unregister_template(&crypto_tmpl);
}

subsys_initcall(crypto_module_init);
module_exit(crypto_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");
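
/*
 * Minimal usage sketch (illustrative only; error handling elided, and the
 * buffer `buf`/`buflen` setup is the caller's responsibility):
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 key[32];	// two concatenated AES-128 keys: Key1 || Key2
 *	u8 iv[16];	// e.g. little-endian sector number
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, sizeof(key));
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, buflen);	// buflen >= XTS_BLOCK_SIZE
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */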