/* XTS: as defined in IEEE1619/D16
 * http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 * (sector sizes which are not a multiple of 16 bytes are,
 * however currently unsupported)
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
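/*
 * High-level sketch of the mode as implemented below: the supplied key is
 * split in two.  The second half keys a single-block "tweak" cipher that
 * encrypts the IV to produce the first tweak value T.  Each 16-byte block
 * is then processed as
 *
 *	C = E(Key1, P xor T) xor T
 *
 * with T multiplied by x in GF(2^128) (gf128mul_x_ble) between blocks.
 */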
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

#define XTS_BUFFER_SIZE 128u
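/*
 * Typical usage sketch (illustrative only, assuming a registered AES
 * implementation; not part of this file's API):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_skcipher_setkey(tfm, key, 64);
 *
 * where the 64-byte key holds the two 256-bit halves; encryption and
 * decryption then proceed through ordinary skcipher requests.
 */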
struct priv {
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
};
struct xts_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	char name[CRYPTO_MAX_ALG_NAME];
};
struct rctx {
	le128 buf[XTS_BUFFER_SIZE / sizeof(le128)];

	le128 t;

	le128 *ext;

	struct scatterlist srcbuf[2];
	struct scatterlist dstbuf[2];
	struct scatterlist *src;
	struct scatterlist *dst;

	unsigned int left;

	struct skcipher_request subreq;
};
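/*
 * setkey() splits the supplied key in two: the first half keys the child
 * (data) cipher, the second half the tweak cipher.  xts_verify_key() has
 * already rejected odd key lengths (and, in FIPS mode, keys whose two
 * halves are identical).
 */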
static int setkey(struct crypto_skcipher *parent, const u8 *key,
		  unsigned int keylen)
{
	struct priv *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
	int err;

	err = xts_verify_key(parent, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* we need two cipher instances: one to compute the initial 'tweak'
	 * by encrypting the IV (usually the 'plain' iv) and the other
	 * one to encrypt and decrypt the data */

	/* tweak cipher, uses Key2 i.e. the second half of *key */
	tweak = ctx->tweak;
	crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tweak, key + keylen, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(tweak) &
					  CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	/* data cipher, uses Key1 i.e. the first half of *key */
	child = ctx->child;
	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);

	return err;
}
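/*
 * post_crypt() runs after the child cipher has processed one chunk: it XORs
 * the tweak values saved by pre_crypt() into buf/ext back into the
 * ciphertext and, if more data remains, rebuilds rctx->dst so the next
 * chunk starts at the right offset within the destination scatterlist.
 */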
static int post_crypt(struct skcipher_request *req)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	le128 *buf = rctx->ext ?: rctx->buf;
	struct skcipher_request *subreq;
	const int bs = XTS_BLOCK_SIZE;
	struct skcipher_walk w;
	struct scatterlist *sg;
	unsigned offset;
	int err;

	subreq = &rctx->subreq;
	err = skcipher_walk_virt(&w, subreq, false);

	while (w.nbytes) {
		unsigned int avail = w.nbytes;
		le128 *wdst;

		wdst = w.dst.virt.addr;

		do {
			le128_xor(wdst, buf++, wdst);
			wdst++;
		} while ((avail -= bs) >= bs);

		err = skcipher_walk_done(&w, avail);
	}

	rctx->left -= subreq->cryptlen;

	if (err || !rctx->left)
		goto out;

	rctx->dst = rctx->dstbuf;

	scatterwalk_done(&w.out, 0, 1);
	sg = w.out.sg;
	offset = w.out.offset;

	if (rctx->dst != sg) {
		rctx->dst[0] = *sg;
		sg_unmark_end(rctx->dst);
		scatterwalk_crypto_chain(rctx->dst, sg_next(sg), 0, 2);
	}

	rctx->dst[0].length -= offset - sg->offset;
	rctx->dst[0].offset = offset;

out:
	return err;
}
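/*
 * pre_crypt() prepares the next chunk: it XORs the running tweak T into the
 * plaintext, saving each T value into buf/ext for post_crypt(), advances T
 * with gf128mul_x_ble(), and points the subrequest at the staged data.  If
 * more data remains it also rebuilds rctx->src for the following chunk.
 */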
static int pre_crypt(struct skcipher_request *req)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	le128 *buf = rctx->ext ?: rctx->buf;
	struct skcipher_request *subreq;
	const int bs = XTS_BLOCK_SIZE;
	struct skcipher_walk w;
	struct scatterlist *sg;
	unsigned cryptlen;
	unsigned offset;
	bool more;
	int err;

	subreq = &rctx->subreq;
	cryptlen = subreq->cryptlen;

	more = rctx->left > cryptlen;
	if (!more)
		cryptlen = rctx->left;

	skcipher_request_set_crypt(subreq, rctx->src, rctx->dst,
				   cryptlen, NULL);

	err = skcipher_walk_virt(&w, subreq, false);

	while (w.nbytes) {
		unsigned int avail = w.nbytes;
		le128 *wsrc;
		le128 *wdst;

		wsrc = w.src.virt.addr;
		wdst = w.dst.virt.addr;

		do {
			*buf++ = rctx->t;
			le128_xor(wdst++, &rctx->t, wsrc++);
			gf128mul_x_ble(&rctx->t, &rctx->t);
		} while ((avail -= bs) >= bs);

		err = skcipher_walk_done(&w, avail);
	}

	skcipher_request_set_crypt(subreq, rctx->dst, rctx->dst,
				   cryptlen, NULL);

	if (err || !more)
		goto out;

	rctx->src = rctx->srcbuf;

	scatterwalk_done(&w.in, 0, 1);
	sg = w.in.sg;
	offset = w.in.offset;

	if (rctx->src != sg) {
		rctx->src[0] = *sg;
		sg_unmark_end(rctx->src);
		scatterwalk_crypto_chain(rctx->src, sg_next(sg), 0, 2);
	}

	rctx->src[0].length -= offset - sg->offset;
	rctx->src[0].offset = offset;

out:
	return err;
}
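/*
 * init_crypt() wires the subrequest to the child cipher, sizes the chunk
 * (trying to kmalloc() up to a page of tweak storage and falling back to
 * the fixed 128-byte buffer in the request context), and computes the first
 * tweak T by encrypting the IV with the tweak cipher.
 */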
static int init_crypt(struct skcipher_request *req, crypto_completion_t done)
{
	struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq;
	gfp_t gfp;

	subreq = &rctx->subreq;
	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, done, req);

	gfp = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
							   GFP_ATOMIC;
	rctx->ext = NULL;

	subreq->cryptlen = XTS_BUFFER_SIZE;
	if (req->cryptlen > XTS_BUFFER_SIZE) {
		unsigned int n = min(req->cryptlen, (unsigned int)PAGE_SIZE);

		rctx->ext = kmalloc(n, gfp);
		if (rctx->ext)
			subreq->cryptlen = n;
	}

	rctx->src = req->src;
	rctx->dst = req->dst;
	rctx->left = req->cryptlen;

	/* calculate first value of T */
	crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);

	return 0;
}
static void exit_crypt(struct skcipher_request *req)
{
	struct rctx *rctx = skcipher_request_ctx(req);

	rctx->left = 0;

	if (rctx->ext)
		kzfree(rctx->ext);
}
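/*
 * do_encrypt() drives the chunked pipeline: for each chunk it runs
 * pre_crypt(), the child cipher, then post_crypt(), until rctx->left hits
 * zero.  Async completions (-EINPROGRESS/-EBUSY) are resumed from
 * encrypt_done() below.
 */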
static int do_encrypt(struct skcipher_request *req, int err)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq;

	subreq = &rctx->subreq;

	while (!err && rctx->left) {
		err = pre_crypt(req) ?:
		      crypto_skcipher_encrypt(subreq) ?:
		      post_crypt(req);

		if (err == -EINPROGRESS || err == -EBUSY)
			return err;
	}

	exit_crypt(req);
	return err;
}
static void encrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;
	struct skcipher_request *subreq;
	struct rctx *rctx;

	rctx = skcipher_request_ctx(req);

	if (err == -EINPROGRESS) {
		if (rctx->left != req->cryptlen)
			return;
		goto out;
	}

	subreq = &rctx->subreq;
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = do_encrypt(req, err ?: post_crypt(req));
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	skcipher_request_complete(req, err);
}
static int encrypt(struct skcipher_request *req)
{
	return do_encrypt(req, init_crypt(req, encrypt_done));
}
static int do_decrypt(struct skcipher_request *req, int err)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq;

	subreq = &rctx->subreq;

	while (!err && rctx->left) {
		err = pre_crypt(req) ?:
		      crypto_skcipher_decrypt(subreq) ?:
		      post_crypt(req);

		if (err == -EINPROGRESS || err == -EBUSY)
			return err;
	}

	exit_crypt(req);
	return err;
}
static void decrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;
	struct skcipher_request *subreq;
	struct rctx *rctx;

	rctx = skcipher_request_ctx(req);

	if (err == -EINPROGRESS) {
		if (rctx->left != req->cryptlen)
			return;
		goto out;
	}

	subreq = &rctx->subreq;
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = do_decrypt(req, err ?: post_crypt(req));
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	skcipher_request_complete(req, err);
}
static int decrypt(struct skcipher_request *req)
{
	return do_decrypt(req, init_crypt(req, decrypt_done));
}
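/*
 * xts_crypt() is the legacy helper kept for blkcipher-based callers (such
 * as older arch-specific glue code) that supply their own tweak and ECB
 * routines via struct xts_crypt_req.  It precomputes up to req->tbuflen
 * worth of tweak values at a time so crypt_fn can work on whole batches.
 */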
int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst,
	      struct scatterlist *ssrc, unsigned int nbytes,
	      struct xts_crypt_req *req)
{
	const unsigned int bsize = XTS_BLOCK_SIZE;
	const unsigned int max_blks = req->tbuflen / bsize;
	struct blkcipher_walk walk;
	unsigned int nblocks;
	le128 *src, *dst, *t;
	le128 *t_buf = req->tbuf;
	int err, i;

	BUG_ON(max_blks < 1);

	blkcipher_walk_init(&walk, sdst, ssrc, nbytes);

	err = blkcipher_walk_virt(desc, &walk);
	nbytes = walk.nbytes;
	if (!nbytes)
		return err;

	nblocks = min(nbytes / bsize, max_blks);
	src = (le128 *)walk.src.virt.addr;
	dst = (le128 *)walk.dst.virt.addr;

	/* calculate first value of T */
	req->tweak_fn(req->tweak_ctx, (u8 *)&t_buf[0], walk.iv);

	i = 0;
	goto first;

	for (;;) {
		do {
			for (i = 0; i < nblocks; i++) {
				gf128mul_x_ble(&t_buf[i], t);
first:
				t = &t_buf[i];

				/* PP <- T xor P */
				le128_xor(dst + i, t, src + i);
			}

			/* CC <- E(Key1,PP) */
			req->crypt_fn(req->crypt_ctx, (u8 *)dst,
				      nblocks * bsize);

			/* C <- T xor CC */
			for (i = 0; i < nblocks; i++)
				le128_xor(dst + i, dst + i, &t_buf[i]);

			src += nblocks;
			dst += nblocks;
			nbytes -= nblocks * bsize;
			nblocks = min(nbytes / bsize, max_blks);
		} while (nblocks > 0);

		*(le128 *)walk.iv = *t;

		err = blkcipher_walk_done(desc, &walk, nbytes);
		nbytes = walk.nbytes;
		if (!nbytes)
			break;

		nblocks = min(nbytes / bsize, max_blks);
		src = (le128 *)walk.src.virt.addr;
		dst = (le128 *)walk.dst.virt.addr;
	}

	return err;
}
EXPORT_SYMBOL_GPL(xts_crypt);
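/*
 * init_tfm() allocates the per-transform resources: the child skcipher
 * from the instance's spawn and a separate single-block cipher (looked up
 * by the name saved in create()) for computing the initial tweak.
 */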
static int init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct priv *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;

	child = crypto_spawn_skcipher(&ictx->spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;

	tweak = crypto_alloc_cipher(ictx->name, 0, 0);
	if (IS_ERR(tweak)) {
		crypto_free_skcipher(ctx->child);
		return PTR_ERR(tweak);
	}

	ctx->tweak = tweak;

	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
					 sizeof(struct rctx));

	return 0;
}
static void exit_tfm(struct crypto_skcipher *tfm)
{
	struct priv *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
	crypto_free_cipher(ctx->tweak);
}
static void free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}
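/*
 * create() instantiates the template.  It first tries to grab the
 * requested algorithm name directly and, if that fails with -ENOENT,
 * retries with the name wrapped in "ecb(...)".  The raw cipher name is
 * recovered afterwards (by stripping the "ecb(" prefix) so init_tfm() can
 * allocate the matching tweak cipher and the instance can be registered
 * as "xts(<cipher>)".
 */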
static int create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct xts_instance_ctx *ctx;
	struct skcipher_alg *alg;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));

	mask = crypto_requires_off(algt->type, algt->mask,
				   CRYPTO_ALG_NEED_FALLBACK |
				   CRYPTO_ALG_ASYNC);

	err = crypto_grab_skcipher(&ctx->spawn, cipher_name, 0, mask);
	if (err == -ENOENT) {
		err = -ENAMETOOLONG;
		if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
			     cipher_name) >= CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		err = crypto_grab_skcipher(&ctx->spawn, ctx->name, 0, mask);
	}

	if (err)
		goto err_free_inst;
	alg = crypto_skcipher_spawn_alg(&ctx->spawn);

	err = -EINVAL;
	if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
		goto err_drop_spawn;

	if (crypto_skcipher_alg_ivsize(alg))
		goto err_drop_spawn;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
				  &alg->base);
	if (err)
		goto err_drop_spawn;

	err = -EINVAL;
	cipher_name = alg->base.cra_name;

	/* Alas we screwed up the naming so we have to mangle the
	 * cipher name.
	 */
	if (!strncmp(cipher_name, "ecb(", 4)) {
		unsigned len;

		len = strlcpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
		if (len < 2 || len >= sizeof(ctx->name))
			goto err_drop_spawn;

		if (ctx->name[len - 1] != ')')
			goto err_drop_spawn;

		ctx->name[len - 1] = 0;

		if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
			     "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) {
			err = -ENAMETOOLONG;
			goto err_drop_spawn;
		}
	} else
		goto err_drop_spawn;
	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
				       (__alignof__(u64) - 1);

	inst->alg.ivsize = XTS_BLOCK_SIZE;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) * 2;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) * 2;

	inst->alg.base.cra_ctxsize = sizeof(struct priv);

	inst->alg.init = init_tfm;
	inst->alg.exit = exit_tfm;

	inst->alg.setkey = setkey;
	inst->alg.encrypt = encrypt;
	inst->alg.decrypt = decrypt;

	inst->free = free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(&ctx->spawn);
err_free_inst:
	kfree(inst);
	goto out;
}
static struct crypto_template crypto_tmpl = {
	.name = "xts",
	.create = create,
	.module = THIS_MODULE,
};
static int __init crypto_module_init(void)
{
	return crypto_register_template(&crypto_tmpl);
}
static void __exit crypto_module_exit(void)
{
	crypto_unregister_template(&crypto_tmpl);
}
module_init(crypto_module_init);
module_exit(crypto_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");