/*
 * CTS: Cipher Text Stealing mode
 *
 * COPYRIGHT (c) 2008
 * The Regents of the University of Michigan
 * ALL RIGHTS RESERVED
 *
 * Permission is granted to use, copy, create derivative works
 * and redistribute this software and such derivative works
 * for any purpose, so long as the name of The University of
 * Michigan is not used in any advertising or publicity
 * pertaining to the use of distribution of this software
 * without specific, written prior authorization. If the
 * above copyright notice or any other identification of the
 * University of Michigan is included in any copy of any
 * portion of this software, then the disclaimer below must
 * also be included.
 *
 * THIS SOFTWARE IS PROVIDED AS IS, WITHOUT REPRESENTATION
 * FROM THE UNIVERSITY OF MICHIGAN AS TO ITS FITNESS FOR ANY
 * PURPOSE, AND WITHOUT WARRANTY BY THE UNIVERSITY OF
 * MICHIGAN OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING
 * WITHOUT LIMITATION THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
 * REGENTS OF THE UNIVERSITY OF MICHIGAN SHALL NOT BE LIABLE
 * FOR ANY DAMAGES, INCLUDING SPECIAL, INDIRECT, INCIDENTAL, OR
 * CONSEQUENTIAL DAMAGES, WITH RESPECT TO ANY CLAIM ARISING
 * OUT OF OR IN CONNECTION WITH THE USE OF THE SOFTWARE, EVEN
 * IF IT HAS BEEN OR IS HEREAFTER ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGES.
 */

/* Derived from various:
 *	Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This is the Cipher Text Stealing mode as described by
 * Section 8 of rfc2040 and referenced by rfc3962.
 * rfc3962 includes errata information in its Appendix A.
 */

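/*
 * Informal sketch of the stealing step (notation is illustrative, not
 * quoted from the RFCs): for a message of n blocks whose final block Pn
 * holds only Ln bytes, blocks 1..n-1 are CBC-encrypted as usual and the
 * last two ciphertext blocks are then rewritten as
 *
 *	Cn-1 = E(K, (Pn || 0^(B-Ln)) XOR C'n-1)		(a full block)
 *	Cn   = the first Ln bytes of C'n-1		(truncated)
 *
 * where C'n-1 is the ordinary CBC ciphertext of block n-1 and B is the
 * cipher block size.  The ciphertext is thus exactly as long as the
 * plaintext and no padding is required.
 */
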
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>
#include <linux/slab.h>

struct crypto_cts_ctx {
	struct crypto_skcipher *child;
};

struct crypto_cts_reqctx {
	struct scatterlist sg[2];
	unsigned offset;
	struct skcipher_request subreq;
};

static inline u8 *crypto_cts_reqctx_space(struct skcipher_request *req)
{
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;

	return PTR_ALIGN((u8 *)(rctx + 1) + crypto_skcipher_reqsize(child),
			 crypto_skcipher_alignmask(tfm) + 1);
}

static int crypto_cts_setkey(struct crypto_skcipher *parent, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;
	int err;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}

static void cts_cbc_crypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (err == -EINPROGRESS)
		return;

	skcipher_request_complete(req, err);
}

static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	u8 d[bsize * 2] __attribute__ ((aligned(__alignof__(u32))));
	struct scatterlist *sg;
	unsigned int offset;
	int lastn;

	offset = rctx->offset;
	lastn = req->cryptlen - offset;

	/* Grab the last full ciphertext block produced by the CBC pass. */
	sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);
	scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);

	/* Zero-pad the final partial plaintext block to a full block. */
	memset(d, 0, bsize);
	scatterwalk_map_and_copy(d, req->src, offset, lastn, 0);

	/*
	 * Write the padded plaintext block in place of Cn-1 and steal the
	 * first lastn bytes of Cn-1 as the truncated final block.
	 */
	scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
	memzero_explicit(d, sizeof(d));

	/* CBC-encrypt the padded block in place to produce the new Cn-1. */
	skcipher_request_set_callback(subreq, req->base.flags &
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      cts_cbc_crypt_done, req);
	skcipher_request_set_crypt(subreq, sg, sg, bsize, req->iv);
	return crypto_skcipher_encrypt(subreq);
}

static void crypto_cts_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (err)
		goto out;

	err = cts_cbc_encrypt(req);
	if (err == -EINPROGRESS ||
	    (err == -EBUSY && req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
		return;

out:
	skcipher_request_complete(req, err);
}

static int crypto_cts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	unsigned int nbytes = req->cryptlen;
	int cbc_blocks = (nbytes + bsize - 1) / bsize - 1;
	unsigned int offset;

	skcipher_request_set_tfm(subreq, ctx->child);

	if (cbc_blocks <= 0) {
		/* At most one block: no stealing needed, plain CBC will do. */
		skcipher_request_set_callback(subreq, req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes,
					   req->iv);
		return crypto_skcipher_encrypt(subreq);
	}

	/* CBC-encrypt everything up to the final (possibly partial) block. */
	offset = cbc_blocks * bsize;
	rctx->offset = offset;

	skcipher_request_set_callback(subreq, req->base.flags,
				      crypto_cts_encrypt_done, req);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   offset, req->iv);

	return crypto_skcipher_encrypt(subreq) ?:
	       cts_cbc_encrypt(req);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	u8 d[bsize * 2] __attribute__ ((aligned(__alignof__(u32))));
	struct scatterlist *sg;
	unsigned int offset;
	u8 *space;
	int lastn;

	offset = rctx->offset;
	lastn = req->cryptlen - offset;

	sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);

	/* 1. Decrypt Cn-1 (s) to create Dn */
	scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);
	space = crypto_cts_reqctx_space(req);
	crypto_xor(d + bsize, space, bsize);
	/* 2. Pad Cn with zeros at the end to create C of length BB */
	memset(d, 0, bsize);
	scatterwalk_map_and_copy(d, req->src, offset, lastn, 0);
	/* 3. Exclusive-or Dn with C to create Xn */
	/* 4. Select the first Ln bytes of Xn to create Pn */
	crypto_xor(d + bsize, d, lastn);

	/* 5. Append the tail (BB - Ln) bytes of Xn to Cn to create En */
	memcpy(d + lastn, d + bsize + lastn, bsize - lastn);
	/* 6. Decrypt En to create Pn-1 */

	scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
	memzero_explicit(d, sizeof(d));

	skcipher_request_set_callback(subreq, req->base.flags &
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      cts_cbc_crypt_done, req);

	skcipher_request_set_crypt(subreq, sg, sg, bsize, space);
	return crypto_skcipher_decrypt(subreq);
}

static void crypto_cts_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (err)
		goto out;

	err = cts_cbc_decrypt(req);
	if (err == -EINPROGRESS ||
	    (err == -EBUSY && req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
		return;

out:
	skcipher_request_complete(req, err);
}

static int crypto_cts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	unsigned int nbytes = req->cryptlen;
	int cbc_blocks = (nbytes + bsize - 1) / bsize - 1;
	unsigned int offset;
	u8 *space;

	skcipher_request_set_tfm(subreq, ctx->child);

	if (cbc_blocks <= 0) {
		/* At most one block: no stealing needed, plain CBC will do. */
		skcipher_request_set_callback(subreq, req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes,
					   req->iv);
		return crypto_skcipher_decrypt(subreq);
	}

	skcipher_request_set_callback(subreq, req->base.flags,
				      crypto_cts_decrypt_done, req);

	space = crypto_cts_reqctx_space(req);

	offset = cbc_blocks * bsize;
	rctx->offset = offset;

	/*
	 * Save the ciphertext block preceding Cn-1 (or the IV when the
	 * message is only two blocks long); cts_cbc_decrypt() needs it to
	 * undo the CBC chaining on the final two blocks.
	 */
	if (cbc_blocks <= 1)
		memcpy(space, req->iv, bsize);
	else
		scatterwalk_map_and_copy(space, req->src, offset - 2 * bsize,
					 bsize, 0);

	/* CBC-decrypt everything up to the final (possibly partial) block. */
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   offset, req->iv);

	return crypto_skcipher_decrypt(subreq) ?:
	       cts_cbc_decrypt(req);
}

static int crypto_cts_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned reqsize;
	unsigned bsize;
	unsigned align;

	cipher = crypto_spawn_skcipher2(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	bsize = crypto_skcipher_blocksize(cipher);
	reqsize = ALIGN(sizeof(struct crypto_cts_reqctx) +
			crypto_skcipher_reqsize(cipher),
			crypto_tfm_ctx_alignment()) +
		  (align & ~(crypto_tfm_ctx_alignment() - 1)) + bsize;

	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_cts_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_cts_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}

static int crypto_cts_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_skcipher_spawn *spawn;
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct skcipher_alg *alg;
	const char *cipher_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher2(spawn, cipher_name, 0,
				    crypto_requires_sync(algt->type,
							 algt->mask));
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* Only accept a CBC mode cipher whose IV size equals its block size. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != alg->base.cra_blocksize)
		goto err_drop_spawn;

	if (strncmp(alg->base.cra_name, "cbc(", 4))
		goto err_drop_spawn;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "cts",
				  &alg->base);
	if (err)
		goto err_drop_spawn;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	/* We access the data as u32s when xoring. */
	inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;

	inst->alg.ivsize = alg->base.cra_blocksize;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_cts_ctx);

	inst->alg.init = crypto_cts_init_tfm;
	inst->alg.exit = crypto_cts_exit_tfm;

	inst->alg.setkey = crypto_cts_setkey;
	inst->alg.encrypt = crypto_cts_encrypt;
	inst->alg.decrypt = crypto_cts_decrypt;

	inst->free = crypto_cts_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_cts_tmpl = {
	.name = "cts",
	.create = crypto_cts_create,
	.module = THIS_MODULE,
};

static int __init crypto_cts_module_init(void)
{
	return crypto_register_template(&crypto_cts_tmpl);
}

static void __exit crypto_cts_module_exit(void)
{
	crypto_unregister_template(&crypto_cts_tmpl);
}

module_init(crypto_cts_module_init);
module_exit(crypto_cts_module_exit);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_DESCRIPTION("CTS-CBC CipherText Stealing for CBC");
MODULE_ALIAS_CRYPTO("cts");
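
/*
 * Illustrative usage sketch, assuming an AES child cipher ("cts(cbc(aes))");
 * it is not part of the module itself.  Error handling and the asynchronous
 * completion path are omitted, and buflen must be at least one cipher block
 * but need not be a multiple of it:
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	u8 iv[16];
 *
 *	tfm = crypto_alloc_skcipher("cts(cbc(aes))", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, buflen);
 *	skcipher_request_set_callback(req, 0, NULL, NULL);
 *	skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *	crypto_skcipher_encrypt(req);
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */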