/*
 * Shared glue code for 128bit block ciphers
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */
#include <linux/module.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/skcipher.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/crypto/glue_helper.h>
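
/*
 * Overview: each cipher driver hands these helpers a common_glue_ctx
 * describing its available batch widths.  funcs[] must be ordered from the
 * widest batch down to a single-block entry (num_blocks == 1), and
 * fpu_blocks_limit gives the minimum block count for which saving the FPU
 * state is worthwhile (-1 meaning the FPU is not needed at all).
 *
 * A minimal sketch of such a table (hypothetical example_* symbols, for
 * illustration only -- the real tables live in the individual cipher glue
 * files, e.g. serpent_avx_glue.c):
 *
 *	static const struct common_glue_ctx example_enc = {
 *		.num_funcs = 2,
 *		.fpu_blocks_limit = 8,
 *
 *		.funcs = { {
 *			.num_blocks = 8,
 *			.fn_u = { .ecb = GLUE_FUNC_CAST(example_ecb_enc_8way) }
 *		}, {
 *			.num_blocks = 1,
 *			.fn_u = { .ecb = GLUE_FUNC_CAST(example_encrypt) }
 *		} }
 *	};
 */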
static int __glue_ecb_crypt_128bit(const struct common_glue_ctx *gctx,
				   struct blkcipher_desc *desc,
				   struct blkcipher_walk *walk)
{
	void *ctx = crypto_blkcipher_ctx(desc->tfm);
	const unsigned int bsize = 128 / 8;
	unsigned int nbytes, i, func_bytes;
	bool fpu_enabled = false;
	int err;

	err = blkcipher_walk_virt(desc, walk);

	while ((nbytes = walk->nbytes)) {
		u8 *wsrc = walk->src.virt.addr;
		u8 *wdst = walk->dst.virt.addr;

		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     desc, fpu_enabled, nbytes);

		for (i = 0; i < gctx->num_funcs; i++) {
			func_bytes = bsize * gctx->funcs[i].num_blocks;

			/* Process multi-block batch */
			if (nbytes >= func_bytes) {
				do {
					gctx->funcs[i].fn_u.ecb(ctx, wdst,
								wsrc);

					wsrc += func_bytes;
					wdst += func_bytes;
					nbytes -= func_bytes;
				} while (nbytes >= func_bytes);

				if (nbytes < bsize)
					goto done;
			}
		}

done:
		err = blkcipher_walk_done(desc, walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);
	return err;
}

int glue_ecb_crypt_128bit(const struct common_glue_ctx *gctx,
			  struct blkcipher_desc *desc, struct scatterlist *dst,
			  struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return __glue_ecb_crypt_128bit(gctx, desc, &walk);
}
EXPORT_SYMBOL_GPL(glue_ecb_crypt_128bit);
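
/*
 * CBC encryption is inherently serial: the input of every block is the
 * ciphertext of the previous one, so no multi-block batching is possible
 * here and a single common_glue_func_t is taken instead of a gctx table.
 * Accordingly, this path never touches the FPU.
 */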
static unsigned int __glue_cbc_encrypt_128bit(const common_glue_func_t fn,
					      struct blkcipher_desc *desc,
					      struct blkcipher_walk *walk)
{
	void *ctx = crypto_blkcipher_ctx(desc->tfm);
	const unsigned int bsize = 128 / 8;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	u128 *iv = (u128 *)walk->iv;

	do {
		u128_xor(dst, src, iv);
		fn(ctx, (u8 *)dst, (u8 *)dst);
		iv = dst;

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

	*(u128 *)walk->iv = *iv;
	return nbytes;
}

int glue_cbc_encrypt_128bit(const common_glue_func_t fn,
			    struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		nbytes = __glue_cbc_encrypt_128bit(fn, desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}
EXPORT_SYMBOL_GPL(glue_cbc_encrypt_128bit);
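
/*
 * CBC decryption, unlike encryption, does parallelize: block i needs only
 * ciphertext blocks i and i-1.  __glue_cbc_decrypt_128bit therefore walks
 * the data backwards from the last block and decrypts a whole batch per
 * fn_u.cbc call; the batched implementation chains the inner blocks of the
 * batch itself, while the glue code XORs only the first block of each batch
 * with the ciphertext (or IV) preceding it.  last_iv is saved up front so it
 * can become the chaining value for the next walk segment.
 */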
static unsigned int
__glue_cbc_decrypt_128bit(const struct common_glue_ctx *gctx,
			  struct blkcipher_desc *desc,
			  struct blkcipher_walk *walk)
{
	void *ctx = crypto_blkcipher_ctx(desc->tfm);
	const unsigned int bsize = 128 / 8;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	u128 last_iv;
	unsigned int num_blocks, func_bytes;
	unsigned int i;

	/* Start of the last block. */
	src += nbytes / bsize - 1;
	dst += nbytes / bsize - 1;

	last_iv = *src;

	for (i = 0; i < gctx->num_funcs; i++) {
		num_blocks = gctx->funcs[i].num_blocks;
		func_bytes = bsize * num_blocks;

		/* Process multi-block batch */
		if (nbytes >= func_bytes) {
			do {
				nbytes -= func_bytes - bsize;
				src -= num_blocks - 1;
				dst -= num_blocks - 1;

				gctx->funcs[i].fn_u.cbc(ctx, dst, src);

				nbytes -= bsize;
				if (nbytes < bsize)
					goto done;

				u128_xor(dst, dst, src - 1);
				src -= 1;
				dst -= 1;
			} while (nbytes >= func_bytes);
		}
	}

done:
	u128_xor(dst, dst, (u128 *)walk->iv);
	*(u128 *)walk->iv = last_iv;

	return nbytes;
}

int glue_cbc_decrypt_128bit(const struct common_glue_ctx *gctx,
			    struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src, unsigned int nbytes)
{
	const unsigned int bsize = 128 / 8;
	bool fpu_enabled = false;
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     desc, fpu_enabled, nbytes);
		nbytes = __glue_cbc_decrypt_128bit(gctx, desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);
	return err;
}
EXPORT_SYMBOL_GPL(glue_cbc_decrypt_128bit);
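
/*
 * CTR mode: every block's keystream depends only on the counter value, so
 * batching is straightforward.  The counter is carried in little-endian
 * form (le128) across the batch calls -- the fn_u.ctr implementations
 * expect it that way -- and converted back into the big-endian walk IV
 * when the segment is done.  A trailing partial block is handled by
 * glue_ctr_crypt_final_128bit, which bounces the data through a
 * stack-local u128 so the cipher always sees a full block.
 */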
static void glue_ctr_crypt_final_128bit(const common_glue_ctr_func_t fn_ctr,
					struct blkcipher_desc *desc,
					struct blkcipher_walk *walk)
{
	void *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 *src = (u8 *)walk->src.virt.addr;
	u8 *dst = (u8 *)walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;
	le128 ctrblk;
	u128 tmp;

	be128_to_le128(&ctrblk, (be128 *)walk->iv);

	memcpy(&tmp, src, nbytes);
	fn_ctr(ctx, &tmp, &tmp, &ctrblk);
	memcpy(dst, &tmp, nbytes);

	le128_to_be128((be128 *)walk->iv, &ctrblk);
}

static unsigned int __glue_ctr_crypt_128bit(const struct common_glue_ctx *gctx,
					    struct blkcipher_desc *desc,
					    struct blkcipher_walk *walk)
{
	const unsigned int bsize = 128 / 8;
	void *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	le128 ctrblk;
	unsigned int num_blocks, func_bytes;
	unsigned int i;

	be128_to_le128(&ctrblk, (be128 *)walk->iv);

	/* Process multi-block batch */
	for (i = 0; i < gctx->num_funcs; i++) {
		num_blocks = gctx->funcs[i].num_blocks;
		func_bytes = bsize * num_blocks;

		if (nbytes >= func_bytes) {
			do {
				gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk);

				src += num_blocks;
				dst += num_blocks;
				nbytes -= func_bytes;
			} while (nbytes >= func_bytes);

			if (nbytes < bsize)
				goto done;
		}
	}

done:
	le128_to_be128((be128 *)walk->iv, &ctrblk);
	return nbytes;
}

int glue_ctr_crypt_128bit(const struct common_glue_ctx *gctx,
			  struct blkcipher_desc *desc, struct scatterlist *dst,
			  struct scatterlist *src, unsigned int nbytes)
{
	const unsigned int bsize = 128 / 8;
	bool fpu_enabled = false;
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	while ((nbytes = walk.nbytes) >= bsize) {
		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     desc, fpu_enabled, nbytes);
		nbytes = __glue_ctr_crypt_128bit(gctx, desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);

	if (walk.nbytes) {
		glue_ctr_crypt_final_128bit(
			gctx->funcs[gctx->num_funcs - 1].fn_u.ctr, desc, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}
EXPORT_SYMBOL_GPL(glue_ctr_crypt_128bit);
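
/*
 * XTS: the first tweak value T is derived from the IV with the cipher's
 * tweak key (tweak_fn below); every subsequent block's tweak is obtained
 * by multiplying T by x in GF(2^128) (gf128mul_x_ble, see
 * glue_xts_crypt_128bit_one at the end of this file).  Two top-level
 * variants follow: glue_xts_crypt_128bit for the legacy blkcipher API and
 * glue_xts_req_128bit for the skcipher request API.
 */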
static unsigned int __glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
					    void *ctx,
					    struct blkcipher_desc *desc,
					    struct blkcipher_walk *walk)
{
	const unsigned int bsize = 128 / 8;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	unsigned int num_blocks, func_bytes;
	unsigned int i;

	/* Process multi-block batch */
	for (i = 0; i < gctx->num_funcs; i++) {
		num_blocks = gctx->funcs[i].num_blocks;
		func_bytes = bsize * num_blocks;

		if (nbytes >= func_bytes) {
			do {
				gctx->funcs[i].fn_u.xts(ctx, dst, src,
							(le128 *)walk->iv);

				src += num_blocks;
				dst += num_blocks;
				nbytes -= func_bytes;
			} while (nbytes >= func_bytes);

			if (nbytes < bsize)
				goto done;
		}
	}

done:
	return nbytes;
}

static unsigned int __glue_xts_req_128bit(const struct common_glue_ctx *gctx,
					  void *ctx,
					  struct skcipher_walk *walk)
{
	const unsigned int bsize = 128 / 8;
	unsigned int nbytes = walk->nbytes;
	u128 *src = walk->src.virt.addr;
	u128 *dst = walk->dst.virt.addr;
	unsigned int num_blocks, func_bytes;
	unsigned int i;

	/* Process multi-block batch */
	for (i = 0; i < gctx->num_funcs; i++) {
		num_blocks = gctx->funcs[i].num_blocks;
		func_bytes = bsize * num_blocks;

		if (nbytes >= func_bytes) {
			do {
				gctx->funcs[i].fn_u.xts(ctx, dst, src,
							walk->iv);

				src += num_blocks;
				dst += num_blocks;
				nbytes -= func_bytes;
			} while (nbytes >= func_bytes);

			if (nbytes < bsize)
				goto done;
		}
	}

done:
	return nbytes;
}

/* for implementations providing a faster XTS IV generator */
int glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
			  struct blkcipher_desc *desc, struct scatterlist *dst,
			  struct scatterlist *src, unsigned int nbytes,
			  void (*tweak_fn)(void *ctx, u8 *dst, const u8 *src),
			  void *tweak_ctx, void *crypt_ctx)
{
	const unsigned int bsize = 128 / 8;
	bool fpu_enabled = false;
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);

	err = blkcipher_walk_virt(desc, &walk);
	nbytes = walk.nbytes;
	if (!nbytes)
		return err;

	/* set minimum length to bsize, for tweak_fn */
	fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
				     desc, fpu_enabled,
				     nbytes < bsize ? bsize : nbytes);

	/* calculate first value of T */
	tweak_fn(tweak_ctx, walk.iv, walk.iv);

	while (nbytes) {
		nbytes = __glue_xts_crypt_128bit(gctx, crypt_ctx, desc, &walk);

		err = blkcipher_walk_done(desc, &walk, nbytes);
		nbytes = walk.nbytes;
	}

	glue_fpu_end(fpu_enabled);

	return err;
}
EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit);
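
/* skcipher-API counterpart of glue_xts_crypt_128bit above */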
int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
			struct skcipher_request *req,
			common_glue_func_t tweak_fn, void *tweak_ctx,
			void *crypt_ctx)
{
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	nbytes = walk.nbytes;
	if (!nbytes)
		return err;

	/* set minimum length to bsize, for tweak_fn */
	fpu_enabled = glue_skwalk_fpu_begin(bsize, gctx->fpu_blocks_limit,
					    &walk, fpu_enabled,
					    nbytes < bsize ? bsize : nbytes);

	/* calculate first value of T */
	tweak_fn(tweak_ctx, walk.iv, walk.iv);

	while (nbytes) {
		nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);

		err = skcipher_walk_done(&walk, nbytes);
		nbytes = walk.nbytes;
	}

	glue_fpu_end(fpu_enabled);

	return err;
}
EXPORT_SYMBOL_GPL(glue_xts_req_128bit);
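
/*
 * Single-block XTS fallback, used by the num_blocks == 1 table entries.
 * A driver typically wraps it in a small helper matching the fn_u.xts
 * signature, along the lines of (hypothetical example_* names, for
 * illustration only):
 *
 *	static void example_xts_dec(void *ctx, u128 *dst, const u128 *src,
 *				    le128 *iv)
 *	{
 *		glue_xts_crypt_128bit_one(ctx, dst, src, iv,
 *					  GLUE_FUNC_CAST(example_decrypt));
 *	}
 */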
void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv,
			       common_glue_func_t fn)
{
	le128 ivblk = *iv;

	/* generate next IV */
	gf128mul_x_ble(iv, &ivblk);

	/* CC <- T xor C */
	u128_xor(dst, src, (u128 *)&ivblk);

	/* PP <- D(Key2,CC) */
	fn(ctx, (u8 *)dst, (u8 *)dst);

	/* P <- T xor PP */
	u128_xor(dst, dst, (u128 *)&ivblk);
}
EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit_one);

MODULE_LICENSE("GPL");