// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Shared glue code for 128bit block ciphers
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */
#include <linux/module.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <asm/crypto/glue_helper.h>
int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
			struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int func_bytes;
		unsigned int i;

		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     &walk, fpu_enabled, nbytes);
		for (i = 0; i < gctx->num_funcs; i++) {
			func_bytes = bsize * gctx->funcs[i].num_blocks;

			if (nbytes < func_bytes)
				continue;

			/* Process multi-block batch */
			do {
				gctx->funcs[i].fn_u.ecb(ctx, dst, src);
				src += func_bytes;
				dst += func_bytes;
				nbytes -= func_bytes;
			} while (nbytes >= func_bytes);

			if (nbytes < bsize)
				break;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);
	return err;
}
EXPORT_SYMBOL_GPL(glue_ecb_req_128bit);
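
/*
 * Usage sketch (illustrative only, not part of this file): a cipher
 * driver typically builds a common_glue_ctx table that lists its batch
 * widths from largest to smallest and calls glue_ecb_req_128bit() from
 * its skcipher ->encrypt()/->decrypt() handlers.  The function names
 * below are placeholders for the driver's own routines.
 *
 *	static const struct common_glue_ctx mycipher_enc = {
 *		.num_funcs = 2,
 *		.fpu_blocks_limit = 8,
 *
 *		.funcs = { {
 *			.num_blocks = 8,
 *			.fn_u = { .ecb = mycipher_ecb_enc_8way }
 *		}, {
 *			.num_blocks = 1,
 *			.fn_u = { .ecb = mycipher_encrypt_one }
 *		} }
 *	};
 *
 *	static int ecb_encrypt(struct skcipher_request *req)
 *	{
 *		return glue_ecb_req_128bit(&mycipher_enc, req);
 *	}
 */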
int glue_cbc_encrypt_req_128bit(const common_glue_func_t fn,
				struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		const u128 *src = (u128 *)walk.src.virt.addr;
		u128 *dst = (u128 *)walk.dst.virt.addr;
		u128 *iv = (u128 *)walk.iv;

		do {
			u128_xor(dst, src, iv);
			fn(ctx, (u8 *)dst, (u8 *)dst);
			iv = dst;

			src += 1;
			dst += 1;
			nbytes -= bsize;
		} while (nbytes >= bsize);

		*(u128 *)walk.iv = *iv;
		err = skcipher_walk_done(&walk, nbytes);
	}
	return err;
}
EXPORT_SYMBOL_GPL(glue_cbc_encrypt_req_128bit);
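
/*
 * Usage sketch (illustrative only): CBC encryption is inherently
 * serial, so no batch table is needed; the driver passes its
 * single-block encrypt routine (placeholder name below) directly.
 *
 *	static int cbc_encrypt(struct skcipher_request *req)
 *	{
 *		return glue_cbc_encrypt_req_128bit(mycipher_encrypt_one, req);
 *	}
 */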
int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
				struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		const u128 *src = walk.src.virt.addr;
		u128 *dst = walk.dst.virt.addr;
		unsigned int func_bytes, num_blocks;
		unsigned int i;
		u128 last_iv;

		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     &walk, fpu_enabled, nbytes);
		/* Start of the last block. */
		src += nbytes / bsize - 1;
		dst += nbytes / bsize - 1;

		last_iv = *src;

		for (i = 0; i < gctx->num_funcs; i++) {
			num_blocks = gctx->funcs[i].num_blocks;
			func_bytes = bsize * num_blocks;

			if (nbytes < func_bytes)
				continue;

			/* Process multi-block batch */
			do {
				src -= num_blocks - 1;
				dst -= num_blocks - 1;

				gctx->funcs[i].fn_u.cbc(ctx, (u8 *)dst,
							(const u8 *)src);

				nbytes -= func_bytes;
				if (nbytes < bsize)
					goto done;

				u128_xor(dst, dst, --src);
				dst--;
			} while (nbytes >= func_bytes);
		}
done:
		u128_xor(dst, dst, (u128 *)walk.iv);
		*(u128 *)walk.iv = last_iv;
		err = skcipher_walk_done(&walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);
	return err;
}
EXPORT_SYMBOL_GPL(glue_cbc_decrypt_req_128bit);
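
/*
 * Usage sketch (illustrative only): CBC decryption can be batched, so
 * the driver supplies a common_glue_ctx with .cbc entries (placeholder
 * names below) and calls glue_cbc_decrypt_req_128bit().
 *
 *	static const struct common_glue_ctx mycipher_cbc_dec = {
 *		.num_funcs = 2,
 *		.fpu_blocks_limit = 8,
 *
 *		.funcs = { {
 *			.num_blocks = 8,
 *			.fn_u = { .cbc = mycipher_cbc_dec_8way }
 *		}, {
 *			.num_blocks = 1,
 *			.fn_u = { .cbc = mycipher_decrypt_one }
 *		} }
 *	};
 *
 *	static int cbc_decrypt(struct skcipher_request *req)
 *	{
 *		return glue_cbc_decrypt_req_128bit(&mycipher_cbc_dec, req);
 *	}
 */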
int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
			struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) >= bsize) {
		const u128 *src = walk.src.virt.addr;
		u128 *dst = walk.dst.virt.addr;
		unsigned int func_bytes, num_blocks;
		unsigned int i;
		le128 ctrblk;

		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     &walk, fpu_enabled, nbytes);

		be128_to_le128(&ctrblk, (be128 *)walk.iv);

		for (i = 0; i < gctx->num_funcs; i++) {
			num_blocks = gctx->funcs[i].num_blocks;
			func_bytes = bsize * num_blocks;

			if (nbytes < func_bytes)
				continue;

			/* Process multi-block batch */
			do {
				gctx->funcs[i].fn_u.ctr(ctx, (u8 *)dst,
							(const u8 *)src,
							&ctrblk);
				src += num_blocks;
				dst += num_blocks;
				nbytes -= func_bytes;
			} while (nbytes >= func_bytes);

			if (nbytes < bsize)
				break;
		}

		le128_to_be128((be128 *)walk.iv, &ctrblk);
		err = skcipher_walk_done(&walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);

	if (nbytes) {
		le128 ctrblk;
		u128 tmp;

		be128_to_le128(&ctrblk, (be128 *)walk.iv);
		memcpy(&tmp, walk.src.virt.addr, nbytes);
		gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, (u8 *)&tmp,
							  (const u8 *)&tmp,
							  &ctrblk);
		memcpy(walk.dst.virt.addr, &tmp, nbytes);
		le128_to_be128((be128 *)walk.iv, &ctrblk);

		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}
EXPORT_SYMBOL_GPL(glue_ctr_req_128bit);
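
/*
 * Usage sketch (illustrative only): the .ctr entries (placeholder
 * names below) receive the counter as a little-endian le128 and are
 * expected to both encrypt their blocks and advance the counter.
 *
 *	static const struct common_glue_ctx mycipher_ctr = {
 *		.num_funcs = 2,
 *		.fpu_blocks_limit = 8,
 *
 *		.funcs = { {
 *			.num_blocks = 8,
 *			.fn_u = { .ctr = mycipher_ctr_8way }
 *		}, {
 *			.num_blocks = 1,
 *			.fn_u = { .ctr = mycipher_crypt_ctr_one }
 *		} }
 *	};
 *
 *	static int ctr_crypt(struct skcipher_request *req)
 *	{
 *		return glue_ctr_req_128bit(&mycipher_ctr, req);
 *	}
 */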
static unsigned int __glue_xts_req_128bit(const struct common_glue_ctx *gctx,
					  void *ctx,
					  struct skcipher_walk *walk)
{
	const unsigned int bsize = 128 / 8;
	unsigned int nbytes = walk->nbytes;
	u128 *src = walk->src.virt.addr;
	u128 *dst = walk->dst.virt.addr;
	unsigned int num_blocks, func_bytes;
	unsigned int i;

	/* Process multi-block batch */
	for (i = 0; i < gctx->num_funcs; i++) {
		num_blocks = gctx->funcs[i].num_blocks;
		func_bytes = bsize * num_blocks;

		if (nbytes >= func_bytes) {
			do {
				gctx->funcs[i].fn_u.xts(ctx, (u8 *)dst,
							(const u8 *)src,
							walk->iv);

				src += num_blocks;
				dst += num_blocks;
				nbytes -= func_bytes;
			} while (nbytes >= func_bytes);

			if (nbytes < bsize)
				goto done;
		}
	}

done:
	return nbytes;
}
int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
			struct skcipher_request *req,
			common_glue_func_t tweak_fn, void *tweak_ctx,
			void *crypt_ctx, bool decrypt)
{
	const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
	const unsigned int bsize = 128 / 8;
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes, tail;
	int err;

	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	if (unlikely(cts)) {
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

		tail = req->cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE;

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      crypto_skcipher_get_flags(tfm),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   req->cryptlen - tail, req->iv);
		req = &subreq;
	}

	err = skcipher_walk_virt(&walk, req, false);
	nbytes = walk.nbytes;
	if (err)
		return err;

	/* set minimum length to bsize, for tweak_fn */
	fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
				     &walk, fpu_enabled,
				     nbytes < bsize ? bsize : nbytes);

	/* calculate first value of T */
	tweak_fn(tweak_ctx, walk.iv, walk.iv);

	while (nbytes) {
		nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);

		err = skcipher_walk_done(&walk, nbytes);
		nbytes = walk.nbytes;
	}

	if (unlikely(cts)) {
		u8 *next_tweak, *final_tweak = req->iv;
		struct scatterlist *src, *dst;
		struct scatterlist s[2], d[2];
		le128 b[2];

		dst = src = scatterwalk_ffwd(s, req->src, req->cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(d, req->dst, req->cryptlen);

		if (decrypt) {
			next_tweak = memcpy(b, req->iv, XTS_BLOCK_SIZE);
			gf128mul_x_ble(b, b);
		} else {
			next_tweak = req->iv;
		}

		skcipher_request_set_crypt(&subreq, src, dst, XTS_BLOCK_SIZE,
					   next_tweak);

		err = skcipher_walk_virt(&walk, req, false) ?:
		      skcipher_walk_done(&walk,
				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
		if (err)
			goto out;

		scatterwalk_map_and_copy(b, dst, 0, XTS_BLOCK_SIZE, 0);
		memcpy(b + 1, b, tail - XTS_BLOCK_SIZE);
		scatterwalk_map_and_copy(b, src, XTS_BLOCK_SIZE,
					 tail - XTS_BLOCK_SIZE, 0);
		scatterwalk_map_and_copy(b, dst, 0, tail, 1);

		skcipher_request_set_crypt(&subreq, dst, dst, XTS_BLOCK_SIZE,
					   final_tweak);

		err = skcipher_walk_virt(&walk, req, false) ?:
		      skcipher_walk_done(&walk,
				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
	}

out:
	glue_fpu_end(fpu_enabled);

	return err;
}
EXPORT_SYMBOL_GPL(glue_xts_req_128bit);
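
/*
 * Usage sketch (illustrative only): for XTS the driver keeps two key
 * schedules, one for the tweak and one for the data, and passes its
 * single-block encrypt routine as tweak_fn.  The context layout and
 * all names below are placeholders, not a real driver.
 *
 *	static int xts_encrypt(struct skcipher_request *req)
 *	{
 *		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 *		struct mycipher_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 *
 *		return glue_xts_req_128bit(&mycipher_enc_xts, req,
 *					   mycipher_encrypt_one,
 *					   &ctx->tweak_ctx, &ctx->crypt_ctx,
 *					   false);
 *	}
 */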
void glue_xts_crypt_128bit_one(const void *ctx, u8 *dst, const u8 *src,
			       le128 *iv, common_glue_func_t fn)
{
	le128 ivblk = *iv;

	/* generate next IV */
	gf128mul_x_ble(iv, &ivblk);

	/* CC <- T xor C */
	u128_xor((u128 *)dst, (const u128 *)src, (u128 *)&ivblk);

	/* PP <- D(Key2,CC) */
	fn(ctx, dst, dst);

	/* P <- T xor PP */
	u128_xor((u128 *)dst, (u128 *)dst, (u128 *)&ivblk);
}
EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit_one);

MODULE_LICENSE("GPL");