arch/s390/crypto/des_s390.c
// SPDX-License-Identifier: GPL-2.0+
/*
 * Cryptographic API.
 *
 * s390 implementation of the DES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2003, 2011
 * Author(s): Thomas Spatzier
 *            Jan Glauber (jan.glauber@de.ibm.com)
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/fips.h>
#include <linux/mutex.h>
#include <crypto/algapi.h>
#include <crypto/des.h>
#include <asm/cpacf.h>
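
/*
 * The CTR helpers share one pre-allocated page of counter blocks (ctrblk),
 * serialized by ctrblk_lock. The CPACF function masks below are filled in
 * once at module init and record which KM/KMC/KMCTR functions the machine
 * supports.
 */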

#define DES3_KEY_SIZE	(3 * DES_KEY_SIZE)

static u8 *ctrblk;
static DEFINE_MUTEX(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct s390_des_ctx {
        u8 iv[DES_BLOCK_SIZE];
        u8 key[DES3_KEY_SIZE];
};

static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
                      unsigned int key_len)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 tmp[DES_EXPKEY_WORDS];

        /* check for weak keys */
        if (!des_ekey(tmp, key) &&
            (tfm->crt_flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
                tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
                return -EINVAL;
        }

        memcpy(ctx->key, key, key_len);
        return 0;
}
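
/*
 * Single-block DES en-/decryption is handed directly to the CPACF KM
 * instruction with the DEA function code; CPACF_DECRYPT selects the
 * inverse operation.
 */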
static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        cpacf_km(CPACF_KM_DEA, ctx->key, out, in, DES_BLOCK_SIZE);
}

static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        cpacf_km(CPACF_KM_DEA | CPACF_DECRYPT,
                 ctx->key, out, in, DES_BLOCK_SIZE);
}

static struct crypto_alg des_alg = {
        .cra_name = "des",
        .cra_driver_name = "des-s390",
        .cra_priority = 300,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_module = THIS_MODULE,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = DES_KEY_SIZE,
                        .cia_max_keysize = DES_KEY_SIZE,
                        .cia_setkey = des_setkey,
                        .cia_encrypt = des_encrypt,
                        .cia_decrypt = des_decrypt,
                }
        }
};
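
/*
 * Common ECB walk shared by DES and 3DES: each iteration rounds the
 * available bytes down to a multiple of the block size and hands them to
 * the CPACF KM instruction; fc selects DEA or TDEA-192 plus the optional
 * decrypt modifier.
 */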
static int ecb_desall_crypt(struct blkcipher_desc *desc, unsigned long fc,
                            struct blkcipher_walk *walk)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int nbytes, n;
        int ret;

        ret = blkcipher_walk_virt(desc, walk);
        while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
                /* only use complete blocks */
                n = nbytes & ~(DES_BLOCK_SIZE - 1);
                cpacf_km(fc, ctx->key, walk->dst.virt.addr,
                         walk->src.virt.addr, n);
                ret = blkcipher_walk_done(desc, walk, nbytes - n);
        }
        return ret;
}
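
/*
 * Common CBC walk: the on-stack param block mirrors the layout the CPACF
 * KMC instruction expects (IV followed by the key). The chaining value is
 * copied back into walk->iv at the end so subsequent calls continue the
 * chain correctly.
 */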
static int cbc_desall_crypt(struct blkcipher_desc *desc, unsigned long fc,
                            struct blkcipher_walk *walk)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int nbytes, n;
        int ret;
        struct {
                u8 iv[DES_BLOCK_SIZE];
                u8 key[DES3_KEY_SIZE];
        } param;

        ret = blkcipher_walk_virt(desc, walk);
        memcpy(param.iv, walk->iv, DES_BLOCK_SIZE);
        memcpy(param.key, ctx->key, DES3_KEY_SIZE);
        while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
                /* only use complete blocks */
                n = nbytes & ~(DES_BLOCK_SIZE - 1);
                cpacf_kmc(fc, &param, walk->dst.virt.addr,
                          walk->src.virt.addr, n);
                ret = blkcipher_walk_done(desc, walk, nbytes - n);
        }
        memcpy(walk->iv, param.iv, DES_BLOCK_SIZE);
        return ret;
}

static int ecb_des_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, CPACF_KM_DEA, &walk);
}

static int ecb_des_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, CPACF_KM_DEA | CPACF_DECRYPT, &walk);
}

static struct crypto_alg ecb_des_alg = {
        .cra_name = "ecb(des)",
        .cra_driver_name = "ecb-des-s390",
        .cra_priority = 400,	/* combo: des + ecb */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .setkey = des_setkey,
                        .encrypt = ecb_des_encrypt,
                        .decrypt = ecb_des_decrypt,
                }
        }
};

static int cbc_des_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, CPACF_KMC_DEA, &walk);
}

static int cbc_des_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, CPACF_KMC_DEA | CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_des_alg = {
        .cra_name = "cbc(des)",
        .cra_driver_name = "cbc-des-s390",
        .cra_priority = 400,	/* combo: des + cbc */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des_setkey,
                        .encrypt = cbc_des_encrypt,
                        .decrypt = cbc_des_decrypt,
                }
        }
};

/*
 * RFC2451:
 *
 *   For DES-EDE3, there is no known need to reject weak or
 *   complementation keys.  Any weakness is obviated by the use of
 *   multiple keys.
 *
 *   However, if the first two or last two independent 64-bit keys are
 *   equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the
 *   same as DES.  Implementers MUST reject keys that exhibit this
 *   property.
 *
 * In fips mode additionally check that all 3 keys are unique.
 */
static int des3_setkey(struct crypto_tfm *tfm, const u8 *key,
                       unsigned int key_len)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
        int err;

        err = __des3_verify_key(&tfm->crt_flags, key);
        if (unlikely(err))
                return err;

        memcpy(ctx->key, key, key_len);
        return 0;
}

static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        cpacf_km(CPACF_KM_TDEA_192, ctx->key, dst, src, DES_BLOCK_SIZE);
}

static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        cpacf_km(CPACF_KM_TDEA_192 | CPACF_DECRYPT,
                 ctx->key, dst, src, DES_BLOCK_SIZE);
}

static struct crypto_alg des3_alg = {
        .cra_name = "des3_ede",
        .cra_driver_name = "des3_ede-s390",
        .cra_priority = 300,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_module = THIS_MODULE,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = DES3_KEY_SIZE,
                        .cia_max_keysize = DES3_KEY_SIZE,
                        .cia_setkey = des3_setkey,
                        .cia_encrypt = des3_encrypt,
                        .cia_decrypt = des3_decrypt,
                }
        }
};

static int ecb_des3_encrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, CPACF_KM_TDEA_192, &walk);
}

static int ecb_des3_decrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, CPACF_KM_TDEA_192 | CPACF_DECRYPT,
                                &walk);
}

static struct crypto_alg ecb_des3_alg = {
        .cra_name = "ecb(des3_ede)",
        .cra_driver_name = "ecb-des3_ede-s390",
        .cra_priority = 400,	/* combo: des3 + ecb */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES3_KEY_SIZE,
                        .max_keysize = DES3_KEY_SIZE,
                        .setkey = des3_setkey,
                        .encrypt = ecb_des3_encrypt,
                        .decrypt = ecb_des3_decrypt,
                }
        }
};

static int cbc_des3_encrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, CPACF_KMC_TDEA_192, &walk);
}

static int cbc_des3_decrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, CPACF_KMC_TDEA_192 | CPACF_DECRYPT,
                                &walk);
}

static struct crypto_alg cbc_des3_alg = {
        .cra_name = "cbc(des3_ede)",
        .cra_driver_name = "cbc-des3_ede-s390",
        .cra_priority = 400,	/* combo: des3 + cbc */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES3_KEY_SIZE,
                        .max_keysize = DES3_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des3_setkey,
                        .encrypt = cbc_des3_encrypt,
                        .decrypt = cbc_des3_decrypt,
                }
        }
};
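
/*
 * Fill the shared counter page with consecutive counter values, starting
 * from the current IV and covering at most PAGE_SIZE bytes rounded down to
 * a multiple of the block size. Returns the number of bytes the prepared
 * counters cover.
 */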
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
        unsigned int i, n;

        /* align to block size, max. PAGE_SIZE */
        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(DES_BLOCK_SIZE - 1);
        memcpy(ctrptr, iv, DES_BLOCK_SIZE);
        for (i = (n / DES_BLOCK_SIZE) - 1; i > 0; i--) {
                memcpy(ctrptr + DES_BLOCK_SIZE, ctrptr, DES_BLOCK_SIZE);
                crypto_inc(ctrptr + DES_BLOCK_SIZE, DES_BLOCK_SIZE);
                ctrptr += DES_BLOCK_SIZE;
        }
        return n;
}
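
/*
 * Common CTR walk: if the ctrblk page can be locked, whole runs of counter
 * blocks are prepared and processed in one KMCTR call; otherwise the code
 * falls back to one block at a time using walk->iv as the counter. A
 * trailing partial block is encrypted into a stack buffer and only the
 * remaining nbytes are copied to the destination.
 */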
static int ctr_desall_crypt(struct blkcipher_desc *desc, unsigned long fc,
                            struct blkcipher_walk *walk)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        u8 buf[DES_BLOCK_SIZE], *ctrptr;
        unsigned int n, nbytes;
        int ret, locked;

        locked = mutex_trylock(&ctrblk_lock);

        ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
        while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
                n = DES_BLOCK_SIZE;
                if (nbytes >= 2 * DES_BLOCK_SIZE && locked)
                        n = __ctrblk_init(ctrblk, walk->iv, nbytes);
                ctrptr = (n > DES_BLOCK_SIZE) ? ctrblk : walk->iv;
                cpacf_kmctr(fc, ctx->key, walk->dst.virt.addr,
                            walk->src.virt.addr, n, ctrptr);
                if (ctrptr == ctrblk)
                        memcpy(walk->iv, ctrptr + n - DES_BLOCK_SIZE,
                               DES_BLOCK_SIZE);
                crypto_inc(walk->iv, DES_BLOCK_SIZE);
                ret = blkcipher_walk_done(desc, walk, nbytes - n);
        }
        if (locked)
                mutex_unlock(&ctrblk_lock);
        /* final block may be < DES_BLOCK_SIZE, copy only nbytes */
        if (nbytes) {
                cpacf_kmctr(fc, ctx->key, buf, walk->src.virt.addr,
                            DES_BLOCK_SIZE, walk->iv);
                memcpy(walk->dst.virt.addr, buf, nbytes);
                crypto_inc(walk->iv, DES_BLOCK_SIZE);
                ret = blkcipher_walk_done(desc, walk, 0);
        }
        return ret;
}

static int ctr_des_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, CPACF_KMCTR_DEA, &walk);
}

static int ctr_des_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, CPACF_KMCTR_DEA | CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_des_alg = {
        .cra_name = "ctr(des)",
        .cra_driver_name = "ctr-des-s390",
        .cra_priority = 400,	/* combo: des + ctr */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des_setkey,
                        .encrypt = ctr_des_encrypt,
                        .decrypt = ctr_des_decrypt,
                }
        }
};

static int ctr_des3_encrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, CPACF_KMCTR_TDEA_192, &walk);
}

static int ctr_des3_decrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, CPACF_KMCTR_TDEA_192 | CPACF_DECRYPT,
                                &walk);
}

static struct crypto_alg ctr_des3_alg = {
        .cra_name = "ctr(des3_ede)",
        .cra_driver_name = "ctr-des3_ede-s390",
        .cra_priority = 400,	/* combo: des3 + ctr */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES3_KEY_SIZE,
                        .max_keysize = DES3_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des3_setkey,
                        .encrypt = ctr_des3_encrypt,
                        .decrypt = ctr_des3_decrypt,
                }
        }
};

static struct crypto_alg *des_s390_algs_ptr[8];
static int des_s390_algs_num;
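
/*
 * Successfully registered algorithms are remembered so that des_s390_exit()
 * can unregister exactly those (and free the counter page) on module unload
 * or on a failed init.
 */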
static int des_s390_register_alg(struct crypto_alg *alg)
{
        int ret;

        ret = crypto_register_alg(alg);
        if (!ret)
                des_s390_algs_ptr[des_s390_algs_num++] = alg;
        return ret;
}

static void des_s390_exit(void)
{
        while (des_s390_algs_num--)
                crypto_unregister_alg(des_s390_algs_ptr[des_s390_algs_num]);
        if (ctrblk)
                free_page((unsigned long) ctrblk);
}
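
/*
 * Query the CPACF facility once and register only the algorithms the
 * machine actually supports. The shared counter page is allocated only if
 * at least one KMCTR function (DEA or TDEA-192) is available.
 */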
static int __init des_s390_init(void)
{
        int ret;

        /* Query available functions for KM, KMC and KMCTR */
        cpacf_query(CPACF_KM, &km_functions);
        cpacf_query(CPACF_KMC, &kmc_functions);
        cpacf_query(CPACF_KMCTR, &kmctr_functions);

        if (cpacf_test_func(&km_functions, CPACF_KM_DEA)) {
                ret = des_s390_register_alg(&des_alg);
                if (ret)
                        goto out_err;
                ret = des_s390_register_alg(&ecb_des_alg);
                if (ret)
                        goto out_err;
        }
        if (cpacf_test_func(&kmc_functions, CPACF_KMC_DEA)) {
                ret = des_s390_register_alg(&cbc_des_alg);
                if (ret)
                        goto out_err;
        }
        if (cpacf_test_func(&km_functions, CPACF_KM_TDEA_192)) {
                ret = des_s390_register_alg(&des3_alg);
                if (ret)
                        goto out_err;
                ret = des_s390_register_alg(&ecb_des3_alg);
                if (ret)
                        goto out_err;
        }
        if (cpacf_test_func(&kmc_functions, CPACF_KMC_TDEA_192)) {
                ret = des_s390_register_alg(&cbc_des3_alg);
                if (ret)
                        goto out_err;
        }

        if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_DEA) ||
            cpacf_test_func(&kmctr_functions, CPACF_KMCTR_TDEA_192)) {
                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                if (!ctrblk) {
                        ret = -ENOMEM;
                        goto out_err;
                }
        }

        if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_DEA)) {
                ret = des_s390_register_alg(&ctr_des_alg);
                if (ret)
                        goto out_err;
        }
        if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_TDEA_192)) {
                ret = des_s390_register_alg(&ctr_des3_alg);
                if (ret)
                        goto out_err;
        }

        return 0;
out_err:
        des_s390_exit();
        return ret;
}
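
/*
 * Tie module init (and auto-loading) to the message-security assist (MSA)
 * CPU feature: the module is only initialized on machines that provide it.
 */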
module_cpu_feature_match(MSA, des_s390_init);
module_exit(des_s390_exit);

MODULE_ALIAS_CRYPTO("des");
MODULE_ALIAS_CRYPTO("des3_ede");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms");