// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2017, 2020
 *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *		Harald Freudenberger <freude@de.ibm.com>
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/mutex.h>
#include <linux/spinlock.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

/*
 * Key blobs smaller/bigger than these defines are rejected
 * by the common code even before the individual setkey function
 * is called. As paes can handle different kinds of key blobs
 * and padding is also possible, the limits need to be generous.
 */
#define PAES_MIN_KEYSIZE 16
#define PAES_MAX_KEYSIZE 320

static u8 *ctrblk;
static DEFINE_MUTEX(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct key_blob {
	/*
	 * Small keys will be stored in the keybuf. Larger keys are
	 * stored in extra allocated memory. In both cases, key
	 * points to the memory where the key is stored.
	 * The code distinguishes by checking keylen against
	 * sizeof(keybuf). See the two following helper functions.
	 */
	u8 *key;
	u8 keybuf[128];
	unsigned int keylen;
};

static inline int _key_to_kb(struct key_blob *kb,
			     const u8 *key,
			     unsigned int keylen)
{
	struct clearkey_header {
		u8  type;
		u8  res0[3];
		u8  version;
		u8  res1[3];
		u32 keytype;
		u32 len;
	} __packed * h;

	switch (keylen) {
	case 16:
	case 24:
	case 32:
		/* clear key value, prepare pkey clear key token in keybuf */
		memset(kb->keybuf, 0, sizeof(kb->keybuf));
		h = (struct clearkey_header *) kb->keybuf;
		h->version = 0x02; /* TOKVER_CLEAR_KEY */
		h->keytype = (keylen - 8) >> 3;
		h->len = keylen;
		memcpy(kb->keybuf + sizeof(*h), key, keylen);
		kb->keylen = sizeof(*h) + keylen;
		kb->key = kb->keybuf;
		break;
	default:
		/* other key material, let pkey handle this */
		if (keylen <= sizeof(kb->keybuf))
			kb->key = kb->keybuf;
		else {
			kb->key = kmalloc(keylen, GFP_KERNEL);
			if (!kb->key)
				return -ENOMEM;
		}
		memcpy(kb->key, key, keylen);
		kb->keylen = keylen;
		break;
	}

	return 0;
}

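/*
 * Worked example for the clear key path above: a 16 byte AES key yields
 * a token with version 0x02 (TOKVER_CLEAR_KEY), keytype (16 - 8) >> 3 = 1
 * (PKEY_KEYTYPE_AES_128) and len 16, followed by the 16 raw key bytes.
 * The pkey layer then converts this token like any other key blob.
 */
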
static inline void _free_kb_keybuf(struct key_blob *kb)
{
	if (kb->key && kb->key != kb->keybuf
	    && kb->keylen > sizeof(kb->keybuf)) {
		kfree(kb->key);
		kb->key = NULL;
	}
}

struct s390_paes_ctx {
	struct key_blob kb;
	struct pkey_protkey pk;
	spinlock_t pk_lock;
	unsigned long fc;
};

struct s390_pxts_ctx {
	struct key_blob kb[2];
	struct pkey_protkey pk[2];
	spinlock_t pk_lock;
	unsigned long fc;
};

static inline int __paes_keyblob2pkey(struct key_blob *kb,
				      struct pkey_protkey *pk)
{
	int i, ret;

	/* try three times in case of failure */
	for (i = 0; i < 3; i++) {
		ret = pkey_keyblob2pkey(kb->key, kb->keylen, pk);
		if (ret == 0)
			break;
	}

	return ret;
}

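/*
 * Note: the conversion is served by the pkey layer and, depending on the
 * key type, by a crypto adapter. The retries presumably ride out transient
 * failures (e.g. a busy or temporarily unavailable card) that are expected
 * to resolve within a few attempts.
 */
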
static inline int __paes_convert_key(struct s390_paes_ctx *ctx)
{
	struct pkey_protkey pkey;

	if (__paes_keyblob2pkey(&ctx->kb, &pkey))
		return -EINVAL;

	spin_lock_bh(&ctx->pk_lock);
	memcpy(&ctx->pk, &pkey, sizeof(pkey));
	spin_unlock_bh(&ctx->pk_lock);

	return 0;
}

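/*
 * The conversion above deliberately runs without pk_lock held: only the
 * final memcpy into ctx->pk is done under the spinlock, so the (possibly
 * slow) blob conversion never blocks the cipher paths that take a short
 * snapshot of the protected key under the same lock.
 */
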
static int ecb_paes_init(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->kb.key = NULL;
	spin_lock_init(&ctx->pk_lock);

	return 0;
}

static void ecb_paes_exit(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static inline int __ecb_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(ctx))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	return __ecb_paes_set_key(ctx);
}

static int ecb_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	spin_lock_bh(&ctx->pk_lock);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	spin_unlock_bh(&ctx->pk_lock);

	while ((nbytes = walk.nbytes) != 0) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, &param,
			     walk.dst.virt.addr, walk.src.virt.addr, n);
		if (k)
			ret = skcipher_walk_done(&walk, nbytes - k);
		if (k < n) {
			if (__paes_convert_key(ctx))
				return skcipher_walk_done(&walk, -EIO);
			spin_lock_bh(&ctx->pk_lock);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
			spin_unlock_bh(&ctx->pk_lock);
		}
	}

	return ret;
}

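/*
 * Note on the k < n path above: cpacf_km() returns the number of bytes
 * actually processed. A short return means the CPU rejected the protected
 * key, which happens when the wrapping key changed underneath us (e.g.
 * after suspend/resume). The key blob is then converted again and the
 * loop retries the remaining bytes with the fresh protected key.
 */
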
static int ecb_paes_encrypt(struct skcipher_request *req)
{
	return ecb_paes_crypt(req, 0);
}

static int ecb_paes_decrypt(struct skcipher_request *req)
{
	return ecb_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg ecb_paes_alg = {
	.base.cra_name		= "ecb(paes)",
	.base.cra_driver_name	= "ecb-paes-s390",
	.base.cra_priority	= 401,	/* combo: aes + ecb + 1 */
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct s390_paes_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_list		= LIST_HEAD_INIT(ecb_paes_alg.base.cra_list),
	.init			= ecb_paes_init,
	.exit			= ecb_paes_exit,
	.min_keysize		= PAES_MIN_KEYSIZE,
	.max_keysize		= PAES_MAX_KEYSIZE,
	.setkey			= ecb_paes_set_key,
	.encrypt		= ecb_paes_encrypt,
	.decrypt		= ecb_paes_decrypt,
};

static int cbc_paes_init(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->kb.key = NULL;
	spin_lock_init(&ctx->pk_lock);

	return 0;
}

static void cbc_paes_exit(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static inline int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(ctx))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	return __cbc_paes_set_key(ctx);
}

static int cbc_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	memcpy(param.iv, walk.iv, AES_BLOCK_SIZE);
	spin_lock_bh(&ctx->pk_lock);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	spin_unlock_bh(&ctx->pk_lock);

	while ((nbytes = walk.nbytes) != 0) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_kmc(ctx->fc | modifier, &param,
			      walk.dst.virt.addr, walk.src.virt.addr, n);
		if (k) {
			memcpy(walk.iv, param.iv, AES_BLOCK_SIZE);
			ret = skcipher_walk_done(&walk, nbytes - k);
		}
		if (k < n) {
			if (__paes_convert_key(ctx))
				return skcipher_walk_done(&walk, -EIO);
			spin_lock_bh(&ctx->pk_lock);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
			spin_unlock_bh(&ctx->pk_lock);
		}
	}

	return ret;
}

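/*
 * KMC maintains the chaining value in the parameter block, so param.iv
 * is copied back to walk.iv after each processed chunk; this keeps CBC
 * chaining intact across skcipher_walk steps and key re-conversions.
 */
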
static int cbc_paes_encrypt(struct skcipher_request *req)
{
	return cbc_paes_crypt(req, 0);
}

static int cbc_paes_decrypt(struct skcipher_request *req)
{
	return cbc_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg cbc_paes_alg = {
	.base.cra_name		= "cbc(paes)",
	.base.cra_driver_name	= "cbc-paes-s390",
	.base.cra_priority	= 402,	/* ecb-paes-s390 + 1 */
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct s390_paes_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_list		= LIST_HEAD_INIT(cbc_paes_alg.base.cra_list),
	.init			= cbc_paes_init,
	.exit			= cbc_paes_exit,
	.min_keysize		= PAES_MIN_KEYSIZE,
	.max_keysize		= PAES_MAX_KEYSIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= cbc_paes_set_key,
	.encrypt		= cbc_paes_encrypt,
	.decrypt		= cbc_paes_decrypt,
};

static int xts_paes_init(struct crypto_skcipher *tfm)
{
	struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->kb[0].key = NULL;
	ctx->kb[1].key = NULL;
	spin_lock_init(&ctx->pk_lock);

	return 0;
}

static void xts_paes_exit(struct crypto_skcipher *tfm)
{
	struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb[0]);
	_free_kb_keybuf(&ctx->kb[1]);
}

static inline int __xts_paes_convert_key(struct s390_pxts_ctx *ctx)
{
	struct pkey_protkey pkey0, pkey1;

	if (__paes_keyblob2pkey(&ctx->kb[0], &pkey0) ||
	    __paes_keyblob2pkey(&ctx->kb[1], &pkey1))
		return -EINVAL;

	spin_lock_bh(&ctx->pk_lock);
	memcpy(&ctx->pk[0], &pkey0, sizeof(pkey0));
	memcpy(&ctx->pk[1], &pkey1, sizeof(pkey1));
	spin_unlock_bh(&ctx->pk_lock);

	return 0;
}

static inline int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
	unsigned long fc;

	if (__xts_paes_convert_key(ctx))
		return -EINVAL;

	if (ctx->pk[0].type != ctx->pk[1].type)
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KM_PXTS_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

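/*
 * No 192 bit variant above: XTS as standardized (IEEE 1619 / NIST
 * SP 800-38E) only covers AES-128 and AES-256, so CPACF provides only
 * the PXTS_128 and PXTS_256 function codes.
 */
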
static int xts_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int xts_key_len)
{
	int rc;
	struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 ckey[2 * AES_MAX_KEY_SIZE];
	unsigned int ckey_len, key_len;

	if (xts_key_len % 2)
		return -EINVAL;

	key_len = xts_key_len / 2;

	_free_kb_keybuf(&ctx->kb[0]);
	_free_kb_keybuf(&ctx->kb[1]);
	rc = _key_to_kb(&ctx->kb[0], in_key, key_len);
	if (rc)
		return rc;
	rc = _key_to_kb(&ctx->kb[1], in_key + key_len, key_len);
	if (rc)
		return rc;

	rc = __xts_paes_set_key(ctx);
	if (rc)
		return rc;

	/*
	 * xts_verify_key verifies the key length is not odd and makes
	 * sure that the two keys are not the same. This can be done
	 * on the two protected keys as well.
	 */
	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
		AES_KEYSIZE_128 : AES_KEYSIZE_256;
	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
	return xts_verify_key(tfm, ckey, 2 * ckey_len);
}

static int xts_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int keylen, offset, nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 init[16];
	} xts_param;

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;

	memset(&pcc_param, 0, sizeof(pcc_param));
	memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak));
	spin_lock_bh(&ctx->pk_lock);
	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
	spin_unlock_bh(&ctx->pk_lock);
	cpacf_pcc(ctx->fc, pcc_param.key + offset);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk.nbytes) != 0) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
			     walk.dst.virt.addr, walk.src.virt.addr, n);
		if (k)
			ret = skcipher_walk_done(&walk, nbytes - k);
		if (k < n) {
			if (__xts_paes_convert_key(ctx))
				return skcipher_walk_done(&walk, -EIO);
			spin_lock_bh(&ctx->pk_lock);
			memcpy(xts_param.key + offset,
			       ctx->pk[0].protkey, keylen);
			spin_unlock_bh(&ctx->pk_lock);
		}
	}

	return ret;
}

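/*
 * The cpacf_pcc() call above (Perform Cryptographic Computation) derives
 * the initial XTS parameter from the second protected key and the IV; the
 * result in pcc_param.xts seeds xts_param.init, after which KM performs
 * the actual en/decryption with the first key.
 */
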
static int xts_paes_encrypt(struct skcipher_request *req)
{
	return xts_paes_crypt(req, 0);
}

static int xts_paes_decrypt(struct skcipher_request *req)
{
	return xts_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg xts_paes_alg = {
	.base.cra_name		= "xts(paes)",
	.base.cra_driver_name	= "xts-paes-s390",
	.base.cra_priority	= 402,	/* ecb-paes-s390 + 1 */
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct s390_pxts_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_list		= LIST_HEAD_INIT(xts_paes_alg.base.cra_list),
	.init			= xts_paes_init,
	.exit			= xts_paes_exit,
	.min_keysize		= 2 * PAES_MIN_KEYSIZE,
	.max_keysize		= 2 * PAES_MAX_KEYSIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= xts_paes_set_key,
	.encrypt		= xts_paes_encrypt,
	.decrypt		= xts_paes_decrypt,
};

static int ctr_paes_init(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->kb.key = NULL;
	spin_lock_init(&ctx->pk_lock);

	return 0;
}

static void ctr_paes_exit(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static inline int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(ctx))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KMCTR_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	return __ctr_paes_set_key(ctx);
}

static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

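/*
 * __ctrblk_init() pre-expands the counter into up to one page of
 * consecutive counter blocks. This lets a single KMCTR invocation
 * process many blocks at once instead of one AES block per call.
 */
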
static int ctr_paes_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	struct skcipher_walk walk;
	unsigned int nbytes, n, k;
	int ret, locked;
	struct {
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	spin_lock_bh(&ctx->pk_lock);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	spin_unlock_bh(&ctx->pk_lock);

	locked = mutex_trylock(&ctrblk_lock);

	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2 * AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk.iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;
		k = cpacf_kmctr(ctx->fc, &param, walk.dst.virt.addr,
				walk.src.virt.addr, n, ctrptr);
		if (k) {
			if (ctrptr == ctrblk)
				memcpy(walk.iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk.iv, AES_BLOCK_SIZE);
			ret = skcipher_walk_done(&walk, nbytes - k);
		}
		if (k < n) {
			if (__paes_convert_key(ctx)) {
				if (locked)
					mutex_unlock(&ctrblk_lock);
				return skcipher_walk_done(&walk, -EIO);
			}
			spin_lock_bh(&ctx->pk_lock);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
			spin_unlock_bh(&ctx->pk_lock);
		}
	}
	if (locked)
		mutex_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		while (1) {
			if (cpacf_kmctr(ctx->fc, &param, buf,
					walk.src.virt.addr, AES_BLOCK_SIZE,
					walk.iv) == AES_BLOCK_SIZE)
				break;
			if (__paes_convert_key(ctx))
				return skcipher_walk_done(&walk, -EIO);
			spin_lock_bh(&ctx->pk_lock);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
			spin_unlock_bh(&ctx->pk_lock);
		}
		memcpy(walk.dst.virt.addr, buf, nbytes);
		crypto_inc(walk.iv, AES_BLOCK_SIZE);
		ret = skcipher_walk_done(&walk, nbytes);
	}

	return ret;
}

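/*
 * CTR is a stream mode: the final partial block is handled by encrypting
 * one full counter block into the stack buffer buf and copying only the
 * remaining nbytes to the destination.
 */
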
static struct skcipher_alg ctr_paes_alg = {
	.base.cra_name		= "ctr(paes)",
	.base.cra_driver_name	= "ctr-paes-s390",
	.base.cra_priority	= 402,	/* ecb-paes-s390 + 1 */
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct s390_paes_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_list		= LIST_HEAD_INIT(ctr_paes_alg.base.cra_list),
	.init			= ctr_paes_init,
	.exit			= ctr_paes_exit,
	.min_keysize		= PAES_MIN_KEYSIZE,
	.max_keysize		= PAES_MAX_KEYSIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= ctr_paes_set_key,
	.encrypt		= ctr_paes_crypt,
	.decrypt		= ctr_paes_crypt,
	.chunksize		= AES_BLOCK_SIZE,
};

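/*
 * Usage sketch (hypothetical caller, not part of this module): a kernel
 * user selects one of the algorithms registered above by name and feeds
 * it a key blob (e.g. a CCA secure key) instead of a clear AES key:
 *
 *	struct crypto_skcipher *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(paes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, blob, bloblen);
 *
 * The clear AES key never appears in kernel memory; only the CPU-wrapped
 * protected key derived from the blob is used by the CPACF instructions.
 */
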
static inline void __crypto_unregister_skcipher(struct skcipher_alg *alg)
{
	if (!list_empty(&alg->base.cra_list))
		crypto_unregister_skcipher(alg);
}

static void paes_s390_fini(void)
{
	__crypto_unregister_skcipher(&ctr_paes_alg);
	__crypto_unregister_skcipher(&xts_paes_alg);
	__crypto_unregister_skcipher(&cbc_paes_alg);
	__crypto_unregister_skcipher(&ecb_paes_alg);
	if (ctrblk)
		free_page((unsigned long) ctrblk);
}

static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_skcipher(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_skcipher(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_skcipher(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
		ret = crypto_register_skcipher(&ctr_paes_alg);
		if (ret)
			goto out_err;
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");