// SPDX-License-Identifier: GPL-2.0
/*
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * Copyright IBM Corp. 2017,2019
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *	      Harald Freudenberger <freude@de.ibm.com>
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

/*
 * Key blobs smaller/bigger than these defines are rejected
 * by the common code even before the individual setkey function
 * is called. As paes can handle different kinds of key blobs
 * and padding is also possible, the limits need to be generous.
 */
#define PAES_MIN_KEYSIZE 64
#define PAES_MAX_KEYSIZE 256

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct key_blob {
	/*
	 * Small keys will be stored in the keybuf. Larger keys are
	 * stored in extra allocated memory. In both cases key points
	 * to the memory where the key is stored. The code distinguishes
	 * the two cases by checking keylen against sizeof(keybuf).
	 * See the two following helper functions.
	 */
	u8 *key;
	u8 keybuf[128];
	unsigned int keylen;
};

static inline int _copy_key_to_kb(struct key_blob *kb,
				  const u8 *key,
				  unsigned int keylen)
{
	/* small keys fit into the inline buffer, larger keys get allocated */
	if (keylen <= sizeof(kb->keybuf))
		kb->key = kb->keybuf;
	else {
		kb->key = kmalloc(keylen, GFP_KERNEL);
		if (!kb->key)
			return -ENOMEM;
	}
	memcpy(kb->key, key, keylen);
	kb->keylen = keylen;

	return 0;
}

static inline void _free_kb_keybuf(struct key_blob *kb)
{
	if (kb->key && kb->key != kb->keybuf
	    && kb->keylen > sizeof(kb->keybuf)) {
		kfree(kb->key);
		kb->key = NULL;
	}
}

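/*
 * Note: the setkey handlers below always call _free_kb_keybuf() before
 * _copy_key_to_kb(), so a repeated setkey on the same tfm cannot leak a
 * previously kmalloc'ed key blob.
 */
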
struct s390_paes_ctx {
	struct key_blob kb;
	struct pkey_protkey pk;
	unsigned long fc;
};

struct s390_pxts_ctx {
	struct key_blob kb[2];
	struct pkey_protkey pk[2];
	unsigned long fc;
};

static inline int __paes_convert_key(struct key_blob *kb,
				     struct pkey_protkey *pk)
{
	int i, ret;

	/* try three times in case of failure */
	for (i = 0; i < 3; i++) {
		ret = pkey_keyblob2pkey(kb->key, kb->keylen, pk);
		if (ret == 0)
			break;
	}

	return ret;
}

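/*
 * The retry above covers transient failures of the pkey layer, e.g. when
 * the conversion races with a concurrent master key change on the crypto
 * adapter (an assumption about typical failure causes, not an exhaustive
 * list).
 */
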
static int __paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_init(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->kb.key = NULL;

	return 0;
}

static void ecb_paes_exit(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	if (__paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

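/*
 * Common pattern of the crypt routines below: cpacf_km/kmc/kmctr report how
 * many bytes they actually processed. If that is less than requested, the
 * protected key has stopped verifying (presumably the wrapping key changed,
 * e.g. across suspend/resume or an LPAR migration), so the key is converted
 * again from the stored key blob and the loop retries.
 */
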
static int ecb_paes_crypt(struct blkcipher_desc *desc,
			  unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
	}
	return ret;
}

static int ecb_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
}

static int ecb_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ecb_paes_alg = {
	.cra_name		= "ecb(paes)",
	.cra_driver_name	= "ecb-paes-s390",
	.cra_priority		= 401,	/* combo: aes + ecb + 1 */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_paes_alg.cra_list),
	.cra_init		= ecb_paes_init,
	.cra_exit		= ecb_paes_exit,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= PAES_MIN_KEYSIZE,
			.max_keysize	= PAES_MAX_KEYSIZE,
			.setkey		= ecb_paes_set_key,
			.encrypt	= ecb_paes_encrypt,
			.decrypt	= ecb_paes_decrypt,
		}
	}
};

static int cbc_paes_init(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->kb.key = NULL;

	return 0;
}

static void cbc_paes_exit(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	if (__cbc_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

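/*
 * The param block below (IV followed by the protected key) mirrors the
 * parameter block layout the CPACF KMC instruction expects for the
 * protected key AES functions; the updated chaining value is copied back
 * to walk->iv after the loop.
 */
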
static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_kmc(ctx->fc | modifier, &param,
			      walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__cbc_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
		}
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}

static int cbc_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, 0, &walk);
}

static int cbc_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_paes_alg = {
	.cra_name		= "cbc(paes)",
	.cra_driver_name	= "cbc-paes-s390",
	.cra_priority		= 402,	/* ecb-paes-s390 + 1 */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_paes_alg.cra_list),
	.cra_init		= cbc_paes_init,
	.cra_exit		= cbc_paes_exit,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= PAES_MIN_KEYSIZE,
			.max_keysize	= PAES_MAX_KEYSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_paes_set_key,
			.encrypt	= cbc_paes_encrypt,
			.decrypt	= cbc_paes_decrypt,
		}
	}
};

static int xts_paes_init(struct crypto_tfm *tfm)
{
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->kb[0].key = NULL;
	ctx->kb[1].key = NULL;

	return 0;
}

static void xts_paes_exit(struct crypto_tfm *tfm)
{
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb[0]);
	_free_kb_keybuf(&ctx->kb[1]);
}

static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb[0], &ctx->pk[0]) ||
	    __paes_convert_key(&ctx->kb[1], &ctx->pk[1]))
		return -EINVAL;

	if (ctx->pk[0].type != ctx->pk[1].type)
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KM_PXTS_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int xts_key_len)
{
	int rc;
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
	u8 ckey[2 * AES_MAX_KEY_SIZE];
	unsigned int ckey_len, key_len;

	if (xts_key_len % 2)
		return -EINVAL;

	key_len = xts_key_len / 2;

	_free_kb_keybuf(&ctx->kb[0]);
	_free_kb_keybuf(&ctx->kb[1]);
	rc = _copy_key_to_kb(&ctx->kb[0], in_key, key_len);
	if (rc)
		return rc;
	rc = _copy_key_to_kb(&ctx->kb[1], in_key + key_len, key_len);
	if (rc)
		return rc;

	if (__xts_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * xts_check_key verifies the key length is not odd and makes
	 * sure that the two keys are not the same. This can be done
	 * on the two protected keys as well.
	 */
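	/*
	 * Converting a clear key into a protected key is deterministic for
	 * a given wrapping key, so identical key halves also show up as
	 * identical protected keys here (an assumption about the pkey
	 * conversion; it is what makes the check below meaningful).
	 */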
	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
		AES_KEYSIZE_128 : AES_KEYSIZE_256;
	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
	return xts_check_key(tfm, ckey, 2*ckey_len);
}

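/*
 * XTS needs two keys: pk[1] is fed to the CPACF PCC instruction to compute
 * the initial XTS parameter from the tweak (the result ends up in
 * pcc_param.xts), while pk[0] does the actual data encryption via KM. The
 * keylen/offset handling below accounts for the two protected key sizes:
 * an AES-128 protected key is 48 bytes, an AES-256 protected key 64 bytes,
 * and the shorter key is placed 16 bytes into the 64-byte key field so the
 * parameter block passed to PCC/KM starts right at the key material.
 */
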
static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int keylen, offset, nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
retry:
	memset(&pcc_param, 0, sizeof(pcc_param));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
	cpacf_pcc(ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__xts_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			/* rebuild the operating keys from the fresh protected keys */
			goto retry;
		}
	}
	return ret;
}

static int xts_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, 0, &walk);
}

static int xts_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg xts_paes_alg = {
	.cra_name		= "xts(paes)",
	.cra_driver_name	= "xts-paes-s390",
	.cra_priority		= 402,	/* ecb-paes-s390 + 1 */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_pxts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(xts_paes_alg.cra_list),
	.cra_init		= xts_paes_init,
	.cra_exit		= xts_paes_exit,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= 2 * PAES_MIN_KEYSIZE,
			.max_keysize	= 2 * PAES_MAX_KEYSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_paes_set_key,
			.encrypt	= xts_paes_encrypt,
			.decrypt	= xts_paes_decrypt,
		}
	}
};

static int ctr_paes_init(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->kb.key = NULL;

	return 0;
}

static void ctr_paes_exit(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KMCTR_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	if (__ctr_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

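/*
 * CTR mode: to let KMCTR process many blocks per invocation, a shared page
 * (ctrblk) is pre-filled with incrementing counter values. The page is
 * protected by ctrblk_lock; if the trylock in ctr_paes_crypt fails, the
 * code simply falls back to processing one block at a time using walk->iv
 * as the counter.
 */
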
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

static int ctr_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int nbytes, n, k;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
				walk->dst.virt.addr, walk->src.virt.addr,
				n, ctrptr);
		if (k) {
			if (ctrptr == ctrblk)
				memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			ret = blkcipher_walk_done(desc, walk, nbytes - n);
		}
		if (k < n) {
			if (__ctr_paes_set_key(ctx) != 0) {
				if (locked)
					spin_unlock(&ctrblk_lock);
				return blkcipher_walk_done(desc, walk, -EIO);
			}
		}
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		while (1) {
			if (cpacf_kmctr(ctx->fc | modifier,
					ctx->pk.protkey, buf,
					walk->src.virt.addr, AES_BLOCK_SIZE,
					walk->iv) == AES_BLOCK_SIZE)
				break;
			if (__ctr_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}

static int ctr_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, 0, &walk);
}

static int ctr_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_paes_alg = {
	.cra_name		= "ctr(paes)",
	.cra_driver_name	= "ctr-paes-s390",
	.cra_priority		= 402,	/* ecb-paes-s390 + 1 */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ctr_paes_alg.cra_list),
	.cra_init		= ctr_paes_init,
	.cra_exit		= ctr_paes_exit,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= PAES_MIN_KEYSIZE,
			.max_keysize	= PAES_MAX_KEYSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ctr_paes_set_key,
			.encrypt	= ctr_paes_encrypt,
			.decrypt	= ctr_paes_decrypt,
		}
	}
};

static inline void __crypto_unregister_alg(struct crypto_alg *alg)
{
	if (!list_empty(&alg->cra_list))
		crypto_unregister_alg(alg);
}

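/*
 * Only a subset of the algorithms may have been registered, depending on
 * which CPACF function codes the machine provides. cra_list starts out as
 * an empty list head and only becomes non-empty once crypto_register_alg()
 * has added the algorithm, so the check above skips never-registered algs
 * on cleanup.
 */
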
static void paes_s390_fini(void)
{
	if (ctrblk)
		free_page((unsigned long) ctrblk);
	__crypto_unregister_alg(&ctr_paes_alg);
	__crypto_unregister_alg(&xts_paes_alg);
	__crypto_unregister_alg(&cbc_paes_alg);
	__crypto_unregister_alg(&ecb_paes_alg);
}

static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_alg(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_alg(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_alg(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ret = crypto_register_alg(&ctr_paes_alg);
		if (ret)
			goto out_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");