// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2017
 *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *		Harald Freudenberger <freude@de.ibm.com>
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

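/*
 * Per-tfm context: the opaque secure key blob (sk), the CPACF protected
 * key (pk) derived from it, and the CPACF function code (fc) selected for
 * that key type. fc == 0 means no usable function code was found.
 */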
struct s390_paes_ctx {
	struct pkey_seckey sk;
	struct pkey_protkey pk;
	unsigned long fc;
};

struct s390_pxts_ctx {
	struct pkey_seckey sk[2];
	struct pkey_protkey pk[2];
	unsigned long fc;
};

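/*
 * Convert a secure key blob into a protected key. The conversion may fail
 * transiently, so it is simply retried a few times before giving up.
 */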
static inline int __paes_convert_key(struct pkey_seckey *sk,
				     struct pkey_protkey *pk)
{
	int i, ret;

	/* try three times in case of failure */
	for (i = 0; i < 3; i++) {
		ret = pkey_skey2pkey(sk, pk);
		if (ret == 0)
			break;
	}

	return ret;
}

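/*
 * (Re)derive the protected key from the secure key and map the protected
 * key type (AES 128/192/256) to the matching CPACF-KM function code. The
 * code is only accepted if the KM query mask reports it as available.
 */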
static int __paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (key_len != SECKEYBLOBSIZE)
		return -EINVAL;

	memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
	if (__paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

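/*
 * Walk the scatterlists and feed complete AES blocks to the KM
 * instruction. A short completion (k < n) indicates that the protected
 * key became invalid (e.g. because the wrapping key changed), so the key
 * is re-derived from the secure key and the remaining data is retried.
 */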
static int ecb_paes_crypt(struct blkcipher_desc *desc,
			  unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
	}
	return ret;
}

static int ecb_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
}

static int ecb_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ecb_paes_alg = {
	.cra_name		= "ecb(paes)",
	.cra_driver_name	= "ecb-paes-s390",
	.cra_priority		= 400,	/* combo: aes + ecb */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= SECKEYBLOBSIZE,
			.max_keysize	= SECKEYBLOBSIZE,
			.setkey		= ecb_paes_set_key,
			.encrypt	= ecb_paes_encrypt,
			.decrypt	= ecb_paes_decrypt,
		}
	}
};

static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
	if (__cbc_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

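/*
 * CBC uses the KMC instruction, which takes a parameter block holding
 * both the chaining value (iv) and the protected key. The updated iv is
 * copied back to the walk descriptor when all blocks have been processed.
 */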
static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_kmc(ctx->fc | modifier, &param,
			      walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__cbc_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
		}
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}

static int cbc_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, 0, &walk);
}

static int cbc_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_paes_alg = {
	.cra_name		= "cbc(paes)",
	.cra_driver_name	= "cbc-paes-s390",
	.cra_priority		= 400,	/* combo: aes + cbc */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= SECKEYBLOBSIZE,
			.max_keysize	= SECKEYBLOBSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_paes_set_key,
			.encrypt	= cbc_paes_encrypt,
			.decrypt	= cbc_paes_decrypt,
		}
	}
};

static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk[0], &ctx->pk[0]) ||
	    __paes_convert_key(&ctx->sk[1], &ctx->pk[1]))
		return -EINVAL;

	if (ctx->pk[0].type != ctx->pk[1].type)
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KM_PXTS_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
	u8 ckey[2 * AES_MAX_KEY_SIZE];
	unsigned int ckey_len;

	memcpy(ctx->sk[0].seckey, in_key, SECKEYBLOBSIZE);
	memcpy(ctx->sk[1].seckey, in_key + SECKEYBLOBSIZE, SECKEYBLOBSIZE);
	if (__xts_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * xts_check_key verifies the key length is not odd and makes
	 * sure that the two keys are not the same. This can be done
	 * on the two protected keys as well.
	 */
	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
		AES_KEYSIZE_128 : AES_KEYSIZE_256;
	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
	return xts_check_key(tfm, ckey, 2*ckey_len);
}

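/*
 * For XTS the tweak is first processed with the PCC instruction using the
 * second protected key; the result (pcc_param.xts) seeds the KM-XTS
 * parameter block, which is then used together with the first protected
 * key. If the protected keys become invalid mid-operation, the keys and
 * the tweak computation are redone via the retry label.
 */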
static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int keylen, offset, nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
retry:
	memset(&pcc_param, 0, sizeof(pcc_param));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
	cpacf_pcc(ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__xts_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			goto retry;
		}
	}
	return ret;
}

static int xts_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, 0, &walk);
}

static int xts_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg xts_paes_alg = {
	.cra_name		= "xts(paes)",
	.cra_driver_name	= "xts-paes-s390",
	.cra_priority		= 400,	/* combo: aes + xts */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_pxts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(xts_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= 2 * SECKEYBLOBSIZE,
			.max_keysize	= 2 * SECKEYBLOBSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_paes_set_key,
			.encrypt	= xts_paes_encrypt,
			.decrypt	= xts_paes_decrypt,
		}
	}
};

static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KMCTR_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->sk.seckey, in_key, key_len);
	if (__ctr_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

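/*
 * Fill the ctrblk page with consecutive counter values derived from the
 * current iv so that many CTR blocks can be handed to KMCTR in one call.
 * Returns the number of bytes (a multiple of AES_BLOCK_SIZE) prepared.
 */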
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

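/*
 * The shared ctrblk page is protected by ctrblk_lock. spin_trylock() is
 * used so that concurrent requests fall back to single-block processing
 * via walk->iv instead of waiting for the page.
 */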
static int ctr_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int nbytes, n, k;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
				walk->dst.virt.addr, walk->src.virt.addr,
				n, ctrptr);
		if (k) {
			if (ctrptr == ctrblk)
				memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			ret = blkcipher_walk_done(desc, walk, nbytes - n);
		}
		if (k < n) {
			if (__ctr_paes_set_key(ctx) != 0) {
				if (locked)
					spin_unlock(&ctrblk_lock);
				return blkcipher_walk_done(desc, walk, -EIO);
			}
		}
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		while (1) {
			if (cpacf_kmctr(ctx->fc | modifier,
					ctx->pk.protkey, buf,
					walk->src.virt.addr, AES_BLOCK_SIZE,
					walk->iv) == AES_BLOCK_SIZE)
				break;
			if (__ctr_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}

static int ctr_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, 0, &walk);
}

static int ctr_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_paes_alg = {
	.cra_name		= "ctr(paes)",
	.cra_driver_name	= "ctr-paes-s390",
	.cra_priority		= 400,	/* combo: aes + ctr */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ctr_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= SECKEYBLOBSIZE,
			.max_keysize	= SECKEYBLOBSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ctr_paes_set_key,
			.encrypt	= ctr_paes_encrypt,
			.decrypt	= ctr_paes_decrypt,
		}
	}
};

static inline void __crypto_unregister_alg(struct crypto_alg *alg)
{
	if (!list_empty(&alg->cra_list))
		crypto_unregister_alg(alg);
}

static void paes_s390_fini(void)
{
	if (ctrblk)
		free_page((unsigned long) ctrblk);
	__crypto_unregister_alg(&ctr_paes_alg);
	__crypto_unregister_alg(&xts_paes_alg);
	__crypto_unregister_alg(&cbc_paes_alg);
	__crypto_unregister_alg(&ecb_paes_alg);
}

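/*
 * Query the CPACF facility masks once at module load and register only
 * those paes algorithms whose function codes are actually available.
 */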
static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_alg(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_alg(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_alg(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ret = crypto_register_alg(&ctr_paes_alg);
		if (ret)
			goto out_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");