/*
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * Copyright IBM Corp. 2017
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *	      Harald Freudenberger <freude@de.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License (version 2 only)
 * as published by the Free Software Foundation.
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct s390_paes_ctx {
	struct pkey_seckey sk;
	struct pkey_protkey pk;
	unsigned long fc;
};

struct s390_pxts_ctx {
	struct pkey_seckey sk[2];
	struct pkey_protkey pk[2];
	unsigned long fc;
};

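/*
 * The "key" handed in through setkey is a secure key blob, not a clear
 * AES key. __paes_convert_key() turns the secure key (sk) into a CPACF
 * protected key (pk); only the protected key is ever passed to the
 * crypto instructions. fc caches the CPACF function code matching the
 * protected key type.
 */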
static inline int __paes_convert_key(struct pkey_seckey *sk,
				     struct pkey_protkey *pk)
{
	int i, ret;

	/* try three times in case of failure */
	for (i = 0; i < 3; i++) {
		ret = pkey_skey2pkey(sk, pk);
		if (ret == 0)
			break;
	}

	return ret;
}

static int __paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (key_len != SECKEYBLOBSIZE)
		return -EINVAL;

	memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
	if (__paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

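/*
 * A CPACF instruction completing fewer bytes than requested (k < n)
 * indicates that the protected key has become invalid, typically
 * because the wrapping key changed underneath it. The crypt loops
 * below then re-derive the protected key from the secure key and
 * retry; only if that also fails is -EIO returned.
 */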
static int ecb_paes_crypt(struct blkcipher_desc *desc,
			  unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
	}
	return ret;
}

static int ecb_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
}

static int ecb_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

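/*
 * Note that min/max_keysize is SECKEYBLOBSIZE, the size of the secure
 * key blob, not an AES key size: users of the "paes" algorithms supply
 * key material in secure key format.
 */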
static struct crypto_alg ecb_paes_alg = {
	.cra_name		= "ecb(paes)",
	.cra_driver_name	= "ecb-paes-s390",
	.cra_priority		= 400,	/* combo: aes + ecb */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= SECKEYBLOBSIZE,
			.max_keysize	= SECKEYBLOBSIZE,
			.setkey		= ecb_paes_set_key,
			.encrypt	= ecb_paes_encrypt,
			.decrypt	= ecb_paes_decrypt,
		}
	}
};

static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
	if (__cbc_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

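/*
 * The KMC parameter block is the chaining value (IV) followed by the
 * protected key. The key part must be refreshed from ctx->pk after a
 * re-derivation, while the IV part carries the chaining state across
 * cpacf_kmc() calls.
 */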
static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_kmc(ctx->fc | modifier, &param,
			      walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__cbc_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
		}
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}

static int cbc_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, 0, &walk);
}

static int cbc_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_paes_alg = {
	.cra_name		= "cbc(paes)",
	.cra_driver_name	= "cbc-paes-s390",
	.cra_priority		= 400,	/* combo: aes + cbc */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= SECKEYBLOBSIZE,
			.max_keysize	= SECKEYBLOBSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_paes_set_key,
			.encrypt	= cbc_paes_encrypt,
			.decrypt	= cbc_paes_decrypt,
		}
	}
};

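/*
 * XTS uses a double-length key: both halves of the secure key blob
 * must convert to protected keys of the same AES key type. Only the
 * 128- and 256-bit types are accepted, since XTS mode is defined for
 * AES-128 and AES-256 only.
 */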
static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk[0], &ctx->pk[0]) ||
	    __paes_convert_key(&ctx->sk[1], &ctx->pk[1]))
		return -EINVAL;

	if (ctx->pk[0].type != ctx->pk[1].type)
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KM_PXTS_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
	u8 ckey[2 * AES_MAX_KEY_SIZE];
	unsigned int ckey_len;

	memcpy(ctx->sk[0].seckey, in_key, SECKEYBLOBSIZE);
	memcpy(ctx->sk[1].seckey, in_key + SECKEYBLOBSIZE, SECKEYBLOBSIZE);
	if (__xts_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * xts_check_key verifies the key length is not odd and makes
	 * sure that the two keys are not the same. This can be done
	 * on the two protected keys as well.
	 */
	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
		AES_KEYSIZE_128 : AES_KEYSIZE_256;
	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
	return xts_check_key(tfm, ckey, 2*ckey_len);
}

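/*
 * The initial XTS tweak is computed with the PCC instruction using the
 * second protected key; the result (pcc_param.xts) seeds the KM
 * parameter block (xts_param.init) together with the first protected
 * key. A 128-bit protected key occupies only 48 of the 64 key bytes in
 * the parameter block, hence the 16-byte offset.
 */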
static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int keylen, offset, nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
retry:
	memset(&pcc_param, 0, sizeof(pcc_param));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
	cpacf_pcc(ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__xts_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			goto retry;
		}
	}
	return ret;
}

static int xts_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, 0, &walk);
}

static int xts_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg xts_paes_alg = {
	.cra_name		= "xts(paes)",
	.cra_driver_name	= "xts-paes-s390",
	.cra_priority		= 400,	/* combo: aes + xts */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_pxts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(xts_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= 2 * SECKEYBLOBSIZE,
			.max_keysize	= 2 * SECKEYBLOBSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_paes_set_key,
			.encrypt	= xts_paes_encrypt,
			.decrypt	= xts_paes_decrypt,
		}
	}
};

static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KMCTR_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->sk.seckey, in_key, key_len);
	if (__ctr_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

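/*
 * Expand the counter into the pre-allocated ctrblk page so that a
 * single cpacf_kmctr() call can process up to PAGE_SIZE bytes: the IV
 * is copied into the first block and each following block is the
 * increment of its predecessor.
 */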
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

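/*
 * The shared ctrblk page is taken with spin_trylock() only: if another
 * context holds the lock, processing falls back to one block at a time
 * using walk->iv directly instead of blocking.
 */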
static int ctr_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int nbytes, n, k;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
				walk->dst.virt.addr, walk->src.virt.addr,
				n, ctrptr);
		if (k) {
			if (ctrptr == ctrblk)
				memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			ret = blkcipher_walk_done(desc, walk, nbytes - n);
		}
		if (k < n) {
			if (__ctr_paes_set_key(ctx) != 0) {
				if (locked)
					spin_unlock(&ctrblk_lock);
				return blkcipher_walk_done(desc, walk, -EIO);
			}
		}
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		while (1) {
			if (cpacf_kmctr(ctx->fc | modifier,
					ctx->pk.protkey, buf,
					walk->src.virt.addr, AES_BLOCK_SIZE,
					walk->iv) == AES_BLOCK_SIZE)
				break;
			if (__ctr_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}

static int ctr_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, 0, &walk);
}

static int ctr_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_paes_alg = {
	.cra_name		= "ctr(paes)",
	.cra_driver_name	= "ctr-paes-s390",
	.cra_priority		= 400,	/* combo: aes + ctr */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ctr_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= SECKEYBLOBSIZE,
			.max_keysize	= SECKEYBLOBSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ctr_paes_set_key,
			.encrypt	= ctr_paes_encrypt,
			.decrypt	= ctr_paes_decrypt,
		}
	}
};

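/*
 * Registration is conditional on the available CPACF functions, so an
 * algorithm may never have been registered; its cra_list stays empty
 * in that case and unregistering must be skipped.
 */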
static inline void __crypto_unregister_alg(struct crypto_alg *alg)
{
	if (!list_empty(&alg->cra_list))
		crypto_unregister_alg(alg);
}

static void paes_s390_fini(void)
{
	if (ctrblk)
		free_page((unsigned long) ctrblk);
	__crypto_unregister_alg(&ctr_paes_alg);
	__crypto_unregister_alg(&xts_paes_alg);
	__crypto_unregister_alg(&cbc_paes_alg);
	__crypto_unregister_alg(&ecb_paes_alg);
}

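/*
 * Each mode is registered only if the machine reports at least one
 * matching protected key function code; on a registration failure the
 * init routine backs out everything registered so far via
 * paes_s390_fini().
 */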
static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_alg(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_alg(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_alg(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ret = crypto_register_alg(&ctr_paes_alg);
		if (ret)
			goto out_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");