/*
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005, 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *	      Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <linux/fips.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;
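/*
 * Function code masks for the KM, KMC and KMCTR CPACF instructions,
 * filled in once at module init time by cpacf_query(). Each setkey
 * routine consults the matching mask to decide whether the hardware
 * supports the requested key length or whether the software fallback
 * has to be used instead.
 */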
struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	int key_len;
	unsigned long fc;
	union {
		struct crypto_skcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	int key_len;
	unsigned long fc;
	struct crypto_skcipher *fallback;
};
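/*
 * Both context structures carry a software fallback transform. If the
 * machine does not provide the required CPACF function code for a given
 * key length, fc stays zero and all operations are routed to the
 * fallback transform allocated in the cra_init callbacks below.
 */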
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
					       CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
			   CRYPTO_TFM_RES_MASK);

	return ret;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KM_AES_128 :
	     (key_len == 24) ? CPACF_KM_AES_192 :
	     (key_len == 32) ? CPACF_KM_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_cip(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(!sctx->fc)) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE);
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(!sctx->fc)) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	cpacf_km(sctx->fc | CPACF_DECRYPT,
		 &sctx->key, out, in, AES_BLOCK_SIZE);
}
static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
						 CRYPTO_ALG_ASYNC |
						 CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}
static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}
static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
		}
	}
};
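/*
 * Fallback helpers for the blkcipher modes (ECB, CBC, CTR): key material
 * and requests are simply forwarded to the skcipher fallback transform,
 * mirroring the flag handling of setkey_fallback_cip() above.
 */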
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	crypto_skcipher_clear_flags(sctx->fallback.blk, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
						      CRYPTO_TFM_REQ_MASK);

	ret = crypto_skcipher_setkey(sctx->fallback.blk, key, len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= crypto_skcipher_get_flags(sctx->fallback.blk) &
			  CRYPTO_TFM_RES_MASK;

	return ret;
}
static int fallback_blk_dec(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
	int ret;

	skcipher_request_set_tfm(req, sctx->fallback.blk);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_decrypt(req);

	skcipher_request_zero(req);
	return ret;
}
static int fallback_blk_enc(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
	int ret;

	skcipher_request_set_tfm(req, sctx->fallback.blk);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_encrypt(req);
	return ret;
}
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KM_AES_128 :
	     (key_len == 24) ? CPACF_KM_AES_192 :
	     (key_len == 32) ? CPACF_KM_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}
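/*
 * The mode helpers below all follow the same pattern: walk the
 * scatterlists in virtually mapped chunks and hand only complete AES
 * blocks to the CPACF instruction; blkcipher_walk_done() keeps track
 * of any remaining bytes.
 */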
static int ecb_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(sctx->fc | modifier, sctx->key,
			 walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}

	return ret;
}
static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, 0, &walk);
}
static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, CPACF_DECRYPT, &walk);
}
static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_skcipher(name, 0,
						   CRYPTO_ALG_ASYNC |
						   CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}
static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_skcipher(sctx->fallback.blk);
}
static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= 400,	/* combo: aes + ecb */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ecb_aes_set_key,
			.encrypt	= ecb_aes_encrypt,
			.decrypt	= ecb_aes_decrypt,
		}
	}
};
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KMC_AES_128 :
	     (key_len == 24) ? CPACF_KMC_AES_192 :
	     (key_len == 32) ? CPACF_KMC_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}
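/*
 * KMC expects a parameter block containing the chaining value (IV)
 * followed by the key. The updated chaining value is copied back to
 * walk->iv after the loop so that the crypto API sees the final IV.
 */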
static int cbc_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_kmc(sctx->fc | modifier, &param,
			  walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}
static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, 0, &walk);
}
static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, CPACF_DECRYPT, &walk);
}
static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= 400,	/* combo: aes + cbc */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_aes_set_key,
			.encrypt	= cbc_aes_encrypt,
			.decrypt	= cbc_aes_decrypt,
		}
	}
};
static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	int ret;

	crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
						     CRYPTO_TFM_REQ_MASK);

	ret = crypto_skcipher_setkey(xts_ctx->fallback, key, len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= crypto_skcipher_get_flags(xts_ctx->fallback) &
			  CRYPTO_TFM_RES_MASK;

	return ret;
}
static int xts_fallback_decrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst, struct scatterlist *src,
				unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
	int ret;

	skcipher_request_set_tfm(req, xts_ctx->fallback);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_decrypt(req);

	skcipher_request_zero(req);
	return ret;
}
static int xts_fallback_encrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst, struct scatterlist *src,
				unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
	int ret;

	skcipher_request_set_tfm(req, xts_ctx->fallback);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_encrypt(req);

	skcipher_request_zero(req);
	return ret;
}
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned long fc;
	int err;

	err = xts_check_key(tfm, in_key, key_len);
	if (err)
		return err;

	/* In fips mode only 128 bit or 256 bit keys are valid */
	if (fips_enabled && key_len != 32 && key_len != 64) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* Pick the correct function code based on the key length */
	fc = (key_len == 32) ? CPACF_KM_XTS_128 :
	     (key_len == 64) ? CPACF_KM_XTS_256 : 0;

	/* Check if the function code is available */
	xts_ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!xts_ctx->fc)
		return xts_fallback_setkey(tfm, in_key, key_len);

	/* Split the XTS key into the two subkeys */
	key_len = key_len / 2;
	xts_ctx->key_len = key_len;
	memcpy(xts_ctx->key, in_key, key_len);
	memcpy(xts_ctx->pcc_key, in_key + key_len, key_len);
	return 0;
}
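/*
 * For XTS the PCC instruction first computes the initial tweak from the
 * second subkey and the IV; the result (pcc_param.xts) then becomes the
 * starting value in the KM parameter block. The key is copied at an
 * offset of 16 bytes for AES-128 so that it sits right aligned within
 * the 32-byte key field of the parameter block.
 */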
static int xts_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int offset, nbytes, n;
	int ret;
	struct {
		u8 key[32];
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	offset = xts_ctx->key_len & 0x10;
	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, xts_ctx->pcc_key, xts_ctx->key_len);
	cpacf_pcc(xts_ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, xts_ctx->key, xts_ctx->key_len);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(xts_ctx->fc | modifier, xts_param.key + offset,
			 walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	return ret;
}
static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!xts_ctx->fc))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, 0, &walk);
}
static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!xts_ctx->fc))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, CPACF_DECRYPT, &walk);
}
static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}

	return 0;
}
static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_skcipher(xts_ctx->fallback);
}
static struct crypto_alg xts_aes_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-s390",
	.cra_priority		= 400,	/* combo: aes + xts */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= xts_fallback_init,
	.cra_exit		= xts_fallback_exit,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_aes_set_key,
			.encrypt	= xts_aes_encrypt,
			.decrypt	= xts_aes_decrypt,
		}
	}
};
static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KMCTR_AES_128 :
	     (key_len == 24) ? CPACF_KMCTR_AES_192 :
	     (key_len == 32) ? CPACF_KMCTR_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}
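/*
 * __ctrblk_init() fills the shared ctrblk page with consecutive counter
 * values derived from the IV, so that a single KMCTR invocation can
 * process up to PAGE_SIZE bytes at once.
 */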
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}
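/*
 * ctr_aes_crypt() only takes the ctrblk_lock with spin_trylock(). If the
 * lock is contended, it processes one counter block at a time directly
 * from walk->iv instead of waiting for the shared counter page.
 */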
static int ctr_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int n, nbytes;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		cpacf_kmctr(sctx->fc | modifier, sctx->key,
			    walk->dst.virt.addr, walk->src.virt.addr,
			    n, ctrptr);
		if (ctrptr == ctrblk)
			memcpy(walk->iv, ctrptr + n - AES_BLOCK_SIZE,
			       AES_BLOCK_SIZE);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		cpacf_kmctr(sctx->fc | modifier, sctx->key,
			    buf, walk->src.virt.addr,
			    AES_BLOCK_SIZE, walk->iv);
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}
static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, 0, &walk);
}
static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, CPACF_DECRYPT, &walk);
}
static struct crypto_alg ctr_aes_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-s390",
	.cra_priority		= 400,	/* combo: aes + ctr */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ctr_aes_set_key,
			.encrypt	= ctr_aes_encrypt,
			.decrypt	= ctr_aes_decrypt,
		}
	}
};
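/*
 * Registration bookkeeping: every successfully registered algorithm is
 * remembered in aes_s390_algs_ptr so that aes_s390_fini() can
 * unregister it again on module unload.
 */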
static struct crypto_alg *aes_s390_algs_ptr[5];
static int aes_s390_algs_num;
static int aes_s390_register_alg(struct crypto_alg *alg)
{
	int ret;

	ret = crypto_register_alg(alg);
	if (!ret)
		aes_s390_algs_ptr[aes_s390_algs_num++] = alg;
	return ret;
}
static void aes_s390_fini(void)
{
	while (aes_s390_algs_num--)
		crypto_unregister_alg(aes_s390_algs_ptr[aes_s390_algs_num]);
	if (ctrblk)
		free_page((unsigned long) ctrblk);
}
static int __init aes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_AES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_AES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_AES_256)) {
		ret = aes_s390_register_alg(&aes_alg);
		if (ret)
			goto out_err;
		ret = aes_s390_register_alg(&ecb_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_AES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_256)) {
		ret = aes_s390_register_alg(&cbc_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_XTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_XTS_256)) {
		ret = aes_s390_register_alg(&xts_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_256)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
		ret = aes_s390_register_alg(&ctr_aes_alg);
		if (ret)
			goto out_err;
	}

	return 0;
out_err:
	aes_s390_fini();
	return ret;
}
module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);
MODULE_ALIAS_CRYPTO("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");