/*
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005, 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *	      Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt)	KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4
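/*
 * ctrblk is a shared, page-sized buffer of pre-computed counter blocks for
 * CTR mode; concurrent users serialize on ctrblk_lock.  keylen_flag records
 * which AES key lengths the CPACF hardware supports (see need_fallback()).
 */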
static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);
static char keylen_flag;
struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};
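/*
 * PCC parameter block layout and the per-tfm context for xts(aes):
 * "key" holds the data-encryption half of the XTS key (used with the KM
 * instruction), "pcc_key" the tweak half (fed to PCC to derive the
 * initial tweak).
 */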
struct pcc_param {
	u8 key[32];
	u8 tweak[16];
	u8 block[16];
	u8 bit[16];
	u8 xts[16];
};

struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	long enc;
	long dec;
	int key_len;
	struct crypto_blkcipher *fallback;
};
/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback
 * is required, or a negative number if the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
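/*
 * aes_set_key() stores the raw key for the KM instruction when the key
 * length is handled in hardware; otherwise the key is handed to the
 * software fallback cipher set up above.
 */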
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}
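/* Single-block en-/decryption through the KM instruction or the fallback. */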
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}
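/*
 * The fallback is looked up by cra_name with CRYPTO_ALG_NEED_FALLBACK set
 * in the mask, so the allocation cannot resolve back to this driver and a
 * synchronous software implementation is selected instead.
 */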
static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}
static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}
static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_cip,
	.cra_exit		=	fallback_exit_cip,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	aes_encrypt,
			.cia_decrypt		=	aes_decrypt,
		}
	}
};
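/*
 * Block-cipher (ecb/cbc) fallback plumbing: request flags are forwarded to
 * the fallback tfm and its result flags are copied back on error, mirroring
 * setkey_fallback_cip() above.
 */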
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
static int fallback_blk_dec(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int fallback_blk_enc(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
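/*
 * ecb(aes): the setkey handler picks the KM function code matching the key
 * length; key lengths the hardware cannot handle are routed to the software
 * fallback blkcipher.
 */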
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
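/*
 * Walk the scatterlists and hand KM as many complete AES blocks as the
 * current walk segment provides; the remainder is returned to the walk via
 * blkcipher_walk_done().
 */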
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}
static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}
static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}
static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}
static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}
static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	AES_MIN_KEY_SIZE,
			.max_keysize	=	AES_MAX_KEY_SIZE,
			.setkey		=	ecb_aes_set_key,
			.encrypt	=	ecb_aes_encrypt,
			.decrypt	=	ecb_aes_decrypt,
		}
	}
};
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
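/*
 * CBC uses the KMC instruction, whose parameter block carries the chaining
 * value (IV) followed by the key; the updated IV is copied back into the
 * walk once all full blocks have been processed.
 */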
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	if (!nbytes)
		goto out;

	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
	return ret;
}
static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, &walk);
}
static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, &walk);
}
static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	AES_MIN_KEY_SIZE,
			.max_keysize	=	AES_MAX_KEY_SIZE,
			.ivsize		=	AES_BLOCK_SIZE,
			.setkey		=	cbc_aes_set_key,
			.encrypt	=	cbc_aes_encrypt,
			.decrypt	=	cbc_aes_decrypt,
		}
	}
};
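/*
 * xts(aes): only 256-bit and 512-bit XTS keys map onto the KM XTS function
 * codes; 384-bit keys (key_len == 48) are handled entirely by the software
 * fallback blkcipher.
 */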
static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	int ret;

	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
static int xts_fallback_decrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst, struct scatterlist *src,
				unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int xts_fallback_encrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst, struct scatterlist *src,
				unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case 32:
		xts_ctx->enc = KM_XTS_128_ENCRYPT;
		xts_ctx->dec = KM_XTS_128_DECRYPT;
		memcpy(xts_ctx->key + 16, in_key, 16);
		memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
		break;
	case 48:
		xts_ctx->enc = 0;
		xts_ctx->dec = 0;
		xts_fallback_setkey(tfm, in_key, key_len);
		break;
	case 64:
		xts_ctx->enc = KM_XTS_256_ENCRYPT;
		xts_ctx->dec = KM_XTS_256_DECRYPT;
		memcpy(xts_ctx->key, in_key, 32);
		memcpy(xts_ctx->pcc_key, in_key + 32, 32);
		break;
	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	xts_ctx->key_len = key_len;
	return 0;
}
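/*
 * PCC computes the initial tweak from the tweak key and the IV; the
 * resulting XTS block is then handed to KM together with the data key.
 * "offset" skips the unused first 16 key bytes of the parameter block
 * when the 128-bit (key_len == 32) variant is in use.
 */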
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_xts_ctx *xts_ctx,
			 struct blkcipher_walk *walk)
{
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	unsigned int n;
	u8 *in, *out;
	struct pcc_param pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	if (!nbytes)
		goto out;

	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
	ret = crypt_s390_pcc(func, &pcc_param.key[offset]);
	if (ret < 0)
		return -EIO;

	memcpy(xts_param.key, xts_ctx->key, 32);
	memcpy(xts_param.init, pcc_param.xts, 16);
	do {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		ret = crypt_s390_km(func, &xts_param.key[offset], out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
out:
	return ret;
}
static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}
static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}
static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}

	return 0;
}
static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;
}
static struct crypto_alg xts_aes_alg = {
	.cra_name		=	"xts(aes)",
	.cra_driver_name	=	"xts-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_xts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	xts_fallback_init,
	.cra_exit		=	xts_fallback_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	2 * AES_MIN_KEY_SIZE,
			.max_keysize	=	2 * AES_MAX_KEY_SIZE,
			.ivsize		=	AES_BLOCK_SIZE,
			.setkey		=	xts_aes_set_key,
			.encrypt	=	xts_aes_encrypt,
			.decrypt	=	xts_aes_decrypt,
		}
	}
};
static int xts_aes_alg_reg;
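/*
 * ctr(aes) is only registered when KMCTR is available for all three key
 * lengths (see aes_s390_init()), so no software fallback is wired up here.
 */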
static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMCTR_AES_128_ENCRYPT;
		sctx->dec = KMCTR_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMCTR_AES_192_ENCRYPT;
		sctx->dec = KMCTR_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMCTR_AES_256_ENCRYPT;
		sctx->dec = KMCTR_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
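/*
 * Fill the counter buffer with successive counter values, starting from the
 * value already stored in the first block; at most one page (PAGE_SIZE) of
 * counters is prepared per call.
 */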
static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
		memcpy(ctrptr + i, ctrptr + i - AES_BLOCK_SIZE,
		       AES_BLOCK_SIZE);
		crypto_inc(ctrptr + i, AES_BLOCK_SIZE);
	}
	return n;
}
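/*
 * If the shared ctrblk page can be locked, whole stretches of counters are
 * prepared at once; otherwise a single on-stack counter block is used and
 * KMCTR processes one block per call.  A final partial block is encrypted
 * into a temporary buffer and only nbytes of it are copied out.
 */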
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	unsigned int n, nbytes;
	u8 buf[AES_BLOCK_SIZE], ctrbuf[AES_BLOCK_SIZE];
	u8 *out, *in, *ctrptr = ctrbuf;

	if (!walk->nbytes)
		return ret;

	if (spin_trylock(&ctrblk_lock))
		ctrptr = ctrblk;

	memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			if (ctrptr == ctrblk)
				n = __ctrblk_init(ctrptr, nbytes);
			else
				n = AES_BLOCK_SIZE;
			ret = crypt_s390_kmctr(func, sctx->key, out, in,
					       n, ctrptr);
			if (ret < 0 || ret != n) {
				if (ctrptr == ctrblk)
					spin_unlock(&ctrblk_lock);
				return -EIO;
			}
			if (n > AES_BLOCK_SIZE)
				memcpy(ctrptr, ctrptr + n - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(ctrptr, AES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	if (ctrptr == ctrblk) {
		if (nbytes)
			memcpy(ctrbuf, ctrptr, AES_BLOCK_SIZE);
		else
			memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
		spin_unlock(&ctrblk_lock);
	} else {
		if (!nbytes)
			memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
	}
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
				       AES_BLOCK_SIZE, ctrbuf);
		if (ret < 0 || ret != AES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrbuf, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
		memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);
	}

	return ret;
}
static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}
static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}
static struct crypto_alg ctr_aes_alg = {
	.cra_name		=	"ctr(aes)",
	.cra_driver_name	=	"ctr-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	AES_MIN_KEY_SIZE,
			.max_keysize	=	AES_MAX_KEY_SIZE,
			.ivsize		=	AES_BLOCK_SIZE,
			.setkey		=	ctr_aes_set_key,
			.encrypt	=	ctr_aes_encrypt,
			.decrypt	=	ctr_aes_decrypt,
		}
	}
};
static int ctr_aes_alg_reg;
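/*
 * Probe the CPACF facilities at module load and register only the
 * algorithms the hardware (at the required MSA extension level) supports.
 */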
static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);
		if (ret)
			goto xts_aes_err;
		xts_aes_alg_reg = 1;
	}

	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_aes_err;
		}
		ret = crypto_register_alg(&ctr_aes_alg);
		if (ret) {
			free_page((unsigned long) ctrblk);
			goto ctr_aes_err;
		}
		ctr_aes_alg_reg = 1;
	}

out:
	return ret;

ctr_aes_err:
	crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
	crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}
static void __exit aes_s390_fini(void)
{
	if (ctr_aes_alg_reg) {
		crypto_unregister_alg(&ctr_aes_alg);
		free_page((unsigned long) ctrblk);
	}
	if (xts_aes_alg_reg)
		crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}
module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");