// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2017
 *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *		Harald Freudenberger <freude@de.ibm.com>
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct key_blob {
	__u8 key[MAXKEYBLOBSIZE];
	unsigned int keylen;
};

struct s390_paes_ctx {
	struct key_blob kb;
	struct pkey_protkey pk;
	unsigned long fc;
};

struct s390_pxts_ctx {
	struct key_blob kb[2];
	struct pkey_protkey pk[2];
	unsigned long fc;
};
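
/*
 * A key_blob carries an opaque secure key token as supplied through
 * setkey(); it never contains clear AES key material. The pkey layer
 * converts it on demand into a protected key (pkey_protkey), which the
 * CPACF instructions can use but which is only valid on the current
 * system and only for as long as the wrapping key does not change.
 */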

static inline int __paes_convert_key(struct key_blob *kb,
				     struct pkey_protkey *pk)
{
	int i, ret;

	/* try three times in case of failure */
	for (i = 0; i < 3; i++) {
		ret = pkey_keyblob2pkey(kb->key, kb->keylen, pk);
		if (ret == 0)
			break;
	}

	return ret;
}
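
/*
 * The retry loop above is a simple robustness measure: the conversion
 * may involve a crypto adapter and can fail transiently, for instance
 * while an adapter is busy or being taken offline. Three attempts is a
 * heuristic chosen by the driver, not an architectural requirement.
 */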

static int __paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->kb.key, in_key, key_len);
	ctx->kb.keylen = key_len;
	if (__paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

static int ecb_paes_crypt(struct blkcipher_desc *desc,
			  unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
	}
	return ret;
}
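
/*
 * The "k < n" case above is the core of the paes design: cpacf_km()
 * returns the number of bytes actually processed, and a short count
 * indicates that the protected key has become unusable, typically
 * because the wrapping key changed (e.g. across suspend/resume or
 * guest migration). The secure key is then converted again and the
 * while loop retries the remaining data with the refreshed protected
 * key.
 */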

static int ecb_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
}

static int ecb_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ecb_paes_alg = {
	.cra_name		= "ecb(paes)",
	.cra_driver_name	= "ecb-paes-s390",
	.cra_priority		= 401,	/* combo: aes + ecb + 1 */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= MINKEYBLOBSIZE,
			.max_keysize	= MAXKEYBLOBSIZE,
			.setkey		= ecb_paes_set_key,
			.encrypt	= ecb_paes_encrypt,
			.decrypt	= ecb_paes_decrypt,
		}
	}
};
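
/*
 * Kernel consumers select this implementation by name just as they
 * would plain AES, e.g. crypto_alloc_blkcipher("ecb(paes)", 0, 0);
 * the only visible difference is that the "key" handed to setkey()
 * is a secure key blob rather than clear key material. dm-crypt, for
 * example, can use the paes ciphers so that no clear AES key is ever
 * present in memory.
 */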

static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->kb.key, in_key, key_len);
	ctx->kb.keylen = key_len;
	if (__cbc_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_kmc(ctx->fc | modifier, &param,
			      walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__cbc_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
		}
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}
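
/*
 * The anonymous param struct above mirrors the KMC parameter block of
 * the protected-key functions: the chaining value (IV) first,
 * immediately followed by the protected key. KMC updates the chaining
 * value in place, which is why param.iv is copied back to walk->iv
 * when the walk is done, and why the key must be re-copied into the
 * block after a key refresh.
 */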

static int cbc_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, 0, &walk);
}

static int cbc_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_paes_alg = {
	.cra_name		= "cbc(paes)",
	.cra_driver_name	= "cbc-paes-s390",
	.cra_priority		= 402,	/* ecb-paes-s390 + 1 */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= MINKEYBLOBSIZE,
			.max_keysize	= MAXKEYBLOBSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_paes_set_key,
			.encrypt	= cbc_paes_encrypt,
			.decrypt	= cbc_paes_decrypt,
		}
	}
};

static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb[0], &ctx->pk[0]) ||
	    __paes_convert_key(&ctx->kb[1], &ctx->pk[1]))
		return -EINVAL;

	if (ctx->pk[0].type != ctx->pk[1].type)
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KM_PXTS_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
	u8 ckey[2 * AES_MAX_KEY_SIZE];
	unsigned int ckey_len, keytok_len;

	if (key_len % 2)
		return -EINVAL;

	keytok_len = key_len / 2;
	memcpy(ctx->kb[0].key, in_key, keytok_len);
	ctx->kb[0].keylen = keytok_len;
	memcpy(ctx->kb[1].key, in_key + keytok_len, keytok_len);
	ctx->kb[1].keylen = keytok_len;
	if (__xts_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * xts_check_key verifies the key length is not odd and makes
	 * sure that the two keys are not the same. This can be done
	 * on the two protected keys as well.
	 */
	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
		AES_KEYSIZE_128 : AES_KEYSIZE_256;
	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
	return xts_check_key(tfm, ckey, 2*ckey_len);
}
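
/*
 * XTS uses a double-length key: the blob passed to setkey() is two
 * equally sized secure key tokens back to back, one for the data key
 * and one for the tweak key. XTS is defined only for AES-128 and
 * AES-256, which is why __xts_paes_set_key() has no AES-192 case.
 */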

static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int keylen, offset, nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
retry:
	memset(&pcc_param, 0, sizeof(pcc_param));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
	cpacf_pcc(ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__xts_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			goto retry;
		}
	}
	return ret;
}
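
/*
 * Before any data moves, cpacf_pcc() computes the initial XTS tweak
 * from walk->iv under the second (tweak) protected key and leaves it
 * in pcc_param.xts, which seeds xts_param.init for the KM calls. The
 * parameter structs are declared for the largest layout (64 bytes of
 * AES-256 protected key); a 128-bit protected key occupies only 48
 * bytes, so the block start is advanced by offset (16) to keep the
 * trailing fields adjacent to the shorter key.
 */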

static int xts_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, 0, &walk);
}

static int xts_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg xts_paes_alg = {
	.cra_name		= "xts(paes)",
	.cra_driver_name	= "xts-paes-s390",
	.cra_priority		= 402,	/* ecb-paes-s390 + 1 */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_pxts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(xts_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= 2 * MINKEYBLOBSIZE,
			.max_keysize	= 2 * MAXKEYBLOBSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_paes_set_key,
			.encrypt	= xts_paes_encrypt,
			.decrypt	= xts_paes_decrypt,
		}
	}
};

static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KMCTR_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->kb.key, in_key, key_len);
	ctx->kb.keylen = key_len;
	if (__ctr_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}
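
/*
 * __ctrblk_init() fills the shared ctrblk page with consecutive
 * counter values starting at the current IV, so that one KMCTR
 * invocation can process up to PAGE_SIZE bytes instead of a single
 * 16-byte block per call.
 */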

static int ctr_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int nbytes, n, k;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
				walk->dst.virt.addr, walk->src.virt.addr,
				n, ctrptr);
		if (k) {
			if (ctrptr == ctrblk)
				memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			ret = blkcipher_walk_done(desc, walk, nbytes - n);
		}
		if (k < n) {
			if (__ctr_paes_set_key(ctx) != 0) {
				if (locked)
					spin_unlock(&ctrblk_lock);
				return blkcipher_walk_done(desc, walk, -EIO);
			}
		}
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		while (1) {
			if (cpacf_kmctr(ctx->fc | modifier,
					ctx->pk.protkey, buf,
					walk->src.virt.addr, AES_BLOCK_SIZE,
					walk->iv) == AES_BLOCK_SIZE)
				break;
			if (__ctr_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}
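
/*
 * The spin_trylock() above is an optimization rather than a
 * correctness requirement: the holder of ctrblk_lock may use the
 * shared pre-computed counter page, while concurrent callers simply
 * fall back to one-block-at-a-time processing through walk->iv
 * instead of contending for the lock.
 */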

static int ctr_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, 0, &walk);
}

static int ctr_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_paes_alg = {
	.cra_name		= "ctr(paes)",
	.cra_driver_name	= "ctr-paes-s390",
	.cra_priority		= 402,	/* ecb-paes-s390 + 1 */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_paes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ctr_paes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= MINKEYBLOBSIZE,
			.max_keysize	= MAXKEYBLOBSIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ctr_paes_set_key,
			.encrypt	= ctr_paes_encrypt,
			.decrypt	= ctr_paes_decrypt,
		}
	}
};

static inline void __crypto_unregister_alg(struct crypto_alg *alg)
{
	if (!list_empty(&alg->cra_list))
		crypto_unregister_alg(alg);
}

static void paes_s390_fini(void)
{
	if (ctrblk)
		free_page((unsigned long) ctrblk);
	__crypto_unregister_alg(&ctr_paes_alg);
	__crypto_unregister_alg(&xts_paes_alg);
	__crypto_unregister_alg(&cbc_paes_alg);
	__crypto_unregister_alg(&ecb_paes_alg);
}

static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_alg(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_alg(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_alg(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ret = crypto_register_alg(&ctr_paes_alg);
		if (ret)
			goto out_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}
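
/*
 * Each mode is registered only if the machine actually offers the
 * corresponding protected-key CPACF function, so on older hardware
 * the module loads but exposes just the supported subset. Any
 * registration failure unwinds through paes_s390_fini(); this is why
 * __crypto_unregister_alg() checks cra_list first, so that algorithms
 * that were never registered are not unregistered.
 */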

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");