/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright (C) 2005 IBM Deutschland GmbH, IBM Corporation
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * Derived from "crypto/aes.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
19 #include <crypto/algapi.h>
20 #include <linux/module.h>
21 #include <linux/init.h>
22 #include "crypt_s390.h"
24 #define AES_MIN_KEY_SIZE 16
25 #define AES_MAX_KEY_SIZE 32
27 /* data block size for all key lengths */
28 #define AES_BLOCK_SIZE 16
35 u8 iv
[AES_BLOCK_SIZE
];
36 u8 key
[AES_MAX_KEY_SIZE
];
42 static int aes_set_key(struct crypto_tfm
*tfm
, const u8
*in_key
,
45 struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(tfm
);
46 u32
*flags
= &tfm
->crt_flags
;
63 /* invalid key length */
68 sctx
->key_len
= key_len
;
69 memcpy(sctx
->key
, in_key
, key_len
);
72 *flags
|= CRYPTO_TFM_RES_BAD_KEY_LEN
;
76 static void aes_encrypt(struct crypto_tfm
*tfm
, u8
*out
, const u8
*in
)
78 const struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(tfm
);
80 switch (sctx
->key_len
) {
82 crypt_s390_km(KM_AES_128_ENCRYPT
, &sctx
->key
, out
, in
,
86 crypt_s390_km(KM_AES_192_ENCRYPT
, &sctx
->key
, out
, in
,
90 crypt_s390_km(KM_AES_256_ENCRYPT
, &sctx
->key
, out
, in
,
96 static void aes_decrypt(struct crypto_tfm
*tfm
, u8
*out
, const u8
*in
)
98 const struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(tfm
);
100 switch (sctx
->key_len
) {
102 crypt_s390_km(KM_AES_128_DECRYPT
, &sctx
->key
, out
, in
,
106 crypt_s390_km(KM_AES_192_DECRYPT
, &sctx
->key
, out
, in
,
110 crypt_s390_km(KM_AES_256_DECRYPT
, &sctx
->key
, out
, in
,
117 static struct crypto_alg aes_alg
= {
119 .cra_driver_name
= "aes-s390",
120 .cra_priority
= CRYPT_S390_PRIORITY
,
121 .cra_flags
= CRYPTO_ALG_TYPE_CIPHER
,
122 .cra_blocksize
= AES_BLOCK_SIZE
,
123 .cra_ctxsize
= sizeof(struct s390_aes_ctx
),
124 .cra_module
= THIS_MODULE
,
125 .cra_list
= LIST_HEAD_INIT(aes_alg
.cra_list
),
128 .cia_min_keysize
= AES_MIN_KEY_SIZE
,
129 .cia_max_keysize
= AES_MAX_KEY_SIZE
,
130 .cia_setkey
= aes_set_key
,
131 .cia_encrypt
= aes_encrypt
,
132 .cia_decrypt
= aes_decrypt
,
137 static int ecb_aes_set_key(struct crypto_tfm
*tfm
, const u8
*in_key
,
138 unsigned int key_len
)
140 struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(tfm
);
144 sctx
->enc
= KM_AES_128_ENCRYPT
;
145 sctx
->dec
= KM_AES_128_DECRYPT
;
148 sctx
->enc
= KM_AES_192_ENCRYPT
;
149 sctx
->dec
= KM_AES_192_DECRYPT
;
152 sctx
->enc
= KM_AES_256_ENCRYPT
;
153 sctx
->dec
= KM_AES_256_DECRYPT
;
157 return aes_set_key(tfm
, in_key
, key_len
);
160 static int ecb_aes_crypt(struct blkcipher_desc
*desc
, long func
, void *param
,
161 struct blkcipher_walk
*walk
)
163 int ret
= blkcipher_walk_virt(desc
, walk
);
166 while ((nbytes
= walk
->nbytes
)) {
167 /* only use complete blocks */
168 unsigned int n
= nbytes
& ~(AES_BLOCK_SIZE
- 1);
169 u8
*out
= walk
->dst
.virt
.addr
;
170 u8
*in
= walk
->src
.virt
.addr
;
172 ret
= crypt_s390_km(func
, param
, out
, in
, n
);
173 BUG_ON((ret
< 0) || (ret
!= n
));
175 nbytes
&= AES_BLOCK_SIZE
- 1;
176 ret
= blkcipher_walk_done(desc
, walk
, nbytes
);
182 static int ecb_aes_encrypt(struct blkcipher_desc
*desc
,
183 struct scatterlist
*dst
, struct scatterlist
*src
,
186 struct s390_aes_ctx
*sctx
= crypto_blkcipher_ctx(desc
->tfm
);
187 struct blkcipher_walk walk
;
189 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
190 return ecb_aes_crypt(desc
, sctx
->enc
, sctx
->key
, &walk
);
193 static int ecb_aes_decrypt(struct blkcipher_desc
*desc
,
194 struct scatterlist
*dst
, struct scatterlist
*src
,
197 struct s390_aes_ctx
*sctx
= crypto_blkcipher_ctx(desc
->tfm
);
198 struct blkcipher_walk walk
;
200 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
201 return ecb_aes_crypt(desc
, sctx
->dec
, sctx
->key
, &walk
);
204 static struct crypto_alg ecb_aes_alg
= {
205 .cra_name
= "ecb(aes)",
206 .cra_driver_name
= "ecb-aes-s390",
207 .cra_priority
= CRYPT_S390_COMPOSITE_PRIORITY
,
208 .cra_flags
= CRYPTO_ALG_TYPE_BLKCIPHER
,
209 .cra_blocksize
= AES_BLOCK_SIZE
,
210 .cra_ctxsize
= sizeof(struct s390_aes_ctx
),
211 .cra_type
= &crypto_blkcipher_type
,
212 .cra_module
= THIS_MODULE
,
213 .cra_list
= LIST_HEAD_INIT(ecb_aes_alg
.cra_list
),
216 .min_keysize
= AES_MIN_KEY_SIZE
,
217 .max_keysize
= AES_MAX_KEY_SIZE
,
218 .setkey
= ecb_aes_set_key
,
219 .encrypt
= ecb_aes_encrypt
,
220 .decrypt
= ecb_aes_decrypt
,
225 static int cbc_aes_set_key(struct crypto_tfm
*tfm
, const u8
*in_key
,
226 unsigned int key_len
)
228 struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(tfm
);
232 sctx
->enc
= KMC_AES_128_ENCRYPT
;
233 sctx
->dec
= KMC_AES_128_DECRYPT
;
236 sctx
->enc
= KMC_AES_192_ENCRYPT
;
237 sctx
->dec
= KMC_AES_192_DECRYPT
;
240 sctx
->enc
= KMC_AES_256_ENCRYPT
;
241 sctx
->dec
= KMC_AES_256_DECRYPT
;
245 return aes_set_key(tfm
, in_key
, key_len
);
248 static int cbc_aes_crypt(struct blkcipher_desc
*desc
, long func
, void *param
,
249 struct blkcipher_walk
*walk
)
251 int ret
= blkcipher_walk_virt(desc
, walk
);
252 unsigned int nbytes
= walk
->nbytes
;
257 memcpy(param
, walk
->iv
, AES_BLOCK_SIZE
);
259 /* only use complete blocks */
260 unsigned int n
= nbytes
& ~(AES_BLOCK_SIZE
- 1);
261 u8
*out
= walk
->dst
.virt
.addr
;
262 u8
*in
= walk
->src
.virt
.addr
;
264 ret
= crypt_s390_kmc(func
, param
, out
, in
, n
);
265 BUG_ON((ret
< 0) || (ret
!= n
));
267 nbytes
&= AES_BLOCK_SIZE
- 1;
268 ret
= blkcipher_walk_done(desc
, walk
, nbytes
);
269 } while ((nbytes
= walk
->nbytes
));
270 memcpy(walk
->iv
, param
, AES_BLOCK_SIZE
);
276 static int cbc_aes_encrypt(struct blkcipher_desc
*desc
,
277 struct scatterlist
*dst
, struct scatterlist
*src
,
280 struct s390_aes_ctx
*sctx
= crypto_blkcipher_ctx(desc
->tfm
);
281 struct blkcipher_walk walk
;
283 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
284 return cbc_aes_crypt(desc
, sctx
->enc
, sctx
->iv
, &walk
);
287 static int cbc_aes_decrypt(struct blkcipher_desc
*desc
,
288 struct scatterlist
*dst
, struct scatterlist
*src
,
291 struct s390_aes_ctx
*sctx
= crypto_blkcipher_ctx(desc
->tfm
);
292 struct blkcipher_walk walk
;
294 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
295 return cbc_aes_crypt(desc
, sctx
->dec
, sctx
->iv
, &walk
);
298 static struct crypto_alg cbc_aes_alg
= {
299 .cra_name
= "cbc(aes)",
300 .cra_driver_name
= "cbc-aes-s390",
301 .cra_priority
= CRYPT_S390_COMPOSITE_PRIORITY
,
302 .cra_flags
= CRYPTO_ALG_TYPE_BLKCIPHER
,
303 .cra_blocksize
= AES_BLOCK_SIZE
,
304 .cra_ctxsize
= sizeof(struct s390_aes_ctx
),
305 .cra_type
= &crypto_blkcipher_type
,
306 .cra_module
= THIS_MODULE
,
307 .cra_list
= LIST_HEAD_INIT(cbc_aes_alg
.cra_list
),
310 .min_keysize
= AES_MIN_KEY_SIZE
,
311 .max_keysize
= AES_MAX_KEY_SIZE
,
312 .ivsize
= AES_BLOCK_SIZE
,
313 .setkey
= cbc_aes_set_key
,
314 .encrypt
= cbc_aes_encrypt
,
315 .decrypt
= cbc_aes_decrypt
,
320 static int __init
aes_init(void)
324 if (crypt_s390_func_available(KM_AES_128_ENCRYPT
))
326 if (crypt_s390_func_available(KM_AES_192_ENCRYPT
))
328 if (crypt_s390_func_available(KM_AES_256_ENCRYPT
))
331 if (!has_aes_128
&& !has_aes_192
&& !has_aes_256
)
334 ret
= crypto_register_alg(&aes_alg
);
336 printk(KERN_INFO
"crypt_s390: aes-s390 couldn't be loaded.\n");
340 ret
= crypto_register_alg(&ecb_aes_alg
);
343 "crypt_s390: ecb-aes-s390 couldn't be loaded.\n");
347 ret
= crypto_register_alg(&cbc_aes_alg
);
350 "crypt_s390: cbc-aes-s390 couldn't be loaded.\n");
358 crypto_unregister_alg(&ecb_aes_alg
);
360 crypto_unregister_alg(&aes_alg
);
365 static void __exit
aes_fini(void)
367 crypto_unregister_alg(&cbc_aes_alg
);
368 crypto_unregister_alg(&ecb_aes_alg
);
369 crypto_unregister_alg(&aes_alg
);
372 module_init(aes_init
);
373 module_exit(aes_fini
);
377 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
378 MODULE_LICENSE("GPL");