// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"
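
/*
 * Dispatch table of mode and key-handling routines, implemented in
 * assembly with the sparc64 AES opcodes.  aes_set_key() points
 * ctx->ops at the table matching the key size.
 */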
struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};

extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

static struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);
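
/*
 * The expanded key lengths below are in bytes: 0xb0 = 176 (eleven
 * 16-byte round keys for AES-128), 0xd0 = 208 (thirteen for AES-192)
 * and 0xf0 = 240 (fifteen for AES-256).
 */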
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}

static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
				unsigned int key_len)
{
	return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
}

static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}
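
/*
 * The skcipher mode implementations below share one pattern: load the
 * round keys into the FPU once, process each scatterlist chunk a whole
 * number of blocks at a time, and clear %fprs on the way out so the FP
 * registers clobbered by the crypto opcodes are not saved as live FPU
 * state.
 */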
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}
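
/*
 * Decryption hands the asm routine a pointer one past the end of the
 * expanded key, since the inverse cipher consumes the round keys in
 * reverse order.
 */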
static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}
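
/*
 * Handle the final partial block of a CTR request: encrypt the counter
 * block with the ECB helper to get one keystream block, XOR-copy only
 * the bytes needed, then advance the counter.
 */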
static void ctr_crypt_final(const struct crypto_sparc64_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
			      keystream, AES_BLOCK_SIZE);
	crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		ctx->ops->ctr_crypt(&ctx->key[0], walk.src.virt.addr,
				    walk.dst.virt.addr,
				    round_down(nbytes, AES_BLOCK_SIZE),
				    walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	fprs_write(0);
	return err;
}
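
/*
 * Registration details worth noting: the single-block cipher uses a
 * 4-byte alignmask while the skcipher modes use an 8-byte one, matching
 * the u32 vs. u64 pointer types of the underlying asm routines.  CTR is
 * registered as a stream cipher (cra_blocksize of 1), with .chunksize
 * recording the 16-byte granularity of the keystream.
 */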
static struct crypto_alg cipher_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= crypto_aes_encrypt,
			.cia_decrypt		= crypto_aes_decrypt
		}
	}
};

static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "ctr(aes)",
		.base.cra_driver_name	= "ctr-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
		.chunksize		= AES_BLOCK_SIZE,
	}
};
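
/*
 * AES opcode support is advertised both in the ELF hwcaps
 * (HWCAP_SPARC_CRYPTO) and in the Configuration Feature Register,
 * read as %asr26.
 */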
static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}

static int __init aes_sparc64_mod_init(void)
{
	int err;

	if (!sparc64_has_aes_opcode()) {
		pr_info("sparc64 aes opcodes not available.\n");
		return -ENODEV;
	}
	pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
	err = crypto_register_alg(&cipher_alg);
	if (err)
		return err;
	err = crypto_register_skciphers(skcipher_algs,
					ARRAY_SIZE(skcipher_algs));
	if (err)
		crypto_unregister_alg(&cipher_alg);
	return err;
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_alg(&cipher_alg);
	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS_CRYPTO("aes");

#include "crop_devid.c"