drivers/crypto/vmx/aes_xts.c

/**
 * AES XTS routines supporting VMX In-core instructions on Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"
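
/*
 * Per-transform context: the expanded AES keys used by the VMX assembly
 * routines (data encryption, data decryption and tweak generation), plus a
 * software xts(aes) skcipher used as a fallback when the VSX unit cannot be
 * used, e.g. when a request arrives in interrupt context.
 */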
struct p8_aes_xts_ctx {
        struct crypto_skcipher *fallback;
        struct aes_key enc_key;
        struct aes_key dec_key;
        struct aes_key tweak_key;
};
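
/*
 * Allocate the software fallback skcipher when the tfm is instantiated and
 * copy the tfm's flags onto it.
 */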
static int p8_aes_xts_init(struct crypto_tfm *tfm)
{
        const char *alg = crypto_tfm_alg_name(tfm);
        struct crypto_skcipher *fallback;
        struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

        fallback = crypto_alloc_skcipher(alg, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(fallback)) {
                printk(KERN_ERR
                        "Failed to allocate transformation for '%s': %ld\n",
                        alg, PTR_ERR(fallback));
                return PTR_ERR(fallback);
        }

        crypto_skcipher_set_flags(
                fallback,
                crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
        ctx->fallback = fallback;

        return 0;
}
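
/* Release the fallback transform when the tfm is torn down. */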
static void p8_aes_xts_exit(struct crypto_tfm *tfm)
{
        struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

        if (ctx->fallback) {
                crypto_free_skcipher(ctx->fallback);
                ctx->fallback = NULL;
        }
}
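
/*
 * Expand the XTS key material: the first half of the key provides the data
 * encryption/decryption keys, the second half the tweak key. The expansion
 * runs with the VSX unit enabled, and the same key is also programmed into
 * the fallback skcipher.
 */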
static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
                             unsigned int keylen)
{
        int ret;
        struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

        ret = xts_check_key(tfm, key, keylen);
        if (ret)
                return ret;

        preempt_disable();
        pagefault_disable();
        enable_kernel_vsx();
        ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8, &ctx->tweak_key);
        ret += aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
        ret += aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
        disable_kernel_vsx();
        pagefault_enable();
        preempt_enable();

        ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
        return ret;
}
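
/*
 * Common XTS path. In interrupt context the request is forwarded to the
 * software fallback; otherwise the scatterlists are walked, the initial
 * tweak is derived by encrypting the IV with the tweak key, and full blocks
 * are processed by the VMX assembly routines.
 */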
static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst,
                            struct scatterlist *src,
                            unsigned int nbytes, int enc)
{
        int ret;
        u8 tweak[AES_BLOCK_SIZE];
        u8 *iv;
        struct blkcipher_walk walk;
        struct p8_aes_xts_ctx *ctx =
                crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

        if (in_interrupt()) {
                /* The VSX unit cannot be used here; use the software fallback. */
                SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
                skcipher_request_set_tfm(req, ctx->fallback);
                skcipher_request_set_callback(req, desc->flags, NULL, NULL);
                skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
                ret = enc ? crypto_skcipher_encrypt(req) :
                            crypto_skcipher_decrypt(req);
                skcipher_request_zero(req);
        } else {
                blkcipher_walk_init(&walk, dst, src, nbytes);

                ret = blkcipher_walk_virt(desc, &walk);

                preempt_disable();
                pagefault_disable();
                enable_kernel_vsx();

                /* The initial tweak is the IV encrypted with the tweak key. */
                iv = walk.iv;
                memset(tweak, 0, AES_BLOCK_SIZE);
                aes_p8_encrypt(iv, tweak, &ctx->tweak_key);

                disable_kernel_vsx();
                pagefault_enable();
                preempt_enable();

                while ((nbytes = walk.nbytes)) {
                        preempt_disable();
                        pagefault_disable();
                        enable_kernel_vsx();
                        if (enc)
                                aes_p8_xts_encrypt(walk.src.virt.addr,
                                                   walk.dst.virt.addr,
                                                   nbytes & AES_BLOCK_MASK,
                                                   &ctx->enc_key, NULL, tweak);
                        else
                                aes_p8_xts_decrypt(walk.src.virt.addr,
                                                   walk.dst.virt.addr,
                                                   nbytes & AES_BLOCK_MASK,
                                                   &ctx->dec_key, NULL, tweak);
                        disable_kernel_vsx();
                        pagefault_enable();
                        preempt_enable();

                        nbytes &= AES_BLOCK_SIZE - 1;
                        ret = blkcipher_walk_done(desc, &walk, nbytes);
                }
        }
        return ret;
}
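
/* Thin blkcipher entry points dispatching to the common routine. */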
static int p8_aes_xts_encrypt(struct blkcipher_desc *desc,
                              struct scatterlist *dst,
                              struct scatterlist *src, unsigned int nbytes)
{
        return p8_aes_xts_crypt(desc, dst, src, nbytes, 1);
}

static int p8_aes_xts_decrypt(struct blkcipher_desc *desc,
                              struct scatterlist *dst,
                              struct scatterlist *src, unsigned int nbytes)
{
        return p8_aes_xts_crypt(desc, dst, src, nbytes, 0);
}
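
/*
 * Algorithm descriptor registered by the driver's module init code;
 * cra_priority 2000 makes this implementation preferred over the generic
 * xts(aes) when the module is loaded.
 */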
struct crypto_alg p8_aes_xts_alg = {
        .cra_name = "xts(aes)",
        .cra_driver_name = "p8_aes_xts",
        .cra_module = THIS_MODULE,
        .cra_priority = 2000,
        .cra_type = &crypto_blkcipher_type,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
        .cra_alignmask = 0,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
        .cra_init = p8_aes_xts_init,
        .cra_exit = p8_aes_xts_exit,
        .cra_blkcipher = {
                .ivsize = AES_BLOCK_SIZE,
                .min_keysize = 2 * AES_MIN_KEY_SIZE,
                .max_keysize = 2 * AES_MAX_KEY_SIZE,
                .setkey = p8_aes_xts_setkey,
                .encrypt = p8_aes_xts_encrypt,
                .decrypt = p8_aes_xts_decrypt,
        },
};