/**
 * AES CTR routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */
#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"

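/*
 * Per-tfm context: the P8/VSX-expanded encryption key plus a software
 * fallback skcipher for contexts where the vector unit is unavailable.
 */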
struct p8_aes_ctr_ctx {
        struct crypto_skcipher *fallback;
        struct aes_key enc_key;
};

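/*
 * Allocate the generic ctr(aes) fallback (same algorithm name, taken
 * from the tfm) and mirror this tfm's flags onto it. The fallback is
 * used by p8_aes_ctr_crypt() when the request arrives in interrupt
 * context, where VSX cannot be used.
 */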
static int p8_aes_ctr_init(struct crypto_tfm *tfm)
{
        const char *alg = crypto_tfm_alg_name(tfm);
        struct crypto_skcipher *fallback;
        struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

        fallback = crypto_alloc_skcipher(alg, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(fallback)) {
                printk(KERN_ERR
                       "Failed to allocate transformation for '%s': %ld\n",
                       alg, PTR_ERR(fallback));
                return PTR_ERR(fallback);
        }

        printk(KERN_INFO "Using '%s' as fallback implementation.\n",
               crypto_skcipher_driver_name(fallback));

        crypto_skcipher_set_flags(
                fallback,
                crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
        ctx->fallback = fallback;

        return 0;
}

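/* Release the fallback skcipher allocated in p8_aes_ctr_init(). */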
static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
{
        struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

        if (ctx->fallback) {
                crypto_free_skcipher(ctx->fallback);
                ctx->fallback = NULL;
        }
}

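/*
 * Expand the key with the P8 assembly. VSX is only touched inside a
 * preempt/pagefault-disabled window so the vector state cannot be
 * clobbered by a context switch. The same key is also installed into
 * the fallback so both paths agree.
 */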
static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
                             unsigned int keylen)
{
        int ret;
        struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

        preempt_disable();
        pagefault_disable();
        enable_kernel_vsx();
        ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
        disable_kernel_vsx();
        pagefault_enable();
        preempt_enable();

        ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
        return ret;
}

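/*
 * Process the final partial block: encrypt the current counter to get
 * one keystream block, XOR the remaining bytes into the destination,
 * and advance the counter.
 */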
static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
                             struct blkcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[AES_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        preempt_disable();
        pagefault_disable();
        enable_kernel_vsx();
        aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
        disable_kernel_vsx();
        pagefault_enable();
        preempt_enable();

        crypto_xor_cpy(dst, keystream, src, nbytes);
        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

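/*
 * CTR en/decryption (the two are identical in CTR mode). In interrupt
 * context the request is handed to the fallback skcipher; otherwise
 * full blocks are processed by the P8 assembly and any trailing
 * partial block by p8_aes_ctr_final().
 */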
static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst,
                            struct scatterlist *src, unsigned int nbytes)
{
        int ret;
        u64 inc;
        struct blkcipher_walk walk;
        struct p8_aes_ctr_ctx *ctx =
                crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

        if (in_interrupt()) {
                SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
                skcipher_request_set_tfm(req, ctx->fallback);
                skcipher_request_set_callback(req, desc->flags, NULL, NULL);
                skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
                ret = crypto_skcipher_encrypt(req);
                skcipher_request_zero(req);
        } else {
                blkcipher_walk_init(&walk, dst, src, nbytes);
                ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
                while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                        preempt_disable();
                        pagefault_disable();
                        enable_kernel_vsx();
                        aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
                                                    walk.dst.virt.addr,
                                                    (nbytes &
                                                     AES_BLOCK_MASK) /
                                                    AES_BLOCK_SIZE,
                                                    &ctx->enc_key,
                                                    walk.iv);
                        disable_kernel_vsx();
                        pagefault_enable();
                        preempt_enable();

                        /*
                         * Update the IV for the blocks just processed;
                         * this is needed mainly so the final partial
                         * block sees the right counter value.
                         */
                        inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
                        if (inc > 0)
                                while (inc--)
                                        crypto_inc(walk.iv, AES_BLOCK_SIZE);

                        nbytes &= AES_BLOCK_SIZE - 1;
                        ret = blkcipher_walk_done(desc, &walk, nbytes);
                }
                if (walk.nbytes) {
                        p8_aes_ctr_final(ctx, &walk);
                        ret = blkcipher_walk_done(desc, &walk, 0);
                }
        }

        return ret;
}

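/*
 * Registered at a high cra_priority (2000) so this implementation is
 * preferred over the generic software ctr(aes) where VMX is available.
 */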
struct crypto_alg p8_aes_ctr_alg = {
        .cra_name = "ctr(aes)",
        .cra_driver_name = "p8_aes_ctr",
        .cra_module = THIS_MODULE,
        .cra_priority = 2000,
        .cra_type = &crypto_blkcipher_type,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
        .cra_alignmask = 0,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
        .cra_init = p8_aes_ctr_init,
        .cra_exit = p8_aes_ctr_exit,
        .cra_blkcipher = {
                .ivsize = AES_BLOCK_SIZE,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .setkey = p8_aes_ctr_setkey,
                .encrypt = p8_aes_ctr_crypt,
                .decrypt = p8_aes_ctr_crypt,
        },
};