/**
 * AES CTR routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"

struct p8_aes_ctr_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
};
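
/*
 * Allocate the software ctr(aes) implementation that is used as a
 * fallback whenever the VSX unit cannot be used (see p8_aes_ctr_crypt()).
 */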
static int p8_aes_ctr_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *fallback;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	fallback = crypto_alloc_skcipher(alg, 0,
					 CRYPTO_ALG_ASYNC |
					 CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
	       crypto_skcipher_driver_name(fallback));

	crypto_skcipher_set_flags(
		fallback,
		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}
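
/* Release the fallback transform allocated in p8_aes_ctr_init(). */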
static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}
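
/*
 * Expand the key for the P8 assembly routines and also key the software
 * fallback so that either path can be used with the same tfm.
 */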
static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);

	return ret;
}
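
/*
 * Handle the trailing partial block: encrypt the counter block once and
 * XOR the resulting keystream into the remaining bytes.
 */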
static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
			     struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	crypto_xor_cpy(dst, keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
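
/*
 * CTR encryption and decryption are the same operation, so one routine
 * serves both.  In interrupt context the VSX registers must not be
 * touched, so the request is handed to the software fallback instead.
 */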
static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	u64 inc;
	struct blkcipher_walk walk;
	struct p8_aes_ctr_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);

		skcipher_request_set_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
						    walk.dst.virt.addr,
						    (nbytes &
						     AES_BLOCK_MASK) /
						    AES_BLOCK_SIZE,
						    &ctx->enc_key,
						    walk.iv);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();

			/* We need to update IV mostly for last bytes/round */
			inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
			if (inc > 0)
				while (inc--)
					crypto_inc(walk.iv, AES_BLOCK_SIZE);

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
		if (walk.nbytes) {
			p8_aes_ctr_final(ctx, &walk);
			ret = blkcipher_walk_done(desc, &walk, 0);
		}
	}

	return ret;
}
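
/*
 * Algorithm descriptor exposed to the crypto API.  The high cra_priority
 * makes this implementation preferred over the generic ctr(aes) on CPUs
 * that provide the VMX/VSX instructions.
 */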
struct crypto_alg p8_aes_ctr_alg = {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "p8_aes_ctr",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
	.cra_init = p8_aes_ctr_init,
	.cra_exit = p8_aes_ctr_exit,
	.cra_blkcipher = {
			  .ivsize = AES_BLOCK_SIZE,
			  .min_keysize = AES_MIN_KEY_SIZE,
			  .max_keysize = AES_MAX_KEY_SIZE,
			  .setkey = p8_aes_ctr_setkey,
			  .encrypt = p8_aes_ctr_crypt,
			  .decrypt = p8_aes_ctr_crypt,
	},
};
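
/*
 * Note: the descriptor above is not registered in this file.  In the vmx
 * driver it is expected to be picked up by the module init code, roughly:
 *
 *	ret = crypto_register_alg(&p8_aes_ctr_alg);
 *
 * with a matching crypto_unregister_alg() on module exit.
 */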