2 * AES XTS routines supporting VMX In-core instructions on Power 8
4 * Copyright (C) 2015 International Business Machines Inc.
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; version 2 only.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
19 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
22 #include <linux/types.h>
23 #include <linux/err.h>
24 #include <linux/crypto.h>
25 #include <linux/delay.h>
26 #include <linux/hardirq.h>
27 #include <asm/switch_to.h>
28 #include <crypto/aes.h>
29 #include <crypto/scatterwalk.h>
30 #include <crypto/xts.h>
31 #include <crypto/skcipher.h>
33 #include "aesp8-ppc.h"
35 struct p8_aes_xts_ctx
{
36 struct crypto_skcipher
*fallback
;
37 struct aes_key enc_key
;
38 struct aes_key dec_key
;
39 struct aes_key tweak_key
;
42 static int p8_aes_xts_init(struct crypto_tfm
*tfm
)
44 const char *alg
= crypto_tfm_alg_name(tfm
);
45 struct crypto_skcipher
*fallback
;
46 struct p8_aes_xts_ctx
*ctx
= crypto_tfm_ctx(tfm
);
48 fallback
= crypto_alloc_skcipher(alg
, 0,
49 CRYPTO_ALG_ASYNC
| CRYPTO_ALG_NEED_FALLBACK
);
50 if (IS_ERR(fallback
)) {
52 "Failed to allocate transformation for '%s': %ld\n",
53 alg
, PTR_ERR(fallback
));
54 return PTR_ERR(fallback
);
56 printk(KERN_INFO
"Using '%s' as fallback implementation.\n",
57 crypto_skcipher_driver_name(fallback
));
59 crypto_skcipher_set_flags(
61 crypto_skcipher_get_flags((struct crypto_skcipher
*)tfm
));
62 ctx
->fallback
= fallback
;
67 static void p8_aes_xts_exit(struct crypto_tfm
*tfm
)
69 struct p8_aes_xts_ctx
*ctx
= crypto_tfm_ctx(tfm
);
72 crypto_free_skcipher(ctx
->fallback
);
77 static int p8_aes_xts_setkey(struct crypto_tfm
*tfm
, const u8
*key
,
81 struct p8_aes_xts_ctx
*ctx
= crypto_tfm_ctx(tfm
);
83 ret
= xts_check_key(tfm
, key
, keylen
);
90 ret
= aes_p8_set_encrypt_key(key
+ keylen
/2, (keylen
/2) * 8, &ctx
->tweak_key
);
91 ret
+= aes_p8_set_encrypt_key(key
, (keylen
/2) * 8, &ctx
->enc_key
);
92 ret
+= aes_p8_set_decrypt_key(key
, (keylen
/2) * 8, &ctx
->dec_key
);
97 ret
+= crypto_skcipher_setkey(ctx
->fallback
, key
, keylen
);
101 static int p8_aes_xts_crypt(struct blkcipher_desc
*desc
,
102 struct scatterlist
*dst
,
103 struct scatterlist
*src
,
104 unsigned int nbytes
, int enc
)
107 u8 tweak
[AES_BLOCK_SIZE
];
109 struct blkcipher_walk walk
;
110 struct p8_aes_xts_ctx
*ctx
=
111 crypto_tfm_ctx(crypto_blkcipher_tfm(desc
->tfm
));
113 if (in_interrupt()) {
114 SKCIPHER_REQUEST_ON_STACK(req
, ctx
->fallback
);
115 skcipher_request_set_tfm(req
, ctx
->fallback
);
116 skcipher_request_set_callback(req
, desc
->flags
, NULL
, NULL
);
117 skcipher_request_set_crypt(req
, src
, dst
, nbytes
, desc
->info
);
118 ret
= enc
? crypto_skcipher_encrypt(req
) : crypto_skcipher_decrypt(req
);
119 skcipher_request_zero(req
);
125 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
127 ret
= blkcipher_walk_virt(desc
, &walk
);
129 memset(tweak
, 0, AES_BLOCK_SIZE
);
130 aes_p8_encrypt(iv
, tweak
, &ctx
->tweak_key
);
132 while ((nbytes
= walk
.nbytes
)) {
134 aes_p8_xts_encrypt(walk
.src
.virt
.addr
, walk
.dst
.virt
.addr
,
135 nbytes
& AES_BLOCK_MASK
, &ctx
->enc_key
, NULL
, tweak
);
137 aes_p8_xts_decrypt(walk
.src
.virt
.addr
, walk
.dst
.virt
.addr
,
138 nbytes
& AES_BLOCK_MASK
, &ctx
->dec_key
, NULL
, tweak
);
140 nbytes
&= AES_BLOCK_SIZE
- 1;
141 ret
= blkcipher_walk_done(desc
, &walk
, nbytes
);
144 disable_kernel_vsx();
/* blkcipher .encrypt hook: XTS-encrypt @nbytes from @src into @dst. */
static int p8_aes_xts_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 1);
}
/* blkcipher .decrypt hook: XTS-decrypt @nbytes from @src into @dst. */
static int p8_aes_xts_decrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 0);
}
165 struct crypto_alg p8_aes_xts_alg
= {
166 .cra_name
= "xts(aes)",
167 .cra_driver_name
= "p8_aes_xts",
168 .cra_module
= THIS_MODULE
,
169 .cra_priority
= 2000,
170 .cra_type
= &crypto_blkcipher_type
,
171 .cra_flags
= CRYPTO_ALG_TYPE_BLKCIPHER
| CRYPTO_ALG_NEED_FALLBACK
,
173 .cra_blocksize
= AES_BLOCK_SIZE
,
174 .cra_ctxsize
= sizeof(struct p8_aes_xts_ctx
),
175 .cra_init
= p8_aes_xts_init
,
176 .cra_exit
= p8_aes_xts_exit
,
178 .ivsize
= AES_BLOCK_SIZE
,
179 .min_keysize
= 2 * AES_MIN_KEY_SIZE
,
180 .max_keysize
= 2 * AES_MAX_KEY_SIZE
,
181 .setkey
= p8_aes_xts_setkey
,
182 .encrypt
= p8_aes_xts_encrypt
,
183 .decrypt
= p8_aes_xts_decrypt
,