/*
 * Driver for /dev/crypto device (aka CryptoDev)
 *
 * Copyright (c) 2010,2011 Nikos Mavrogiannopoulos <nmav@gnutls.org>
 * Portions Copyright (c) 2010 Michael Weiser
 * Portions Copyright (c) 2010 Phil Sutter
 *
 * This file is part of linux cryptodev.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */
26 #include <linux/crypto.h>
28 #include <linux/highmem.h>
29 #include <linux/ioctl.h>
30 #include <linux/random.h>
31 #include <linux/scatterlist.h>
32 #include <linux/uaccess.h>
33 #include <crypto/algapi.h>
34 #include <crypto/hash.h>
35 #include <crypto/cryptodev.h>
36 #include <crypto/aead.h>
37 #include "cryptodev_int.h"
40 struct cryptodev_result
{
41 struct completion completion
;
45 static void cryptodev_complete(struct crypto_async_request
*req
, int err
)
47 struct cryptodev_result
*res
= req
->data
;
49 if (err
== -EINPROGRESS
)
53 complete(&res
->completion
);
56 int cryptodev_cipher_init(struct cipher_data
*out
, const char *alg_name
,
57 uint8_t *keyp
, size_t keylen
, int stream
, int aead
)
61 memset(out
, 0, sizeof(*out
));
64 struct ablkcipher_alg
*alg
;
66 out
->async
.s
= crypto_alloc_ablkcipher(alg_name
, 0, 0);
67 if (unlikely(IS_ERR(out
->async
.s
))) {
68 dprintk(1, KERN_DEBUG
, "Failed to load cipher %s\n", alg_name
);
72 alg
= crypto_ablkcipher_alg(out
->async
.s
);
74 /* Was correct key length supplied? */
75 if (alg
->max_keysize
> 0 &&
76 unlikely((keylen
< alg
->min_keysize
) ||
77 (keylen
> alg
->max_keysize
))) {
78 dprintk(1, KERN_DEBUG
,
79 "Wrong keylen '%zu' for algorithm '%s'. \
81 keylen
, alg_name
, alg
->min_keysize
,
88 out
->blocksize
= crypto_ablkcipher_blocksize(out
->async
.s
);
89 out
->ivsize
= crypto_ablkcipher_ivsize(out
->async
.s
);
90 out
->alignmask
= crypto_ablkcipher_alignmask(out
->async
.s
);
92 ret
= crypto_ablkcipher_setkey(out
->async
.s
, keyp
, keylen
);
94 out
->async
.as
= crypto_alloc_aead(alg_name
, 0, 0);
95 if (unlikely(IS_ERR(out
->async
.as
))) {
96 dprintk(1, KERN_DEBUG
, "Failed to load cipher %s\n", alg_name
);
100 out
->blocksize
= crypto_aead_blocksize(out
->async
.as
);
101 out
->ivsize
= crypto_aead_ivsize(out
->async
.as
);
102 out
->alignmask
= crypto_aead_alignmask(out
->async
.as
);
104 ret
= crypto_aead_setkey(out
->async
.as
, keyp
, keylen
);
108 dprintk(1, KERN_DEBUG
, "Setting key failed for %s-%zu.\n",
114 out
->stream
= stream
;
117 out
->async
.result
= kmalloc(sizeof(*out
->async
.result
), GFP_KERNEL
);
118 if (unlikely(!out
->async
.result
)) {
123 memset(out
->async
.result
, 0, sizeof(*out
->async
.result
));
124 init_completion(&out
->async
.result
->completion
);
127 out
->async
.request
= ablkcipher_request_alloc(out
->async
.s
, GFP_KERNEL
);
128 if (unlikely(!out
->async
.request
)) {
129 dprintk(1, KERN_ERR
, "error allocating async crypto request\n");
134 ablkcipher_request_set_callback(out
->async
.request
,
135 CRYPTO_TFM_REQ_MAY_BACKLOG
,
136 cryptodev_complete
, out
->async
.result
);
138 out
->async
.arequest
= aead_request_alloc(out
->async
.as
, GFP_KERNEL
);
139 if (unlikely(!out
->async
.arequest
)) {
140 dprintk(1, KERN_ERR
, "error allocating async crypto request\n");
145 aead_request_set_callback(out
->async
.arequest
,
146 CRYPTO_TFM_REQ_MAY_BACKLOG
,
147 cryptodev_complete
, out
->async
.result
);
154 if (out
->async
.request
)
155 ablkcipher_request_free(out
->async
.request
);
157 crypto_free_ablkcipher(out
->async
.s
);
159 if (out
->async
.arequest
)
160 aead_request_free(out
->async
.arequest
);
162 crypto_free_aead(out
->async
.as
);
164 kfree(out
->async
.result
);
169 void cryptodev_cipher_deinit(struct cipher_data
*cdata
)
172 if (cdata
->aead
== 0) {
173 if (cdata
->async
.request
)
174 ablkcipher_request_free(cdata
->async
.request
);
176 crypto_free_ablkcipher(cdata
->async
.s
);
178 if (cdata
->async
.arequest
)
179 aead_request_free(cdata
->async
.arequest
);
181 crypto_free_aead(cdata
->async
.as
);
184 kfree(cdata
->async
.result
);
189 static inline int waitfor(struct cryptodev_result
*cr
, ssize_t ret
)
196 wait_for_completion(&cr
->completion
);
197 /* At this point we known for sure the request has finished,
198 * because wait_for_completion above was not interruptible.
199 * This is important because otherwise hardware or driver
200 * might try to access memory which will be freed or reused for
201 * another request. */
203 if (unlikely(cr
->err
)) {
204 dprintk(0, KERN_ERR
, "error from async request: %d\n",
217 ssize_t
cryptodev_cipher_encrypt(struct cipher_data
*cdata
,
218 const struct scatterlist
*src
, struct scatterlist
*dst
,
223 INIT_COMPLETION(cdata
->async
.result
->completion
);
225 if (cdata
->aead
== 0) {
226 ablkcipher_request_set_crypt(cdata
->async
.request
,
227 (struct scatterlist
*)src
, dst
,
228 len
, cdata
->async
.iv
);
229 ret
= crypto_ablkcipher_encrypt(cdata
->async
.request
);
231 aead_request_set_crypt(cdata
->async
.arequest
,
232 (struct scatterlist
*)src
, dst
,
233 len
, cdata
->async
.iv
);
234 ret
= crypto_aead_encrypt(cdata
->async
.arequest
);
237 return waitfor(cdata
->async
.result
, ret
);
240 ssize_t
cryptodev_cipher_decrypt(struct cipher_data
*cdata
,
241 const struct scatterlist
*src
, struct scatterlist
*dst
,
246 INIT_COMPLETION(cdata
->async
.result
->completion
);
247 if (cdata
->aead
== 0) {
248 ablkcipher_request_set_crypt(cdata
->async
.request
,
249 (struct scatterlist
*)src
, dst
,
250 len
, cdata
->async
.iv
);
251 ret
= crypto_ablkcipher_decrypt(cdata
->async
.request
);
253 aead_request_set_crypt(cdata
->async
.arequest
,
254 (struct scatterlist
*)src
, dst
,
255 len
, cdata
->async
.iv
);
256 ret
= crypto_aead_decrypt(cdata
->async
.arequest
);
259 return waitfor(cdata
->async
.result
, ret
);
264 int cryptodev_hash_init(struct hash_data
*hdata
, const char *alg_name
,
265 int hmac_mode
, void *mackey
, size_t mackeylen
)
269 hdata
->async
.s
= crypto_alloc_ahash(alg_name
, 0, 0);
270 if (unlikely(IS_ERR(hdata
->async
.s
))) {
271 dprintk(1, KERN_DEBUG
, "Failed to load transform for %s\n", alg_name
);
275 /* Copy the key from user and set to TFM. */
276 if (hmac_mode
!= 0) {
277 ret
= crypto_ahash_setkey(hdata
->async
.s
, mackey
, mackeylen
);
279 dprintk(1, KERN_DEBUG
,
280 "Setting hmac key failed for %s-%zu.\n",
281 alg_name
, mackeylen
*8);
287 hdata
->digestsize
= crypto_ahash_digestsize(hdata
->async
.s
);
288 hdata
->alignmask
= crypto_ahash_alignmask(hdata
->async
.s
);
290 hdata
->async
.result
= kmalloc(sizeof(*hdata
->async
.result
), GFP_KERNEL
);
291 if (unlikely(!hdata
->async
.result
)) {
296 memset(hdata
->async
.result
, 0, sizeof(*hdata
->async
.result
));
297 init_completion(&hdata
->async
.result
->completion
);
299 hdata
->async
.request
= ahash_request_alloc(hdata
->async
.s
, GFP_KERNEL
);
300 if (unlikely(!hdata
->async
.request
)) {
301 dprintk(0, KERN_ERR
, "error allocating async crypto request\n");
306 ahash_request_set_callback(hdata
->async
.request
,
307 CRYPTO_TFM_REQ_MAY_BACKLOG
,
308 cryptodev_complete
, hdata
->async
.result
);
310 ret
= crypto_ahash_init(hdata
->async
.request
);
312 dprintk(0, KERN_ERR
, "error in crypto_hash_init()\n");
320 ahash_request_free(hdata
->async
.request
);
322 kfree(hdata
->async
.result
);
323 crypto_free_ahash(hdata
->async
.s
);
327 void cryptodev_hash_deinit(struct hash_data
*hdata
)
330 if (hdata
->async
.request
)
331 ahash_request_free(hdata
->async
.request
);
332 kfree(hdata
->async
.result
);
334 crypto_free_ahash(hdata
->async
.s
);
339 int cryptodev_hash_reset(struct hash_data
*hdata
)
343 ret
= crypto_ahash_init(hdata
->async
.request
);
345 dprintk(0, KERN_ERR
, "error in crypto_hash_init()\n");
353 ssize_t
cryptodev_hash_update(struct hash_data
*hdata
,
354 struct scatterlist
*sg
, size_t len
)
358 INIT_COMPLETION(hdata
->async
.result
->completion
);
359 ahash_request_set_crypt(hdata
->async
.request
, sg
, NULL
, len
);
361 ret
= crypto_ahash_update(hdata
->async
.request
);
363 return waitfor(hdata
->async
.result
, ret
);
366 int cryptodev_hash_final(struct hash_data
*hdata
, void* output
)
370 INIT_COMPLETION(hdata
->async
.result
->completion
);
371 ahash_request_set_crypt(hdata
->async
.request
, NULL
, output
, 0);
373 ret
= crypto_ahash_final(hdata
->async
.request
);
375 return waitfor(hdata
->async
.result
, ret
);