/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 *
 * This file is licensed under the terms of the GNU General Public
 * License version 2. This program is licensed "as is" without any
 * warranty of any kind, whether express or implied.
 */

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/sha.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	bool aead;

	__le32 key[8];
	unsigned int key_len;

	/* All the below is AEAD specific */
	u32 alg;
	u32 state_sz;
	u32 ipad[SHA256_DIGEST_SIZE / sizeof(u32)];
	u32 opad[SHA256_DIGEST_SIZE / sizeof(u32)];
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	bool needs_inv;
};

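/* Build the token for a plain skcipher request: a single DIRECTION
 * instruction covering the whole payload, with the IV copied into the
 * command descriptor token area when running in CBC mode.
 */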
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    u32 length)
{
	struct safexcel_token *token;
	unsigned offset = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		offset = AES_BLOCK_SIZE / sizeof(u32);
		memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);

		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = length;
	token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
			EIP197_TOKEN_STAT_LAST_HASH;
	token[0].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_OUTPUT;
}

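/* Build the tokens for an AEAD request: hash-only processing of the
 * associated data, crypto+hash of the payload, then insertion of the
 * computed digest (encryption) or retrieval and verification of the
 * received digest (decryption).
 */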
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;
	unsigned offset = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		offset = AES_BLOCK_SIZE / sizeof(u32);
		memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);

		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = assoclen;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[1].packet_length = cryptlen;
	token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
	token[1].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	if (direction == SAFEXCEL_ENCRYPT) {
		token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[3].packet_length = digestsize |
					 EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}
}

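/* Expand and cache the AES key. On EIP197, changing the key while a
 * context record is already DMA-mapped requires invalidating the cached
 * context before it is reused.
 */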
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = crypto_aes_expand_key(&aes, key, len);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->version == EIP197 && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

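/* Split the authenc() key into its encryption and authentication parts,
 * precompute the HMAC ipad/opad states using the safexcel ahash
 * implementations, and request a context invalidation if anything changed.
 */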
static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->key))
		goto badkey;

	/* Encryption key */
	if (priv->version == EIP197 && ctx->base.ctxr_dma &&
	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
		ctx->base.needs_inv = true;

	/* Auth key */
	switch (ctx->alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
				    CRYPTO_TFM_RES_MASK);

	if (priv->version == EIP197 && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	memcpy(ctx->key, keys.enckey, keys.enckeylen);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

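/* Fill in the context control words for this request: operation type,
 * cipher mode, HMAC digest algorithm for AEAD, AES key size and the
 * resulting context record size.
 */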
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ctrl_size;

	if (ctx->aead) {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;

		/* The decryption control type is a combination of the
		 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
		 * types.
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
	}

	cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
	cdesc->control_data.control1 |= ctx->mode;

	if (ctx->aead)
		cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
						ctx->alg;

	switch (ctx->key_len) {
	case AES_KEYSIZE_128:
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
		break;
	case AES_KEYSIZE_192:
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
		break;
	case AES_KEYSIZE_256:
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
		break;
	default:
		dev_err(priv->dev, "aes keysize not supported: %u\n",
			ctx->key_len);
		return -EINVAL;
	}

	ctrl_size = ctx->key_len / sizeof(u32);
	if (ctx->aead)
		/* Take in account the ipad+opad digests */
		ctrl_size += ctx->state_sz / sizeof(u32) * 2;
	cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);

	return 0;
}

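/* Completion handler for regular cipher/AEAD requests: walk the result
 * descriptors, collect any error and unmap the source and destination
 * scatterlists.
 */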
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	spin_lock_bh(&priv->ring[ring].egress_lock);
	do {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	} while (!rdesc->last_seg);

	safexcel_complete(priv, ring);
	spin_unlock_bh(&priv->ring[ring].egress_lock);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, cryptlen),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, cryptlen),
			     DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst,
			     sg_nents_for_len(dst, cryptlen),
			     DMA_FROM_DEVICE);
	}

	*should_complete = true;

	return ndesc;
}

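/* Queue a cipher or AEAD request to the engine: DMA-map the scatterlists,
 * copy the key (and the ipad/opad states for AEAD) into the context
 * record, then build one command descriptor per source segment and one
 * result descriptor per destination segment.
 */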
static int safexcel_aes_send(struct crypto_async_request *base, int ring,
			     struct safexcel_request *request,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_result_desc *rdesc;
	struct scatterlist *sg;
	unsigned int totlen = cryptlen + assoclen;
	int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
	int i, ret = 0;

	if (src == dst) {
		nr_src = dma_map_sg(priv->dev, src,
				    sg_nents_for_len(src, totlen),
				    DMA_BIDIRECTIONAL);
		nr_dst = nr_src;
		if (!nr_src)
			return -EINVAL;
	} else {
		nr_src = dma_map_sg(priv->dev, src,
				    sg_nents_for_len(src, totlen),
				    DMA_TO_DEVICE);
		if (!nr_src)
			return -EINVAL;

		nr_dst = dma_map_sg(priv->dev, dst,
				    sg_nents_for_len(dst, totlen),
				    DMA_FROM_DEVICE);
		if (!nr_dst) {
			dma_unmap_sg(priv->dev, src,
				     sg_nents_for_len(src, totlen),
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (ctx->aead) {
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) / sizeof(u32),
		       ctx->opad, ctx->state_sz);
	}

	spin_lock_bh(&priv->ring[ring].egress_lock);

	/* command descriptors */
	for_each_sg(src, sg, nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued - len < 0)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;

		if (n_cdesc == 1) {
			safexcel_context_control(ctx, base, sreq, cdesc);
			if (ctx->aead)
				safexcel_aead_token(ctx, iv, cdesc,
						    sreq->direction, cryptlen,
						    assoclen, digestsize);
			else
				safexcel_skcipher_token(ctx, iv, cdesc,
							cryptlen);
		}

		queued -= len;
		if (!queued)
			break;
	}

	/* result descriptors */
	for_each_sg(dst, sg, nr_dst, i) {
		bool first = !i, last = (i == nr_dst - 1);
		u32 len = sg_dma_len(sg);

		rdesc = safexcel_add_rdesc(priv, ring, first, last,
					   sg_dma_address(sg), len);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		n_rdesc++;
	}

	spin_unlock_bh(&priv->ring[ring].egress_lock);

	request->req = base;

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	spin_unlock_bh(&priv->ring[ring].egress_lock);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, totlen),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, totlen),
			     DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst,
			     sg_nents_for_len(dst, totlen),
			     DMA_FROM_DEVICE);
	}

	return ret;
}

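/* Completion handler for a context invalidation request. If the tfm is
 * being torn down the context record is freed, otherwise the original
 * request is moved to a newly selected ring and re-queued.
 */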
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	spin_lock_bh(&priv->ring[ring].egress_lock);
	do {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (rdesc->result_data.error_code) {
			dev_err(priv->dev, "cipher: invalidate: result descriptor error (%d)\n",
				rdesc->result_data.error_code);
			*ret = -EIO;
		}

		ndesc++;
	} while (!rdesc->last_seg);

	safexcel_complete(priv, ring);
	spin_unlock_bh(&priv->ring[ring].egress_lock);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

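/* Send a cache invalidation command for this context; it uses exactly one
 * command and one result descriptor.
 */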
static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, struct safexcel_request *request,
				    int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring,
					request);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  struct safexcel_request *request,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(priv->version == EIP97 && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, request, commands,
					       results);
	else
		ret = safexcel_aes_send(async, ring, request, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, req->iv,
					commands, results);
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      struct safexcel_request *request, int *commands,
			      int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(priv->version == EIP97 && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, request, commands,
					       results);
	else
		ret = safexcel_aes_send(async, ring, request, sreq, req->src,
					req->dst, req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	return ret;
}

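/* Synchronously invalidate the context record before a tfm is freed:
 * queue an invalidation request and wait for its completion callback.
 */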
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			 "cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

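/* Common handler for all AES requests: allocate the context record on
 * first use, flag an invalidation if the key changed, then enqueue the
 * request on the ring this context is bound to.
 */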
static int safexcel_aes(struct crypto_async_request *base,
			struct safexcel_cipher_req *sreq,
			enum safexcel_cipher_direction dir, u32 mode)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;
	ctx->mode = mode;

	if (ctx->base.ctxr) {
		if (priv->version == EIP197 && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_aes(&req->base, skcipher_request_ctx(req),
			    SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB);
}

static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_aes(&req->base, skcipher_request_ctx(req),
			    SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	return 0;
}

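/* Common exit path: wipe the key material and report whether a context
 * record was allocated, so the callers know if an invalidation is needed.
 */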
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->version == EIP197) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->version == EIP197) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_ecb_aes_encrypt,
		.decrypt = safexcel_ecb_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_aes(&req->base, skcipher_request_ctx(req),
			    SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC);
}

static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_aes(&req->base, skcipher_request_ctx(req),
			    SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC);
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_cbc_aes_encrypt,
		.decrypt = safexcel_cbc_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_aes(&req->base, creq, SAFEXCEL_ENCRYPT,
			    CONTEXT_CONTROL_CRYPTO_MODE_CBC);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_aes(&req->base, creq, SAFEXCEL_DECRYPT,
			    CONTEXT_CONTROL_CRYPTO_MODE_CBC);
}

static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}

static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};