Revert "unicode: Don't special case ignorable code points"
[linux.git] / drivers / crypto / starfive / jh7110-aes.c
blob86a1a1fa9f8f9f54c52d81aa91ced854d6aea7b8
// SPDX-License-Identifier: GPL-2.0
/*
 * StarFive AES acceleration driver
 *
 * Copyright (c) 2022 StarFive Technology
 */

#include <crypto/engine.h>
#include <crypto/gcm.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include "jh7110-cryp.h"
#include <linux/err.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/string.h>

#define STARFIVE_AES_REGS_OFFSET	0x100
#define STARFIVE_AES_AESDIO0R		(STARFIVE_AES_REGS_OFFSET + 0x0)
#define STARFIVE_AES_KEY0		(STARFIVE_AES_REGS_OFFSET + 0x4)
#define STARFIVE_AES_KEY1		(STARFIVE_AES_REGS_OFFSET + 0x8)
#define STARFIVE_AES_KEY2		(STARFIVE_AES_REGS_OFFSET + 0xC)
#define STARFIVE_AES_KEY3		(STARFIVE_AES_REGS_OFFSET + 0x10)
#define STARFIVE_AES_KEY4		(STARFIVE_AES_REGS_OFFSET + 0x14)
#define STARFIVE_AES_KEY5		(STARFIVE_AES_REGS_OFFSET + 0x18)
#define STARFIVE_AES_KEY6		(STARFIVE_AES_REGS_OFFSET + 0x1C)
#define STARFIVE_AES_KEY7		(STARFIVE_AES_REGS_OFFSET + 0x20)
#define STARFIVE_AES_CSR		(STARFIVE_AES_REGS_OFFSET + 0x24)
#define STARFIVE_AES_IV0		(STARFIVE_AES_REGS_OFFSET + 0x28)
#define STARFIVE_AES_IV1		(STARFIVE_AES_REGS_OFFSET + 0x2C)
#define STARFIVE_AES_IV2		(STARFIVE_AES_REGS_OFFSET + 0x30)
#define STARFIVE_AES_IV3		(STARFIVE_AES_REGS_OFFSET + 0x34)
#define STARFIVE_AES_NONCE0		(STARFIVE_AES_REGS_OFFSET + 0x3C)
#define STARFIVE_AES_NONCE1		(STARFIVE_AES_REGS_OFFSET + 0x40)
#define STARFIVE_AES_NONCE2		(STARFIVE_AES_REGS_OFFSET + 0x44)
#define STARFIVE_AES_NONCE3		(STARFIVE_AES_REGS_OFFSET + 0x48)
#define STARFIVE_AES_ALEN0		(STARFIVE_AES_REGS_OFFSET + 0x4C)
#define STARFIVE_AES_ALEN1		(STARFIVE_AES_REGS_OFFSET + 0x50)
#define STARFIVE_AES_MLEN0		(STARFIVE_AES_REGS_OFFSET + 0x54)
#define STARFIVE_AES_MLEN1		(STARFIVE_AES_REGS_OFFSET + 0x58)
#define STARFIVE_AES_IVLEN		(STARFIVE_AES_REGS_OFFSET + 0x5C)

#define FLG_MODE_MASK			GENMASK(2, 0)
#define FLG_ENCRYPT			BIT(4)

/* Misc */
#define CCM_B0_ADATA			0x40
#define AES_BLOCK_32			(AES_BLOCK_SIZE / sizeof(u32))

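/*
 * The three helpers below poll a CSR status bit with
 * readl_relaxed_poll_timeout() at a 10 us interval and give up after
 * 100 ms, returning -ETIMEDOUT.
 */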
static inline int starfive_aes_wait_busy(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  !(status & STARFIVE_AES_BUSY), 10, 100000);
}

static inline int starfive_aes_wait_keydone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_KEY_DONE), 10, 100000);
}

static inline int starfive_aes_wait_gcmdone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_GCM_DONE), 10, 100000);
}

static inline int is_gcm(struct starfive_cryp_dev *cryp)
{
	return (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM;
}

static inline bool is_encrypt(struct starfive_cryp_dev *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

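/*
 * Kick off AEAD processing by setting the mode-specific start bit in
 * the CSR.  For GCM the GCM_DONE bit is polled before continuing
 * (the hardware presumably precomputes its GHASH state at this
 * point); CCM needs no such wait.
 */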
static void starfive_aes_aead_hw_start(struct starfive_cryp_ctx *ctx, u32 hw_mode)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	unsigned int value;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_GCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		starfive_aes_wait_gcmdone(cryp);
		break;
	case STARFIVE_AES_MODE_CCM:
		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_CCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		break;
	}
}

static inline void starfive_aes_set_alen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(upper_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN0);
	writel(lower_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN1);
}

static inline void starfive_aes_set_mlen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(upper_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN0);
	writel(lower_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN1);
}

static inline int starfive_aes_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int starfive_aes_write_iv(struct starfive_cryp_ctx *ctx, u32 *iv)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(iv[0], cryp->base + STARFIVE_AES_IV0);
	writel(iv[1], cryp->base + STARFIVE_AES_IV1);
	writel(iv[2], cryp->base + STARFIVE_AES_IV2);

	if (is_gcm(cryp)) {
		if (starfive_aes_wait_gcmdone(cryp))
			return -ETIMEDOUT;

		return 0;
	}

	writel(iv[3], cryp->base + STARFIVE_AES_IV3);

	return 0;
}

static inline void starfive_aes_get_iv(struct starfive_cryp_dev *cryp, u32 *iv)
{
	iv[0] = readl(cryp->base + STARFIVE_AES_IV0);
	iv[1] = readl(cryp->base + STARFIVE_AES_IV1);
	iv[2] = readl(cryp->base + STARFIVE_AES_IV2);
	iv[3] = readl(cryp->base + STARFIVE_AES_IV3);
}

static inline void starfive_aes_write_nonce(struct starfive_cryp_ctx *ctx, u32 *nonce)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(nonce[0], cryp->base + STARFIVE_AES_NONCE0);
	writel(nonce[1], cryp->base + STARFIVE_AES_NONCE1);
	writel(nonce[2], cryp->base + STARFIVE_AES_NONCE2);
	writel(nonce[3], cryp->base + STARFIVE_AES_NONCE3);
}

static int starfive_aes_write_key(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 *key = (u32 *)ctx->key;

	if (ctx->keylen >= AES_KEYSIZE_128) {
		writel(key[0], cryp->base + STARFIVE_AES_KEY0);
		writel(key[1], cryp->base + STARFIVE_AES_KEY1);
		writel(key[2], cryp->base + STARFIVE_AES_KEY2);
		writel(key[3], cryp->base + STARFIVE_AES_KEY3);
	}

	if (ctx->keylen >= AES_KEYSIZE_192) {
		writel(key[4], cryp->base + STARFIVE_AES_KEY4);
		writel(key[5], cryp->base + STARFIVE_AES_KEY5);
	}

	if (ctx->keylen >= AES_KEYSIZE_256) {
		writel(key[6], cryp->base + STARFIVE_AES_KEY6);
		writel(key[7], cryp->base + STARFIVE_AES_KEY7);
	}

	if (starfive_aes_wait_keydone(cryp))
		return -ETIMEDOUT;

	return 0;
}

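/*
 * Build the CCM B0 block (RFC 3610): the flags byte carries the Adata
 * bit and the encoded tag length on top of L' taken from the IV,
 * followed by the nonce and the message length in the trailing bytes.
 * Only the low 16 bits of textlen are encoded here, which effectively
 * caps the message length field at 2 bytes.
 */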
static int starfive_aes_ccm_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	unsigned int textlen;

	memcpy(iv, cryp->req.areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->assoclen)
		b0[0] |= CCM_B0_ADATA;

	textlen = cryp->total_in;

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	starfive_aes_write_nonce(ctx, (u32 *)b0);

	return 0;
}

static int starfive_aes_hw_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 hw_mode;

	/* reset */
	rctx->csr.aes.v = 0;
	rctx->csr.aes.aesrst = 1;
	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);

	/* csr setup */
	hw_mode = cryp->flags & FLG_MODE_MASK;

	rctx->csr.aes.v = 0;

	switch (ctx->keylen) {
	case AES_KEYSIZE_128:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_128;
		break;
	case AES_KEYSIZE_192:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_192;
		break;
	case AES_KEYSIZE_256:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_256;
		break;
	}

	rctx->csr.aes.mode = hw_mode;
	rctx->csr.aes.cmode = !is_encrypt(cryp);
	rctx->csr.aes.stmode = STARFIVE_AES_MODE_XFB_1;

	if (cryp->side_chan) {
		rctx->csr.aes.delay_aes = 1;
		rctx->csr.aes.vaes_start = 1;
	}

	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);

	cryp->err = starfive_aes_write_key(ctx);
	if (cryp->err)
		return cryp->err;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		starfive_aes_set_alen(ctx);
		starfive_aes_set_mlen(ctx);
		writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN);
		starfive_aes_aead_hw_start(ctx, hw_mode);
		starfive_aes_write_iv(ctx, (void *)cryp->req.areq->iv);
		break;
	case STARFIVE_AES_MODE_CCM:
		starfive_aes_set_alen(ctx);
		starfive_aes_set_mlen(ctx);
		starfive_aes_ccm_init(ctx);
		starfive_aes_aead_hw_start(ctx, hw_mode);
		break;
	case STARFIVE_AES_MODE_CBC:
	case STARFIVE_AES_MODE_CTR:
		starfive_aes_write_iv(ctx, (void *)cryp->req.sreq->iv);
		break;
	default:
		break;
	}

	return cryp->err;
}

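/*
 * Read back the authentication tag once the engine is idle.  GCM
 * exposes the tag through the NONCE registers, while CCM streams it
 * out of the data FIFO.  On encryption the tag is appended to the
 * destination scatterlist; on decryption it is compared against the
 * tag copied from the source using crypto_memneq().
 */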
static int starfive_aes_read_authtag(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	int i;

	if (starfive_aes_wait_busy(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout waiting for tag generation.");

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM) {
		cryp->tag_out[0] = readl(cryp->base + STARFIVE_AES_NONCE0);
		cryp->tag_out[1] = readl(cryp->base + STARFIVE_AES_NONCE1);
		cryp->tag_out[2] = readl(cryp->base + STARFIVE_AES_NONCE2);
		cryp->tag_out[3] = readl(cryp->base + STARFIVE_AES_NONCE3);
	} else {
		for (i = 0; i < AES_BLOCK_32; i++)
			cryp->tag_out[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);
	}

	if (is_encrypt(cryp)) {
		scatterwalk_map_and_copy(cryp->tag_out, rctx->out_sg,
					 cryp->total_in, cryp->authsize, 1);
	} else {
		if (crypto_memneq(cryp->tag_in, cryp->tag_out, cryp->authsize))
			return -EBADMSG;
	}

	return 0;
}

static void starfive_aes_finish_req(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	int err = cryp->err;

	if (!err && cryp->authsize)
		err = starfive_aes_read_authtag(ctx);

	if (!err && ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC ||
		     (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CTR))
		starfive_aes_get_iv(cryp, (void *)cryp->req.sreq->iv);

	if (cryp->authsize)
		crypto_finalize_aead_request(cryp->engine, cryp->req.areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req.sreq,
						 err);
}

static int starfive_aes_gcm_write_adata(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 *buffer;
	int total_len, loop;

	total_len = ALIGN(cryp->assoclen, AES_BLOCK_SIZE) / sizeof(unsigned int);
	buffer = (u32 *)rctx->adata;

	for (loop = 0; loop < total_len; loop += 4) {
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE0);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE1);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE2);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE3);
		buffer++;
	}

	if (starfive_aes_wait_gcmdone(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout processing gcm aad block");

	return 0;
}

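/*
 * For CCM the first 14 AAD bytes are written as two single bytes plus
 * three words; together with the 2-byte AAD length field that the
 * hardware presumably prepends, this fills the first 16-byte AAD
 * block (B1).  The remaining AAD is fed in whole AES blocks.
 */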
static int starfive_aes_ccm_write_adata(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 *buffer;
	u8 *ci;
	int total_len, loop;

	total_len = cryp->assoclen;

	ci = rctx->adata;
	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
	ci++;
	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
	ci++;
	total_len -= 2;
	buffer = (u32 *)ci;

	for (loop = 0; loop < 3; loop++, buffer++)
		writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);

	total_len -= 12;

	while (total_len > 0) {
		for (loop = 0; loop < AES_BLOCK_32; loop++, buffer++)
			writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);

		total_len -= AES_BLOCK_SIZE;
	}

	if (starfive_aes_wait_busy(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout processing ccm aad block");

	return 0;
}

static void starfive_aes_dma_done(void *param)
{
	struct starfive_cryp_dev *cryp = param;

	complete(&cryp->dma_done);
}

static void starfive_aes_dma_init(struct starfive_cryp_dev *cryp)
{
	cryp->cfg_in.direction = DMA_MEM_TO_DEV;
	cryp->cfg_in.src_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES;
	cryp->cfg_in.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cryp->cfg_in.src_maxburst = cryp->dma_maxburst;
	cryp->cfg_in.dst_maxburst = cryp->dma_maxburst;
	cryp->cfg_in.dst_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET;

	dmaengine_slave_config(cryp->tx, &cryp->cfg_in);

	cryp->cfg_out.direction = DMA_DEV_TO_MEM;
	cryp->cfg_out.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cryp->cfg_out.dst_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES;
	cryp->cfg_out.src_maxburst = 4;
	cryp->cfg_out.dst_maxburst = 4;
	cryp->cfg_out.src_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET;

	dmaengine_slave_config(cryp->rx, &cryp->cfg_out);

	init_completion(&cryp->dma_done);
}

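/*
 * Run one DMA transfer of a single scatterlist entry in each
 * direction.  Lengths are rounded up to the AES block size for the
 * hardware, with the original sg_dma_len() values saved and restored
 * around the transfer.  The device-to-memory descriptor is issued
 * first so the output FIFO drains while the input side feeds it;
 * completion is signalled from the RX callback, with a 1 s timeout.
 */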
static int starfive_aes_dma_xfer(struct starfive_cryp_dev *cryp,
				 struct scatterlist *src,
				 struct scatterlist *dst,
				 int len)
{
	struct dma_async_tx_descriptor *in_desc, *out_desc;
	union starfive_alg_cr alg_cr;
	int ret = 0, in_save, out_save;

	alg_cr.v = 0;
	alg_cr.start = 1;
	alg_cr.aes_dma_en = 1;
	writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET);

	in_save = sg_dma_len(src);
	out_save = sg_dma_len(dst);

	writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_IN_LEN_OFFSET);
	writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_OUT_LEN_OFFSET);

	sg_dma_len(src) = ALIGN(len, AES_BLOCK_SIZE);
	sg_dma_len(dst) = ALIGN(len, AES_BLOCK_SIZE);

	out_desc = dmaengine_prep_slave_sg(cryp->rx, dst, 1, DMA_DEV_TO_MEM,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!out_desc) {
		ret = -EINVAL;
		goto dma_err;
	}

	out_desc->callback = starfive_aes_dma_done;
	out_desc->callback_param = cryp;

	reinit_completion(&cryp->dma_done);
	dmaengine_submit(out_desc);
	dma_async_issue_pending(cryp->rx);

	in_desc = dmaengine_prep_slave_sg(cryp->tx, src, 1, DMA_MEM_TO_DEV,
					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!in_desc) {
		ret = -EINVAL;
		goto dma_err;
	}

	dmaengine_submit(in_desc);
	dma_async_issue_pending(cryp->tx);

	if (!wait_for_completion_timeout(&cryp->dma_done,
					 msecs_to_jiffies(1000)))
		ret = -ETIMEDOUT;

dma_err:
	sg_dma_len(src) = in_save;
	sg_dma_len(dst) = out_save;

	alg_cr.v = 0;
	alg_cr.clear = 1;
	writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET);

	return ret;
}

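/*
 * Walk the source and destination scatterlists, handing them to the
 * DMA engine one entry at a time.  In-place requests advance both
 * lists in lockstep; out-of-place requests transfer min(src, dst)
 * bytes per step and use scatterwalk_ffwd() to resume mid-entry in
 * whichever list still has data left.
 */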
static int starfive_aes_map_sg(struct starfive_cryp_dev *cryp,
			       struct scatterlist *src,
			       struct scatterlist *dst)
{
	struct scatterlist *stsg, *dtsg;
	struct scatterlist _src[2], _dst[2];
	unsigned int remain = cryp->total_in;
	unsigned int len, src_nents, dst_nents;
	int ret;

	if (src == dst) {
		for (stsg = src, dtsg = dst; remain > 0;
		     stsg = sg_next(stsg), dtsg = sg_next(dtsg)) {
			src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);
			if (src_nents == 0)
				return dev_err_probe(cryp->dev, -ENOMEM,
						     "dma_map_sg error\n");

			dst_nents = src_nents;
			len = min(sg_dma_len(stsg), remain);

			ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len);
			dma_unmap_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);
			if (ret)
				return ret;

			remain -= len;
		}
	} else {
		for (stsg = src, dtsg = dst;;) {
			src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);
			if (src_nents == 0)
				return dev_err_probe(cryp->dev, -ENOMEM,
						     "dma_map_sg src error\n");

			dst_nents = dma_map_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE);
			if (dst_nents == 0)
				return dev_err_probe(cryp->dev, -ENOMEM,
						     "dma_map_sg dst error\n");

			len = min(sg_dma_len(stsg), sg_dma_len(dtsg));
			len = min(len, remain);

			ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len);
			dma_unmap_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);
			dma_unmap_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE);
			if (ret)
				return ret;

			remain -= len;
			if (remain == 0)
				break;

			if (sg_dma_len(stsg) - len) {
				stsg = scatterwalk_ffwd(_src, stsg, len);
				dtsg = sg_next(dtsg);
			} else if (sg_dma_len(dtsg) - len) {
				dtsg = scatterwalk_ffwd(_dst, dtsg, len);
				stsg = sg_next(stsg);
			} else {
				stsg = sg_next(stsg);
				dtsg = sg_next(dtsg);
			}
		}
	}

	return 0;
}

static int starfive_aes_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req =
		container_of(areq, struct skcipher_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct starfive_cryp_request_ctx *rctx = skcipher_request_ctx(req);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	int ret;

	cryp->req.sreq = req;
	cryp->total_in = req->cryptlen;
	cryp->total_out = req->cryptlen;
	cryp->assoclen = 0;
	cryp->authsize = 0;

	rctx->in_sg = req->src;
	rctx->out_sg = req->dst;

	ctx->rctx = rctx;

	ret = starfive_aes_hw_init(ctx);
	if (ret)
		return ret;

	if (!cryp->total_in)
		goto finish_req;

	starfive_aes_dma_init(cryp);

	ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);
	if (ret)
		return ret;

finish_req:
	starfive_aes_finish_req(ctx);

	return 0;
}

static int starfive_aes_init_tfm(struct crypto_skcipher *tfm,
				 const char *alg_name)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->cryp = starfive_cryp_find_dev(ctx);
	if (!ctx->cryp)
		return -ENODEV;

	ctx->skcipher_fbk = crypto_alloc_skcipher(alg_name, 0,
						  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->skcipher_fbk))
		return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->skcipher_fbk),
				     "%s() failed to allocate fallback for %s\n",
				     __func__, alg_name);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
				    crypto_skcipher_reqsize(ctx->skcipher_fbk));

	return 0;
}

static void starfive_aes_exit_tfm(struct crypto_skcipher *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->skcipher_fbk);
}

static int starfive_aes_aead_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req =
		container_of(areq, struct aead_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = aead_request_ctx(req);
	struct scatterlist _src[2], _dst[2];
	int ret;

	cryp->req.areq = req;
	cryp->assoclen = req->assoclen;
	cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));

	rctx->in_sg = scatterwalk_ffwd(_src, req->src, cryp->assoclen);
	if (req->src == req->dst)
		rctx->out_sg = rctx->in_sg;
	else
		rctx->out_sg = scatterwalk_ffwd(_dst, req->dst, cryp->assoclen);

	if (is_encrypt(cryp)) {
		cryp->total_in = req->cryptlen;
		cryp->total_out = req->cryptlen;
	} else {
		cryp->total_in = req->cryptlen - cryp->authsize;
		cryp->total_out = cryp->total_in;
		scatterwalk_map_and_copy(cryp->tag_in, req->src,
					 cryp->total_in + cryp->assoclen,
					 cryp->authsize, 0);
	}

	if (cryp->assoclen) {
		rctx->adata = kzalloc(cryp->assoclen + AES_BLOCK_SIZE, GFP_KERNEL);
		if (!rctx->adata)
			return dev_err_probe(cryp->dev, -ENOMEM,
					     "Failed to alloc memory for adata");

		if (sg_copy_to_buffer(req->src, sg_nents_for_len(req->src, cryp->assoclen),
				      rctx->adata, cryp->assoclen) != cryp->assoclen) {
			/* Free the AAD copy on error so it is not leaked. */
			kfree(rctx->adata);
			return -EINVAL;
		}
	}

	/* Zero the bytes after the payload so block-size padding is clean. */
	if (cryp->total_in)
		sg_zero_buffer(rctx->in_sg, sg_nents(rctx->in_sg),
			       sg_dma_len(rctx->in_sg) - cryp->total_in,
			       cryp->total_in);

	ctx->rctx = rctx;

	ret = starfive_aes_hw_init(ctx);
	if (ret)
		return ret;

	if (!cryp->assoclen)
		goto write_text;

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM)
		ret = starfive_aes_ccm_write_adata(ctx);
	else
		ret = starfive_aes_gcm_write_adata(ctx);

	kfree(rctx->adata);

	if (ret)
		return ret;

write_text:
	if (!cryp->total_in)
		goto finish_req;

	starfive_aes_dma_init(cryp);

	ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);
	if (ret)
		return ret;

finish_req:
	starfive_aes_finish_req(ctx);
	return 0;
}

static int starfive_aes_aead_init_tfm(struct crypto_aead *tfm,
				      const char *alg_name)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	ctx->cryp = starfive_cryp_find_dev(ctx);
	if (!ctx->cryp)
		return -ENODEV;

	ctx->aead_fbk = crypto_alloc_aead(alg_name, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->aead_fbk))
		return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->aead_fbk),
				     "%s() failed to allocate fallback for %s\n",
				     __func__, alg_name);

	crypto_aead_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
				crypto_aead_reqsize(ctx->aead_fbk));

	return 0;
}

static void starfive_aes_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->aead_fbk);
}

static bool starfive_aes_check_unaligned(struct starfive_cryp_dev *cryp,
					 struct scatterlist *src,
					 struct scatterlist *dst)
{
	struct scatterlist *tsg;
	int i;

	for_each_sg(src, tsg, sg_nents(src), i)
		if (!IS_ALIGNED(tsg->offset, sizeof(u32)) ||
		    (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) &&
		     !sg_is_last(tsg)))
			return true;

	if (src != dst)
		for_each_sg(dst, tsg, sg_nents(dst), i)
			if (!IS_ALIGNED(tsg->offset, sizeof(u32)) ||
			    (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) &&
			     !sg_is_last(tsg)))
				return true;

	return false;
}

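/*
 * Requests whose scatterlists the hardware cannot handle (an entry
 * offset that is not 32-bit aligned, or a non-final entry whose
 * length is not a multiple of the AES block size) are redirected to
 * the software fallback tfm below.
 */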
static int starfive_aes_do_fallback(struct skcipher_request *req, bool enc)
{
	struct starfive_cryp_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	skcipher_request_set_tfm(subreq, ctx->skcipher_fbk);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete,
				      req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int starfive_aes_crypt(struct skcipher_request *req, unsigned long flags)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	unsigned int blocksize_align = crypto_skcipher_blocksize(tfm) - 1;

	cryp->flags = flags;

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_ECB ||
	    (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC)
		if (req->cryptlen & blocksize_align)
			return -EINVAL;

	if (starfive_aes_check_unaligned(cryp, req->src, req->dst))
		return starfive_aes_do_fallback(req, is_encrypt(cryp));

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int starfive_aes_aead_do_fallback(struct aead_request *req, bool enc)
{
	struct starfive_cryp_ctx *ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->aead_fbk);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return enc ? crypto_aead_encrypt(subreq) :
		     crypto_aead_decrypt(subreq);
}

static int starfive_aes_aead_crypt(struct aead_request *req, unsigned long flags)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct scatterlist *src, *dst, _src[2], _dst[2];

	cryp->flags = flags;

	/*
	 * aes-ccm does not support tag verification for non-aligned text,
	 * use fallback for ccm decryption instead.
	 */
	if (((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM) &&
	    !is_encrypt(cryp))
		return starfive_aes_aead_do_fallback(req, 0);

	src = scatterwalk_ffwd(_src, req->src, req->assoclen);

	if (req->src == req->dst)
		dst = src;
	else
		dst = scatterwalk_ffwd(_dst, req->dst, req->assoclen);

	if (starfive_aes_check_unaligned(cryp, src, dst))
		return starfive_aes_aead_do_fallback(req, is_encrypt(cryp));

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int starfive_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (!key || !keylen)
		return -EINVAL;

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return crypto_skcipher_setkey(ctx->skcipher_fbk, key, keylen);
}

static int starfive_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				    unsigned int keylen)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (!key || !keylen)
		return -EINVAL;

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return crypto_aead_setkey(ctx->aead_fbk, key, keylen);
}

static int starfive_aes_gcm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = crypto_gcm_check_authsize(authsize);
	if (ret)
		return ret;

	return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
}

static int starfive_aes_ccm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
}

static int starfive_aes_ecb_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB | FLG_ENCRYPT);
}

static int starfive_aes_ecb_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB);
}

static int starfive_aes_cbc_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC | FLG_ENCRYPT);
}

static int starfive_aes_cbc_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC);
}

static int starfive_aes_ctr_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR | FLG_ENCRYPT);
}

static int starfive_aes_ctr_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR);
}

static int starfive_aes_gcm_encrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM | FLG_ENCRYPT);
}

static int starfive_aes_gcm_decrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM);
}

static int starfive_aes_ccm_encrypt(struct aead_request *req)
{
	int ret;

	ret = starfive_aes_ccm_check_iv(req->iv);
	if (ret)
		return ret;

	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM | FLG_ENCRYPT);
}

static int starfive_aes_ccm_decrypt(struct aead_request *req)
{
	int ret;

	ret = starfive_aes_ccm_check_iv(req->iv);
	if (ret)
		return ret;

	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM);
}

static int starfive_aes_ecb_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "ecb(aes-generic)");
}

static int starfive_aes_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "cbc(aes-generic)");
}

static int starfive_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "ctr(aes-generic)");
}

static int starfive_aes_ccm_init_tfm(struct crypto_aead *tfm)
{
	return starfive_aes_aead_init_tfm(tfm, "ccm_base(ctr(aes-generic),cbcmac(aes-generic))");
}

static int starfive_aes_gcm_init_tfm(struct crypto_aead *tfm)
{
	return starfive_aes_aead_init_tfm(tfm, "gcm_base(ctr(aes-generic),ghash-generic)");
}

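/*
 * Algorithm registrations.  Every entry is async, carries a software
 * fallback, and registers at priority 200 so it takes precedence over
 * the lower-priority generic implementations.
 */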
static struct skcipher_engine_alg skcipher_algs[] = {
{
	.base.init = starfive_aes_ecb_init_tfm,
	.base.exit = starfive_aes_exit_tfm,
	.base.setkey = starfive_aes_setkey,
	.base.encrypt = starfive_aes_ecb_encrypt,
	.base.decrypt = starfive_aes_ecb_decrypt,
	.base.min_keysize = AES_MIN_KEY_SIZE,
	.base.max_keysize = AES_MAX_KEY_SIZE,
	.base.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "starfive-ecb-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct starfive_cryp_ctx),
		.cra_alignmask = 0xf,
		.cra_module = THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
}, {
	.base.init = starfive_aes_cbc_init_tfm,
	.base.exit = starfive_aes_exit_tfm,
	.base.setkey = starfive_aes_setkey,
	.base.encrypt = starfive_aes_cbc_encrypt,
	.base.decrypt = starfive_aes_cbc_decrypt,
	.base.min_keysize = AES_MIN_KEY_SIZE,
	.base.max_keysize = AES_MAX_KEY_SIZE,
	.base.ivsize = AES_BLOCK_SIZE,
	.base.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "starfive-cbc-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct starfive_cryp_ctx),
		.cra_alignmask = 0xf,
		.cra_module = THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
}, {
	.base.init = starfive_aes_ctr_init_tfm,
	.base.exit = starfive_aes_exit_tfm,
	.base.setkey = starfive_aes_setkey,
	.base.encrypt = starfive_aes_ctr_encrypt,
	.base.decrypt = starfive_aes_ctr_decrypt,
	.base.min_keysize = AES_MIN_KEY_SIZE,
	.base.max_keysize = AES_MAX_KEY_SIZE,
	.base.ivsize = AES_BLOCK_SIZE,
	.base.base = {
		.cra_name = "ctr(aes)",
		.cra_driver_name = "starfive-ctr-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct starfive_cryp_ctx),
		.cra_alignmask = 0xf,
		.cra_module = THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
},
};

static struct aead_engine_alg aead_algs[] = {
{
	.base.setkey = starfive_aes_aead_setkey,
	.base.setauthsize = starfive_aes_gcm_setauthsize,
	.base.encrypt = starfive_aes_gcm_encrypt,
	.base.decrypt = starfive_aes_gcm_decrypt,
	.base.init = starfive_aes_gcm_init_tfm,
	.base.exit = starfive_aes_aead_exit_tfm,
	.base.ivsize = GCM_AES_IV_SIZE,
	.base.maxauthsize = AES_BLOCK_SIZE,
	.base.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "starfive-gcm-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct starfive_cryp_ctx),
		.cra_alignmask = 0xf,
		.cra_module = THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_aead_do_one_req,
	},
}, {
	.base.setkey = starfive_aes_aead_setkey,
	.base.setauthsize = starfive_aes_ccm_setauthsize,
	.base.encrypt = starfive_aes_ccm_encrypt,
	.base.decrypt = starfive_aes_ccm_decrypt,
	.base.init = starfive_aes_ccm_init_tfm,
	.base.exit = starfive_aes_aead_exit_tfm,
	.base.ivsize = AES_BLOCK_SIZE,
	.base.maxauthsize = AES_BLOCK_SIZE,
	.base.base = {
		.cra_name = "ccm(aes)",
		.cra_driver_name = "starfive-ccm-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct starfive_cryp_ctx),
		.cra_alignmask = 0xf,
		.cra_module = THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_aead_do_one_req,
	},
},
};

int starfive_aes_register_algs(void)
{
	int ret;

	ret = crypto_engine_register_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
	if (ret)
		return ret;

	ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		crypto_engine_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));

	return ret;
}

void starfive_aes_unregister_algs(void)
{
	crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_engine_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}