// SPDX-License-Identifier: GPL-2.0
/*
 * Support for ATMEL AES HW acceleration.
 *
 * Copyright (c) 2012 Eukréa Electromatique - ATMEL
 * Author: Nicolas Royer <nicolas@eukrea.com>
 *
 * Some ideas are from omap-aes.c driver.
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/hw_random.h>
#include <linux/platform_device.h>
#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/of_device.h>
#include <linux/delay.h>
#include <linux/crypto.h>
#include <crypto/scatterwalk.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include "atmel-aes-regs.h"
#include "atmel-authenc.h"
#define ATMEL_AES_PRIORITY	300

#define ATMEL_AES_BUFFER_ORDER	2
#define ATMEL_AES_BUFFER_SIZE	(PAGE_SIZE << ATMEL_AES_BUFFER_ORDER)

#define CFB8_BLOCK_SIZE		1
#define CFB16_BLOCK_SIZE	2
#define CFB32_BLOCK_SIZE	4
#define CFB64_BLOCK_SIZE	8

#define SIZE_IN_WORDS(x)	((x) >> 2)

/* Reserve bits [18:16] [14:12] [1:0] for mode (same as for AES_MR) */
#define AES_FLAGS_ENCRYPT	AES_MR_CYPHER_ENC
#define AES_FLAGS_GTAGEN	AES_MR_GTAGEN
#define AES_FLAGS_OPMODE_MASK	(AES_MR_OPMOD_MASK | AES_MR_CFBS_MASK)
#define AES_FLAGS_ECB		AES_MR_OPMOD_ECB
#define AES_FLAGS_CBC		AES_MR_OPMOD_CBC
#define AES_FLAGS_OFB		AES_MR_OPMOD_OFB
#define AES_FLAGS_CFB128	(AES_MR_OPMOD_CFB | AES_MR_CFBS_128b)
#define AES_FLAGS_CFB64		(AES_MR_OPMOD_CFB | AES_MR_CFBS_64b)
#define AES_FLAGS_CFB32		(AES_MR_OPMOD_CFB | AES_MR_CFBS_32b)
#define AES_FLAGS_CFB16		(AES_MR_OPMOD_CFB | AES_MR_CFBS_16b)
#define AES_FLAGS_CFB8		(AES_MR_OPMOD_CFB | AES_MR_CFBS_8b)
#define AES_FLAGS_CTR		AES_MR_OPMOD_CTR
#define AES_FLAGS_GCM		AES_MR_OPMOD_GCM
#define AES_FLAGS_XTS		AES_MR_OPMOD_XTS
#define AES_FLAGS_MODE_MASK	(AES_FLAGS_OPMODE_MASK |	\
				 AES_FLAGS_ENCRYPT |		\
				 AES_FLAGS_GTAGEN)
#define AES_FLAGS_BUSY		BIT(3)
#define AES_FLAGS_DUMP_REG	BIT(4)
#define AES_FLAGS_OWN_SHA	BIT(5)

#define AES_FLAGS_PERSISTENT	AES_FLAGS_BUSY

#define ATMEL_AES_QUEUE_LENGTH	50

#define ATMEL_AES_DMA_THRESHOLD	256
struct atmel_aes_caps {
	bool	has_dualbuff;
	bool	has_cfb64;
	bool	has_gcm;
	bool	has_xts;
	bool	has_authenc;
	u32	max_burst_size;
};

typedef int (*atmel_aes_fn_t)(struct atmel_aes_dev *);
struct atmel_aes_base_ctx {
	struct atmel_aes_dev	*dd;
	atmel_aes_fn_t		start;
	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
	u16			block_size;
	bool			is_aead;
};
struct atmel_aes_ctx {
	struct atmel_aes_base_ctx	base;
};
struct atmel_aes_ctr_ctx {
	struct atmel_aes_base_ctx	base;

	__be32			iv[AES_BLOCK_SIZE / sizeof(u32)];
	size_t			offset;
	struct scatterlist	src[2];
	struct scatterlist	dst[2];
	size_t			blocks;
};
struct atmel_aes_gcm_ctx {
	struct atmel_aes_base_ctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];

	__be32			j0[AES_BLOCK_SIZE / sizeof(u32)];
	u32			tag[AES_BLOCK_SIZE / sizeof(u32)];
	__be32			ghash[AES_BLOCK_SIZE / sizeof(u32)];
	size_t			textlen;

	const __be32		*ghash_in;
	__be32			*ghash_out;
	atmel_aes_fn_t		ghash_resume;
};
struct atmel_aes_xts_ctx {
	struct atmel_aes_base_ctx	base;

	u32			key2[AES_KEYSIZE_256 / sizeof(u32)];
};
#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
struct atmel_aes_authenc_ctx {
	struct atmel_aes_base_ctx	base;
	struct atmel_sha_authenc_ctx	*auth;
};
#endif
struct atmel_aes_reqctx {
	unsigned long		mode;
	u8			lastc[AES_BLOCK_SIZE];
};
#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
struct atmel_aes_authenc_reqctx {
	struct atmel_aes_reqctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];
	size_t			textlen;
	u32			digest[SHA512_DIGEST_SIZE / sizeof(u32)];

	/* auth_req MUST be placed last. */
	struct ahash_request	auth_req;
};
#endif
struct atmel_aes_dma {
	struct dma_chan		*chan;
	struct scatterlist	*sg;
	int			nents;
	unsigned int		remainder;
	unsigned int		sg_len;
};
struct atmel_aes_dev {
	struct list_head	list;
	unsigned long		phys_base;
	void __iomem		*io_base;

	struct crypto_async_request	*areq;
	struct atmel_aes_base_ctx	*ctx;

	bool			is_async;
	atmel_aes_fn_t		resume;
	atmel_aes_fn_t		cpu_transfer_complete;

	struct device		*dev;
	struct clk		*iclk;
	u32			hw_version;

	unsigned long		flags;

	spinlock_t		lock;
	struct crypto_queue	queue;

	struct tasklet_struct	done_task;
	struct tasklet_struct	queue_task;

	size_t			total;
	size_t			datalen;
	u32			*data;

	struct atmel_aes_dma	src;
	struct atmel_aes_dma	dst;

	size_t			buflen;
	void			*buf;
	struct scatterlist	aligned_sg;
	struct scatterlist	*real_dst;

	struct atmel_aes_caps	caps;
};
struct atmel_aes_drv {
	struct list_head	dev_list;
	spinlock_t		lock;
};

static struct atmel_aes_drv atmel_aes = {
	.dev_list = LIST_HEAD_INIT(atmel_aes.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(atmel_aes.lock),
};
#ifdef VERBOSE_DEBUG
static const char *atmel_aes_reg_name(u32 offset, char *tmp, size_t sz)
{
	switch (offset) {
	case AES_KEYWR(0) ... AES_KEYWR(7):
		snprintf(tmp, sz, "KEYWR[%u]", (offset - AES_KEYWR(0)) >> 2);
		break;

	case AES_IDATAR(0) ... AES_IDATAR(3):
		snprintf(tmp, sz, "IDATAR[%u]", (offset - AES_IDATAR(0)) >> 2);
		break;

	case AES_ODATAR(0) ... AES_ODATAR(3):
		snprintf(tmp, sz, "ODATAR[%u]", (offset - AES_ODATAR(0)) >> 2);
		break;

	case AES_IVR(0) ... AES_IVR(3):
		snprintf(tmp, sz, "IVR[%u]", (offset - AES_IVR(0)) >> 2);
		break;

	case AES_GHASHR(0) ... AES_GHASHR(3):
		snprintf(tmp, sz, "GHASHR[%u]", (offset - AES_GHASHR(0)) >> 2);
		break;

	case AES_TAGR(0) ... AES_TAGR(3):
		snprintf(tmp, sz, "TAGR[%u]", (offset - AES_TAGR(0)) >> 2);
		break;

	case AES_GCMHR(0) ... AES_GCMHR(3):
		snprintf(tmp, sz, "GCMHR[%u]", (offset - AES_GCMHR(0)) >> 2);
		break;

	case AES_TWR(0) ... AES_TWR(3):
		snprintf(tmp, sz, "TWR[%u]", (offset - AES_TWR(0)) >> 2);
		break;

	case AES_ALPHAR(0) ... AES_ALPHAR(3):
		snprintf(tmp, sz, "ALPHAR[%u]", (offset - AES_ALPHAR(0)) >> 2);
		break;

	default:
		snprintf(tmp, sz, "0x%02x", offset);
		break;
	}

	return tmp;
}
#endif /* VERBOSE_DEBUG */
/* Shared functions */

static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
{
	u32 value = readl_relaxed(dd->io_base + offset);

#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "read 0x%08x from %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	return value;
}

static inline void atmel_aes_write(struct atmel_aes_dev *dd,
				   u32 offset, u32 value)
{
#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "write 0x%08x into %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	writel_relaxed(value, dd->io_base + offset);
}
static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
			     u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		*value = atmel_aes_read(dd, offset);
}

static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
			      const u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		atmel_aes_write(dd, offset, *value);
}

static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
					u32 *value)
{
	atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset,
					 const u32 *value)
{
	atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}
static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd,
						atmel_aes_fn_t resume)
{
	u32 isr = atmel_aes_read(dd, AES_ISR);

	if (unlikely(isr & AES_INT_DATARDY))
		return resume(dd);

	dd->resume = resume;
	atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
	return -EINPROGRESS;
}
static inline size_t atmel_aes_padlen(size_t len, size_t block_size)
{
	len &= block_size - 1;
	return len ? block_size - len : 0;
}
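/*
 * Example: with block_size = AES_BLOCK_SIZE (16), a length of 20 bytes yields
 * a padlen of 12 so that 20 + 12 is block aligned, while an already aligned
 * length (e.g. 32) yields 0. The mask trick above assumes block_size is a
 * power of two, which holds for every block size used by this driver.
 */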
static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_base_ctx *ctx)
{
	struct atmel_aes_dev *aes_dd = NULL;
	struct atmel_aes_dev *tmp;

	spin_lock_bh(&atmel_aes.lock);
	if (!ctx->dd) {
		list_for_each_entry(tmp, &atmel_aes.dev_list, list) {
			aes_dd = tmp;
			break;
		}
		ctx->dd = aes_dd;
	} else {
		aes_dd = ctx->dd;
	}
	spin_unlock_bh(&atmel_aes.lock);

	return aes_dd;
}
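/*
 * atmel_aes_hw_init() below enables the peripheral clock, issues a software
 * reset (AES_CR_SWRST) and programs the CKEY field of the Mode Register; 0xE
 * is the fixed key value the hardware documentation requires before any
 * countermeasure-related field of AES_MR may be changed.
 */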
static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
	int err;

	err = clk_enable(dd->iclk);
	if (err)
		return err;

	atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
	atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);

	return 0;
}
static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd)
{
	return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff;
}

static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
{
	int err;

	err = atmel_aes_hw_init(dd);
	if (err)
		return err;

	dd->hw_version = atmel_aes_get_version(dd);

	dev_info(dd->dev, "version: 0x%x\n", dd->hw_version);

	clk_disable(dd->iclk);
	return 0;
}
static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd,
				      const struct atmel_aes_reqctx *rctx)
{
	/* Clear all but persistent flags and set request flags. */
	dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode;
}

static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd)
{
	return (dd->flags & AES_FLAGS_ENCRYPT);
}
#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
#endif
static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

	if (req->cryptlen < ivsize)
		return;

	if (rctx->mode & AES_FLAGS_ENCRYPT) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->cryptlen - ivsize, ivsize, 0);
	} else {
		if (req->src == req->dst)
			memcpy(req->iv, rctx->lastc, ivsize);
		else
			scatterwalk_map_and_copy(req->iv, req->src,
						 req->cryptlen - ivsize,
						 ivsize, 0);
	}
}
static inline struct atmel_aes_ctr_ctx *
atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
}

static void atmel_aes_ctr_update_req_iv(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	int i;

	/*
	 * The CTR transfer works in fragments of data of maximum 1 MByte
	 * because of the 16 bit CTR counter embedded in the IP. When reaching
	 * here, ctx->blocks contains the number of blocks of the last fragment
	 * processed; there is no need to explicitly cast it to u16.
	 */
	for (i = 0; i < ctx->blocks; i++)
		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);

	memcpy(req->iv, ctx->iv, ivsize);
}
static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->ctx->is_aead)
		atmel_aes_authenc_complete(dd, err);
#endif

	clk_disable(dd->iclk);
	dd->flags &= ~AES_FLAGS_BUSY;

	if (!err && !dd->ctx->is_aead &&
	    (rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB) {
		if ((rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_CTR)
			atmel_aes_set_iv_as_last_ciphertext_block(dd);
		else
			atmel_aes_ctr_update_req_iv(dd);
	}

	if (dd->is_async)
		dd->areq->complete(dd->areq, err);

	tasklet_schedule(&dd->queue_task);

	return err;
}
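/*
 * Note on the IV handling above: for chaining modes other than ECB the
 * skcipher API expects req->iv to be updated on completion. CBC/CFB/OFB
 * requests report the last ciphertext block (saved beforehand in rctx->lastc
 * for in-place decryption), while CTR requests report the counter advanced
 * by the number of processed blocks.
 */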
static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma,
				     const __be32 *iv, const u32 *key,
				     int keylen)
{
	u32 valmr = 0;

	/* MR register must be set before IV registers */
	if (keylen == AES_KEYSIZE_128)
		valmr |= AES_MR_KEYSIZE_128;
	else if (keylen == AES_KEYSIZE_192)
		valmr |= AES_MR_KEYSIZE_192;
	else
		valmr |= AES_MR_KEYSIZE_256;

	valmr |= dd->flags & AES_FLAGS_MODE_MASK;

	if (use_dma) {
		valmr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			valmr |= AES_MR_DUALBUFF;
	} else {
		valmr |= AES_MR_SMOD_AUTO;
	}

	atmel_aes_write(dd, AES_MR, valmr);

	atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen));

	if (iv && (valmr & AES_MR_OPMOD_MASK) != AES_MR_OPMOD_ECB)
		atmel_aes_write_block(dd, AES_IVR(0), iv);
}
static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
					const __be32 *iv)
{
	atmel_aes_write_ctrl_key(dd, use_dma, iv,
				 dd->ctx->key, dd->ctx->keylen);
}
static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd)
{
	int err = 0;
	u32 isr;

	for (;;) {
		atmel_aes_read_block(dd, AES_ODATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		if (dd->datalen < AES_BLOCK_SIZE)
			break;

		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_cpu_transfer;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				 dd->buf, dd->total))
		err = -EINVAL;

	if (err)
		return atmel_aes_complete(dd, err);

	return dd->cpu_transfer_complete(dd);
}
static int atmel_aes_cpu_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE);

	if (unlikely(len == 0))
		return -EINVAL;

	sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);

	dd->total = len;
	dd->real_dst = dst;
	dd->cpu_transfer_complete = resume;
	dd->datalen = len + padlen;
	dd->data = (u32 *)dd->buf;
	atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer);
}
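/*
 * The CPU (PIO) path above is typically used for requests smaller than
 * ATMEL_AES_DMA_THRESHOLD (256 bytes): the whole request is copied into
 * dd->buf, fed to the Input Data Registers one 16-byte block at a time and
 * read back from the Output Data Registers, avoiding the DMA setup cost for
 * short transfers.
 */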
/* DMA transfer */

static void atmel_aes_dma_callback(void *data);
static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd,
				    struct scatterlist *sg,
				    size_t len,
				    struct atmel_aes_dma *dma)
{
	int nents;

	if (!IS_ALIGNED(len, dd->ctx->block_size))
		return false;

	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return false;

		if (len <= sg->length) {
			if (!IS_ALIGNED(len, dd->ctx->block_size))
				return false;

			dma->nents = nents + 1;
			dma->remainder = sg->length - len;
			sg->length = len;
			return true;
		}

		if (!IS_ALIGNED(sg->length, dd->ctx->block_size))
			return false;

		len -= sg->length;
	}

	return false;
}
static inline void atmel_aes_restore_sg(const struct atmel_aes_dma *dma)
{
	struct scatterlist *sg = dma->sg;
	int nents = dma->nents;

	if (!dma->remainder)
		return;

	while (--nents > 0 && sg)
		sg = sg_next(sg);

	if (!sg)
		return;

	sg->length += dma->remainder;
}
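/*
 * atmel_aes_map() below prepares the DMA mapping. When either scatterlist is
 * not suitably aligned for the controller, the data is bounced through the
 * pre-allocated dd->buf and a single-entry scatterlist (dd->aligned_sg) is
 * mapped instead; atmel_aes_restore_sg() undoes the temporary trimming of
 * the last scatterlist entry on completion.
 */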
static int atmel_aes_map(struct atmel_aes_dev *dd,
			 struct scatterlist *src,
			 struct scatterlist *dst,
			 size_t len)
{
	bool src_aligned, dst_aligned;
	size_t padlen;

	dd->total = len;
	dd->src.sg = src;
	dd->dst.sg = dst;
	dd->real_dst = dst;

	src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src);
	if (src == dst)
		dst_aligned = src_aligned;
	else
		dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst);
	if (!src_aligned || !dst_aligned) {
		padlen = atmel_aes_padlen(len, dd->ctx->block_size);

		if (dd->buflen < len + padlen)
			return -ENOMEM;

		if (!src_aligned) {
			sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);
			dd->src.sg = &dd->aligned_sg;
			dd->src.nents = 1;
			dd->src.remainder = 0;
		}

		if (!dst_aligned) {
			dd->dst.sg = &dd->aligned_sg;
			dd->dst.nents = 1;
			dd->dst.remainder = 0;
		}

		sg_init_table(&dd->aligned_sg, 1);
		sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen);
	}

	if (dd->src.sg == dd->dst.sg) {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_BIDIRECTIONAL);
		dd->dst.sg_len = dd->src.sg_len;
		if (!dd->src.sg_len)
			return -EFAULT;
	} else {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_TO_DEVICE);
		if (!dd->src.sg_len)
			return -EFAULT;

		dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
					    DMA_FROM_DEVICE);
		if (!dd->dst.sg_len) {
			dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
				     DMA_TO_DEVICE);
			return -EFAULT;
		}
	}

	return 0;
}
static void atmel_aes_unmap(struct atmel_aes_dev *dd)
{
	if (dd->src.sg == dd->dst.sg) {
		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_BIDIRECTIONAL);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	} else {
		dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents,
			     DMA_FROM_DEVICE);

		if (dd->dst.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->dst);

		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_TO_DEVICE);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	}

	if (dd->dst.sg == &dd->aligned_sg)
		sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				    dd->buf, dd->total);
}
static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd,
					enum dma_slave_buswidth addr_width,
					enum dma_transfer_direction dir,
					u32 maxburst)
{
	struct dma_async_tx_descriptor *desc;
	struct dma_slave_config config;
	dma_async_tx_callback callback;
	struct atmel_aes_dma *dma;
	int err;

	memset(&config, 0, sizeof(config));
	config.src_addr_width = addr_width;
	config.dst_addr_width = addr_width;
	config.src_maxburst = maxburst;
	config.dst_maxburst = maxburst;

	switch (dir) {
	case DMA_MEM_TO_DEV:
		dma = &dd->src;
		callback = NULL;
		config.dst_addr = dd->phys_base + AES_IDATAR(0);
		break;

	case DMA_DEV_TO_MEM:
		dma = &dd->dst;
		callback = atmel_aes_dma_callback;
		config.src_addr = dd->phys_base + AES_ODATAR(0);
		break;

	default:
		return -EINVAL;
	}

	err = dmaengine_slave_config(dma->chan, &config);
	if (err)
		return err;

	desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	desc->callback = callback;
	desc->callback_param = dd;
	dmaengine_submit(desc);
	dma_async_issue_pending(dma->chan);

	return 0;
}
static int atmel_aes_dma_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	enum dma_slave_buswidth addr_width;
	u32 maxburst;
	int err;

	switch (dd->ctx->block_size) {
	case CFB8_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
		maxburst = 1;
		break;

	case CFB16_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
		maxburst = 1;
		break;

	case CFB32_BLOCK_SIZE:
	case CFB64_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		maxburst = 1;
		break;

	case AES_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		maxburst = dd->caps.max_burst_size;
		break;

	default:
		err = -EINVAL;
		goto exit;
	}

	err = atmel_aes_map(dd, src, dst, len);
	if (err)
		goto exit;

	dd->resume = resume;

	/* Set output DMA transfer first */
	err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM,
					   maxburst);
	if (err)
		goto unmap;

	/* Then set input DMA transfer */
	err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV,
					   maxburst);
	if (err)
		goto output_transfer_stop;

	return -EINPROGRESS;

output_transfer_stop:
	dmaengine_terminate_sync(dd->dst.chan);
unmap:
	atmel_aes_unmap(dd);
exit:
	return atmel_aes_complete(dd, err);
}
static void atmel_aes_dma_callback(void *data)
{
	struct atmel_aes_dev *dd = data;

	atmel_aes_unmap(dd);
	dd->is_async = true;
	(void)dd->resume(dd);
}
static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
				  struct crypto_async_request *new_areq)
{
	struct crypto_async_request *areq, *backlog;
	struct atmel_aes_base_ctx *ctx;
	unsigned long flags;
	bool start_async;
	int err, ret = 0;

	spin_lock_irqsave(&dd->lock, flags);
	if (new_areq)
		ret = crypto_enqueue_request(&dd->queue, new_areq);
	if (dd->flags & AES_FLAGS_BUSY) {
		spin_unlock_irqrestore(&dd->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&dd->queue);
	areq = crypto_dequeue_request(&dd->queue);
	if (areq)
		dd->flags |= AES_FLAGS_BUSY;
	spin_unlock_irqrestore(&dd->lock, flags);

	if (!areq)
		return ret;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	ctx = crypto_tfm_ctx(areq->tfm);

	dd->areq = areq;
	dd->ctx = ctx;
	start_async = (areq != new_areq);
	dd->is_async = start_async;

	/* WARNING: ctx->start() MAY change dd->is_async. */
	err = ctx->start(dd);
	return (start_async) ? ret : err;
}
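/*
 * Return value convention for atmel_aes_handle_queue(): when the request
 * that was just enqueued is started right away (areq == new_areq), the
 * ctx->start() result is returned directly; otherwise the enqueue status
 * (typically -EINPROGRESS, or -EBUSY for a backlogged request) is returned
 * and the caller is notified later through the completion callback.
 */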
/* AES async block ciphers */

static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd)
{
	return atmel_aes_complete(dd, 0);
}
static int atmel_aes_start(struct atmel_aes_dev *dd)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD ||
			dd->ctx->block_size != AES_BLOCK_SIZE);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst,
					   req->cryptlen,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
				   atmel_aes_transfer_complete);
}
static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct scatterlist *src, *dst;
	size_t datalen;
	u32 ctr;
	u16 start, end;
	bool use_dma, fragmented = false;

	/* Check for transfer completion. */
	ctx->offset += dd->total;
	if (ctx->offset >= req->cryptlen)
		return atmel_aes_transfer_complete(dd);

	/* Compute data length. */
	datalen = req->cryptlen - ctx->offset;
	ctx->blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
	ctr = be32_to_cpu(ctx->iv[3]);

	/* Check 16bit counter overflow. */
	start = ctr & 0xffff;
	end = start + ctx->blocks - 1;

	if (ctx->blocks >> 16 || end < start) {
		ctr |= 0xffff;
		datalen = AES_BLOCK_SIZE * (0x10000 - start);
		fragmented = true;
	}

	use_dma = (datalen >= ATMEL_AES_DMA_THRESHOLD);

	/* Jump to offset. */
	src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset));

	/* Configure hardware. */
	atmel_aes_write_ctrl(dd, use_dma, ctx->iv);
	if (unlikely(fragmented)) {
		/*
		 * Increment the counter manually to cope with the hardware
		 * counter overflow.
		 */
		ctx->iv[3] = cpu_to_be32(ctr);
		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
	}

	if (use_dma)
		return atmel_aes_dma_start(dd, src, dst, datalen,
					   atmel_aes_ctr_transfer);

	return atmel_aes_cpu_start(dd, src, dst, datalen,
				   atmel_aes_ctr_transfer);
}
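/*
 * Example of the fragmentation handled above: if the current counter word is
 * 0xfffe (start = 0xfffe) and the request still needs 16 blocks, end wraps
 * around, so only 0x10000 - 0xfffe = 2 blocks (32 bytes) are programmed for
 * this fragment; atmel_aes_ctr_transfer() is then re-entered with the IV
 * manually advanced past the 16-bit counter boundary for the remainder.
 */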
static int atmel_aes_ctr_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	memcpy(ctx->iv, req->iv, AES_BLOCK_SIZE);

	ctx->offset = 0;
	dd->total = 0;
	return atmel_aes_ctr_transfer(dd);
}
static int atmel_aes_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct atmel_aes_reqctx *rctx;
	struct atmel_aes_dev *dd;

	switch (mode & AES_FLAGS_OPMODE_MASK) {
	case AES_FLAGS_CFB8:
		ctx->block_size = CFB8_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB16:
		ctx->block_size = CFB16_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB32:
		ctx->block_size = CFB32_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB64:
		ctx->block_size = CFB64_BLOCK_SIZE;
		break;

	default:
		ctx->block_size = AES_BLOCK_SIZE;
		break;
	}
	ctx->is_aead = false;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx = skcipher_request_ctx(req);
	rctx->mode = mode;

	if ((mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB &&
	    !(mode & AES_FLAGS_ENCRYPT) && req->src == req->dst) {
		unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

		if (req->cryptlen >= ivsize)
			scatterwalk_map_and_copy(rctx->lastc, req->src,
						 req->cryptlen - ivsize,
						 ivsize, 0);
	}

	return atmel_aes_handle_queue(dd, &req->base);
}
static int atmel_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}
static int atmel_aes_ecb_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ecb_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB);
}

static int atmel_aes_cbc_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cbc_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC);
}

static int atmel_aes_ofb_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_OFB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ofb_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_OFB);
}

static int atmel_aes_cfb_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB128 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB128);
}

static int atmel_aes_cfb64_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB64 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb64_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB64);
}

static int atmel_aes_cfb32_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB32 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb32_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB32);
}

static int atmel_aes_cfb16_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB16 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb16_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB16);
}

static int atmel_aes_cfb8_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB8 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb8_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB8);
}

static int atmel_aes_ctr_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ctr_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR);
}
static int atmel_aes_init_tfm(struct crypto_skcipher *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_start;

	return 0;
}

static int atmel_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_ctr_start;

	return 0;
}
static struct skcipher_alg aes_algs[] = {
{
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "atmel-ecb-aes",
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_ecb_encrypt,
	.decrypt		= atmel_aes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "atmel-cbc-aes",
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cbc_encrypt,
	.decrypt		= atmel_aes_cbc_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "ofb(aes)",
	.base.cra_driver_name	= "atmel-ofb-aes",
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_ofb_encrypt,
	.decrypt		= atmel_aes_ofb_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "cfb(aes)",
	.base.cra_driver_name	= "atmel-cfb-aes",
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cfb_encrypt,
	.decrypt		= atmel_aes_cfb_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "cfb32(aes)",
	.base.cra_driver_name	= "atmel-cfb32-aes",
	.base.cra_blocksize	= CFB32_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cfb32_encrypt,
	.decrypt		= atmel_aes_cfb32_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "cfb16(aes)",
	.base.cra_driver_name	= "atmel-cfb16-aes",
	.base.cra_blocksize	= CFB16_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cfb16_encrypt,
	.decrypt		= atmel_aes_cfb16_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "cfb8(aes)",
	.base.cra_driver_name	= "atmel-cfb8-aes",
	.base.cra_blocksize	= CFB8_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cfb8_encrypt,
	.decrypt		= atmel_aes_cfb8_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "atmel-ctr-aes",
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctr_ctx),

	.init			= atmel_aes_ctr_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_ctr_encrypt,
	.decrypt		= atmel_aes_ctr_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
};
static struct skcipher_alg aes_cfb64_alg = {
	.base.cra_name		= "cfb64(aes)",
	.base.cra_driver_name	= "atmel-cfb64-aes",
	.base.cra_blocksize	= CFB64_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cfb64_encrypt,
	.decrypt		= atmel_aes_cfb64_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
};
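/*
 * These skcipher algorithms are consumed through the regular kernel crypto
 * API; a minimal, illustrative user (error handling omitted) looks like:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	...
 *	crypto_free_skcipher(tfm);
 *
 * With ATMEL_AES_PRIORITY set to 300 these implementations are preferred
 * over the generic software ciphers whenever the hardware is available.
 */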
/* gcm aead functions */

static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const __be32 *ghash_in, __be32 *ghash_out,
			       atmel_aes_fn_t resume);
static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);

static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);

static inline struct atmel_aes_gcm_ctx *
atmel_aes_gcm_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_gcm_ctx, base);
}
static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const __be32 *ghash_in, __be32 *ghash_out,
			       atmel_aes_fn_t resume)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	dd->data = (u32 *)data;
	dd->datalen = datalen;
	ctx->ghash_in = ghash_in;
	ctx->ghash_out = ghash_out;
	ctx->ghash_resume = resume;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init);
}
static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	/* Set the data length. */
	atmel_aes_write(dd, AES_AADLENR, dd->total);
	atmel_aes_write(dd, AES_CLENR, 0);

	/* If needed, overwrite the GCM Intermediate Hash Word Registers */
	if (ctx->ghash_in)
		atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in);

	return atmel_aes_gcm_ghash_finalize(dd);
}
static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	u32 isr;

	/* Write data into the Input Data Registers. */
	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_ghash_finalize;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	/* Read the computed hash from GHASHRx. */
	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out);

	return ctx->ghash_resume(dd);
}
static int atmel_aes_gcm_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_reqctx *rctx = aead_request_ctx(req);
	size_t ivsize = crypto_aead_ivsize(tfm);
	size_t datalen, padlen;
	const void *iv = req->iv;
	u8 *data = dd->buf;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	if (likely(ivsize == GCM_AES_IV_SIZE)) {
		memcpy(ctx->j0, iv, ivsize);
		ctx->j0[3] = cpu_to_be32(1);
		return atmel_aes_gcm_process(dd);
	}

	padlen = atmel_aes_padlen(ivsize, AES_BLOCK_SIZE);
	datalen = ivsize + padlen + AES_BLOCK_SIZE;
	if (datalen > dd->buflen)
		return atmel_aes_complete(dd, -EINVAL);

	memcpy(data, iv, ivsize);
	memset(data + ivsize, 0, padlen + sizeof(u64));
	((__be64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen,
				   NULL, ctx->j0, atmel_aes_gcm_process);
}
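/*
 * The J0 construction above follows NIST SP 800-38D: for the usual 96-bit
 * IV, J0 is simply IV || 0x00000001 (hence ctx->j0[3] = 1); for any other IV
 * size, J0 is computed as GHASH over the zero-padded IV followed by a
 * 128-bit block holding the IV length in bits, which is what the
 * atmel_aes_gcm_ghash() call prepares in dd->buf.
 */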
static int atmel_aes_gcm_process(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 authsize;

	/* Compute text length. */
	authsize = crypto_aead_authsize(tfm);
	ctx->textlen = req->cryptlen - (enc ? 0 : authsize);

	/*
	 * According to tcrypt test suite, the GCM Automatic Tag Generation
	 * fails when both the message and its associated data are empty.
	 */
	if (likely(req->assoclen != 0 || ctx->textlen != 0))
		dd->flags |= AES_FLAGS_GTAGEN;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length);
}
static int atmel_aes_gcm_length(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	__be32 j0_lsw, *j0 = ctx->j0;
	size_t padlen;

	/* Write incr32(J0) into IV. */
	j0_lsw = j0[3];
	be32_add_cpu(&j0[3], 1);
	atmel_aes_write_block(dd, AES_IVR(0), j0);
	j0[3] = j0_lsw;

	/* Set aad and text lengths. */
	atmel_aes_write(dd, AES_AADLENR, req->assoclen);
	atmel_aes_write(dd, AES_CLENR, ctx->textlen);

	/* Check whether AAD are present. */
	if (unlikely(req->assoclen == 0)) {
		dd->datalen = 0;
		return atmel_aes_gcm_data(dd);
	}

	/* Copy assoc data and add padding. */
	padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);
	if (unlikely(req->assoclen + padlen > dd->buflen))
		return atmel_aes_complete(dd, -EINVAL);
	sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);

	/* Write assoc data into the Input Data register. */
	dd->data = (u32 *)dd->buf;
	dd->datalen = req->assoclen + padlen;
	return atmel_aes_gcm_data(dd);
}
static int atmel_aes_gcm_data(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	bool use_dma = (ctx->textlen >= ATMEL_AES_DMA_THRESHOLD);
	struct scatterlist *src, *dst;
	u32 isr, mr;

	/* Write AAD first. */
	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_data;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	if (unlikely(ctx->textlen == 0))
		return atmel_aes_gcm_tag_init(dd);

	/* Prepare src and dst scatter lists to transfer cipher/plain texts */
	src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));

	if (use_dma) {
		/* Update the Mode Register for DMA transfers. */
		mr = atmel_aes_read(dd, AES_MR);
		mr &= ~(AES_MR_SMOD_MASK | AES_MR_DUALBUFF);
		mr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			mr |= AES_MR_DUALBUFF;
		atmel_aes_write(dd, AES_MR, mr);

		return atmel_aes_dma_start(dd, src, dst, ctx->textlen,
					   atmel_aes_gcm_tag_init);
	}

	return atmel_aes_cpu_start(dd, src, dst, ctx->textlen,
				   atmel_aes_gcm_tag_init);
}
static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	__be64 *data = dd->buf;

	if (likely(dd->flags & AES_FLAGS_GTAGEN)) {
		if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) {
			dd->resume = atmel_aes_gcm_tag_init;
			atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY);
			return -EINPROGRESS;
		}

		return atmel_aes_gcm_finalize(dd);
	}

	/* Read the GCM Intermediate Hash Word Registers. */
	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash);

	data[0] = cpu_to_be64(req->assoclen * 8);
	data[1] = cpu_to_be64(ctx->textlen * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE,
				   ctx->ghash, ctx->ghash, atmel_aes_gcm_tag);
}
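/*
 * When the hardware Automatic Tag Generation is not used (empty message and
 * empty AAD), the tag is derived manually above: the intermediate GHASH is
 * read back, extended with the block len(A) || len(C) (both in bits), and
 * the result is finally encrypted in CTR mode with J0 by atmel_aes_gcm_tag().
 */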
static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	unsigned long flags;

	/*
	 * Change mode to CTR to complete the tag generation.
	 * Use J0 as Initialization Vector.
	 */
	flags = dd->flags;
	dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN);
	dd->flags |= AES_FLAGS_CTR;
	atmel_aes_write_ctrl(dd, false, ctx->j0);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize);
}
static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 offset, authsize, itag[4], *otag = ctx->tag;
	int err;

	/* Read the computed tag. */
	if (likely(dd->flags & AES_FLAGS_GTAGEN))
		atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag);
	else
		atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag);

	offset = req->assoclen + ctx->textlen;
	authsize = crypto_aead_authsize(tfm);
	if (enc) {
		scatterwalk_map_and_copy(otag, req->dst, offset, authsize, 1);
		err = 0;
	} else {
		scatterwalk_map_and_copy(itag, req->src, offset, authsize, 0);
		err = crypto_memneq(itag, otag, authsize) ? -EBADMSG : 0;
	}

	return atmel_aes_complete(dd, err);
}
static int atmel_aes_gcm_crypt(struct aead_request *req,
			       unsigned long mode)
{
	struct atmel_aes_base_ctx *ctx;
	struct atmel_aes_reqctx *rctx;
	struct atmel_aes_dev *dd;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	ctx->block_size = AES_BLOCK_SIZE;
	ctx->is_aead = true;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx = aead_request_ctx(req);
	rctx->mode = AES_FLAGS_GCM | mode;

	return atmel_aes_handle_queue(dd, &req->base);
}
static int atmel_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
				unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_256 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int atmel_aes_gcm_setauthsize(struct crypto_aead *tfm,
				     unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
static int atmel_aes_gcm_encrypt(struct aead_request *req)
{
	return atmel_aes_gcm_crypt(req, AES_FLAGS_ENCRYPT);
}

static int atmel_aes_gcm_decrypt(struct aead_request *req)
{
	return atmel_aes_gcm_crypt(req, 0);
}

static int atmel_aes_gcm_init(struct crypto_aead *tfm)
{
	struct atmel_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_gcm_start;

	return 0;
}
static struct aead_alg aes_gcm_alg = {
	.setkey		= atmel_aes_gcm_setkey,
	.setauthsize	= atmel_aes_gcm_setauthsize,
	.encrypt	= atmel_aes_gcm_encrypt,
	.decrypt	= atmel_aes_gcm_decrypt,
	.init		= atmel_aes_gcm_init,
	.ivsize		= GCM_AES_IV_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "atmel-gcm-aes",
		.cra_ctxsize		= sizeof(struct atmel_aes_gcm_ctx),
	},
};
static inline struct atmel_aes_xts_ctx *
atmel_aes_xts_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_xts_ctx, base);
}
static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);

static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	unsigned long flags;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	/* Compute the tweak value from req->iv with ecb(aes). */
	flags = dd->flags;
	dd->flags &= ~AES_FLAGS_MODE_MASK;
	dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
	atmel_aes_write_ctrl_key(dd, false, NULL,
				 ctx->key2, ctx->base.keylen);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), req->iv);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data);
}
static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD);
	u32 tweak[AES_BLOCK_SIZE / sizeof(u32)];
	static const __le32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
	u8 *tweak_bytes = (u8 *)tweak;
	int i;

	/* Read the computed ciphered tweak value. */
	atmel_aes_read_block(dd, AES_ODATAR(0), tweak);
	/*
	 * The order of the ciphered tweak bytes needs to be reversed before
	 * writing them into the ODATARx registers.
	 */
	for (i = 0; i < AES_BLOCK_SIZE/2; ++i) {
		u8 tmp = tweak_bytes[AES_BLOCK_SIZE - 1 - i];

		tweak_bytes[AES_BLOCK_SIZE - 1 - i] = tweak_bytes[i];
		tweak_bytes[i] = tmp;
	}

	/* Process the data. */
	atmel_aes_write_ctrl(dd, use_dma, NULL);
	atmel_aes_write_block(dd, AES_TWR(0), tweak);
	atmel_aes_write_block(dd, AES_ALPHAR(0), one);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst,
					   req->cryptlen,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
				   atmel_aes_transfer_complete);
}
static int atmel_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
				unsigned int keylen)
{
	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_check_key(crypto_skcipher_tfm(tfm), key, keylen);
	if (err)
		return err;

	memcpy(ctx->base.key, key, keylen/2);
	memcpy(ctx->key2, key + keylen/2, keylen/2);
	ctx->base.keylen = keylen/2;

	return 0;
}
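/*
 * As in the XTS specification, the supplied key is twice the AES key size:
 * the first half becomes the data encryption key (ctx->base.key) and the
 * second half the tweak encryption key (ctx->key2) used by
 * atmel_aes_xts_start() to encrypt req->iv into the initial tweak.
 * xts_check_key() additionally rejects keys whose two halves are identical
 * when the kernel runs in FIPS mode.
 */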
static int atmel_aes_xts_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_XTS | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_xts_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_XTS);
}

static int atmel_aes_xts_init_tfm(struct crypto_skcipher *tfm)
{
	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_xts_start;

	return 0;
}
static struct skcipher_alg aes_xts_alg = {
	.base.cra_name		= "xts(aes)",
	.base.cra_driver_name	= "atmel-xts-aes",
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_xts_ctx),

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= atmel_aes_xts_setkey,
	.encrypt		= atmel_aes_xts_encrypt,
	.decrypt		= atmel_aes_xts_decrypt,
	.init			= atmel_aes_xts_init_tfm,
};
#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
/* authenc aead functions */

static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
				  bool is_async);
static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
				      bool is_async);
static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
				   bool is_async);

static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	if (err && (dd->flags & AES_FLAGS_OWN_SHA))
		atmel_sha_authenc_abort(&rctx->auth_req);
	dd->flags &= ~AES_FLAGS_OWN_SHA;
}
static int atmel_aes_authenc_start(struct atmel_aes_dev *dd)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	int err;

	atmel_aes_set_mode(dd, &rctx->base);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	return atmel_sha_authenc_schedule(&rctx->auth_req, ctx->auth,
					  atmel_aes_authenc_init, dd);
}
static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
				  bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	if (is_async)
		dd->is_async = true;
	if (err)
		return atmel_aes_complete(dd, err);

	/* If here, we've got the ownership of the SHA device. */
	dd->flags |= AES_FLAGS_OWN_SHA;

	/* Configure the SHA device. */
	return atmel_sha_authenc_init(&rctx->auth_req,
				      req->src, req->assoclen,
				      rctx->textlen,
				      atmel_aes_authenc_transfer, dd);
}
static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
				      bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	bool enc = atmel_aes_is_encrypt(dd);
	struct scatterlist *src, *dst;
	__be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
	u32 emr;

	if (is_async)
		dd->is_async = true;
	if (err)
		return atmel_aes_complete(dd, err);

	/* Prepare src and dst scatter-lists to transfer cipher/plain texts. */
	src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
	dst = src;

	if (req->src != req->dst)
		dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);

	/* Configure the AES device. */
	memcpy(iv, req->iv, sizeof(iv));

	/*
	 * Here we always set the 2nd parameter of atmel_aes_write_ctrl() to
	 * 'true' even if the data transfer is actually performed by the CPU
	 * (so not by the DMA) because we must force the AES_MR_SMOD bitfield
	 * to the value AES_MR_SMOD_IDATAR0. Indeed, both AES_MR_SMOD and
	 * SHA_MR_SMOD must be set to *_MR_SMOD_IDATAR0.
	 */
	atmel_aes_write_ctrl(dd, true, iv);
	emr = AES_EMR_PLIPEN;
	if (!enc)
		emr |= AES_EMR_PLIPD;
	atmel_aes_write(dd, AES_EMR, emr);

	/* Transfer data. */
	return atmel_aes_dma_start(dd, src, dst, rctx->textlen,
				   atmel_aes_authenc_digest);
}
static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	/* atmel_sha_authenc_final() releases the SHA device. */
	dd->flags &= ~AES_FLAGS_OWN_SHA;
	return atmel_sha_authenc_final(&rctx->auth_req,
				       rctx->digest, sizeof(rctx->digest),
				       atmel_aes_authenc_final, dd);
}
static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
				   bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 idigest[SHA512_DIGEST_SIZE / sizeof(u32)], *odigest = rctx->digest;
	u32 offs, authsize;

	if (is_async)
		dd->is_async = true;
	if (err)
		goto complete;

	offs = req->assoclen + rctx->textlen;
	authsize = crypto_aead_authsize(tfm);
	if (enc) {
		scatterwalk_map_and_copy(odigest, req->dst, offs, authsize, 1);
	} else {
		scatterwalk_map_and_copy(idigest, req->src, offs, authsize, 0);
		if (crypto_memneq(idigest, odigest, authsize))
			err = -EBADMSG;
	}

complete:
	return atmel_aes_complete(dd, err);
}
static int atmel_aes_authenc_setkey(struct crypto_aead *tfm, const u8 *key,
				    unsigned int keylen)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_authenc_keys keys;
	int err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->base.key))
		goto badkey;

	/* Save auth key. */
	err = atmel_sha_authenc_setkey(ctx->auth,
				       keys.authkey, keys.authkeylen,
				       crypto_aead_get_flags(tfm));
	if (err) {
		memzero_explicit(&keys, sizeof(keys));
		return err;
	}

	/* Save enc key. */
	ctx->base.keylen = keys.enckeylen;
	memcpy(ctx->base.key, keys.enckey, keys.enckeylen);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
static int atmel_aes_authenc_init_tfm(struct crypto_aead *tfm,
				      unsigned long auth_mode)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int auth_reqsize = atmel_sha_authenc_get_reqsize();

	ctx->auth = atmel_sha_authenc_spawn(auth_mode);
	if (IS_ERR(ctx->auth))
		return PTR_ERR(ctx->auth);

	crypto_aead_set_reqsize(tfm, (sizeof(struct atmel_aes_authenc_reqctx) +
				      auth_reqsize));
	ctx->base.start = atmel_aes_authenc_start;

	return 0;
}
static int atmel_aes_authenc_hmac_sha1_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA1);
}

static int atmel_aes_authenc_hmac_sha224_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA224);
}

static int atmel_aes_authenc_hmac_sha256_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA256);
}

static int atmel_aes_authenc_hmac_sha384_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA384);
}

static int atmel_aes_authenc_hmac_sha512_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA512);
}

static void atmel_aes_authenc_exit_tfm(struct crypto_aead *tfm)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);

	atmel_sha_authenc_free(ctx->auth);
}
static int atmel_aes_authenc_crypt(struct aead_request *req,
				   unsigned long mode)
{
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
	u32 authsize = crypto_aead_authsize(tfm);
	bool enc = (mode & AES_FLAGS_ENCRYPT);
	struct atmel_aes_dev *dd;

	/* Compute text length. */
	if (!enc && req->cryptlen < authsize)
		return -EINVAL;
	rctx->textlen = req->cryptlen - (enc ? 0 : authsize);

	/*
	 * Empty messages are not supported yet: the SHA auto-padding can be
	 * used only on non-empty messages. Hence a special case would need to
	 * be implemented for empty messages.
	 */
	if (!rctx->textlen && !req->assoclen)
		return -EINVAL;

	rctx->base.mode = mode;
	ctx->block_size = AES_BLOCK_SIZE;
	ctx->is_aead = true;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	return atmel_aes_handle_queue(dd, &req->base);
}
static int atmel_aes_authenc_cbc_aes_encrypt(struct aead_request *req)
{
	return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_authenc_cbc_aes_decrypt(struct aead_request *req)
{
	return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC);
}
static struct aead_alg aes_authenc_algs[] = {
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha1_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA1_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha1),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha1-cbc-aes",
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha224_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA224_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha224),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha224-cbc-aes",
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha256_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA256_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha256),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha256-cbc-aes",
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha384_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA384_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha384),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha384-cbc-aes",
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha512_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA512_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha512),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha512-cbc-aes",
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
	},
},
};
#endif /* CONFIG_CRYPTO_DEV_ATMEL_AUTHENC */
/* Probe functions */

static int atmel_aes_buff_init(struct atmel_aes_dev *dd)
{
	dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER);
	dd->buflen = ATMEL_AES_BUFFER_SIZE;
	dd->buflen &= ~(AES_BLOCK_SIZE - 1);

	if (!dd->buf) {
		dev_err(dd->dev, "unable to alloc pages.\n");
		return -ENOMEM;
	}

	return 0;
}

static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd)
{
	free_page((unsigned long)dd->buf);
}
static int atmel_aes_dma_init(struct atmel_aes_dev *dd)
{
	int ret;

	/* Try to grab 2 DMA channels */
	dd->src.chan = dma_request_chan(dd->dev, "tx");
	if (IS_ERR(dd->src.chan)) {
		ret = PTR_ERR(dd->src.chan);
		goto err_dma_in;
	}

	dd->dst.chan = dma_request_chan(dd->dev, "rx");
	if (IS_ERR(dd->dst.chan)) {
		ret = PTR_ERR(dd->dst.chan);
		goto err_dma_out;
	}

	return 0;

err_dma_out:
	dma_release_channel(dd->src.chan);
err_dma_in:
	dev_err(dd->dev, "no DMA channel available\n");
	return ret;
}

static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
{
	dma_release_channel(dd->dst.chan);
	dma_release_channel(dd->src.chan);
}
static void atmel_aes_queue_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	atmel_aes_handle_queue(dd, NULL);
}

static void atmel_aes_done_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	dd->is_async = true;
	(void)dd->resume(dd);
}
static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
{
	struct atmel_aes_dev *aes_dd = dev_id;
	u32 reg;

	reg = atmel_aes_read(aes_dd, AES_ISR);
	if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
		atmel_aes_write(aes_dd, AES_IDR, reg);
		if (AES_FLAGS_BUSY & aes_dd->flags)
			tasklet_schedule(&aes_dd->done_task);
		else
			dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n");
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
{
	int i;

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->caps.has_authenc)
		for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++)
			crypto_unregister_aead(&aes_authenc_algs[i]);
#endif

	if (dd->caps.has_xts)
		crypto_unregister_skcipher(&aes_xts_alg);

	if (dd->caps.has_gcm)
		crypto_unregister_aead(&aes_gcm_alg);

	if (dd->caps.has_cfb64)
		crypto_unregister_skcipher(&aes_cfb64_alg);

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
		crypto_unregister_skcipher(&aes_algs[i]);
}
static void atmel_aes_crypto_alg_init(struct crypto_alg *alg)
{
	alg->cra_flags = CRYPTO_ALG_ASYNC;
	alg->cra_alignmask = 0xf;
	alg->cra_priority = ATMEL_AES_PRIORITY;
	alg->cra_module = THIS_MODULE;
}
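/*
 * Register the algorithms supported by this instance, depending on its
 * capabilities. Registration is incremental, so on failure the error path
 * unwinds only what has already been registered before returning the error.
 */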
static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
{
	int err, i, j;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		atmel_aes_crypto_alg_init(&aes_algs[i].base);

		err = crypto_register_skcipher(&aes_algs[i]);
		if (err)
			goto err_aes_algs;
	}

	if (dd->caps.has_cfb64) {
		atmel_aes_crypto_alg_init(&aes_cfb64_alg.base);

		err = crypto_register_skcipher(&aes_cfb64_alg);
		if (err)
			goto err_aes_cfb64_alg;
	}

	if (dd->caps.has_gcm) {
		atmel_aes_crypto_alg_init(&aes_gcm_alg.base);

		err = crypto_register_aead(&aes_gcm_alg);
		if (err)
			goto err_aes_gcm_alg;
	}

	if (dd->caps.has_xts) {
		atmel_aes_crypto_alg_init(&aes_xts_alg.base);

		err = crypto_register_skcipher(&aes_xts_alg);
		if (err)
			goto err_aes_xts_alg;
	}

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->caps.has_authenc) {
		for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++) {
			atmel_aes_crypto_alg_init(&aes_authenc_algs[i].base);

			err = crypto_register_aead(&aes_authenc_algs[i]);
			if (err)
				goto err_aes_authenc_alg;
		}
	}
#endif

	return 0;

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	/* i = ARRAY_SIZE(aes_authenc_algs); */
err_aes_authenc_alg:
	for (j = 0; j < i; j++)
		crypto_unregister_aead(&aes_authenc_algs[j]);
	crypto_unregister_skcipher(&aes_xts_alg);
#endif
err_aes_xts_alg:
	crypto_unregister_aead(&aes_gcm_alg);
err_aes_gcm_alg:
	crypto_unregister_skcipher(&aes_cfb64_alg);
err_aes_cfb64_alg:
	i = ARRAY_SIZE(aes_algs);
err_aes_algs:
	for (j = 0; j < i; j++)
		crypto_unregister_skcipher(&aes_algs[j]);

	return err;
}
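/*
 * Derive the capabilities of this IP instance from its hardware version:
 * newer revisions add GCM, XTS and the combined authenc mode on top of the
 * baseline feature set.
 */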
static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
{
	dd->caps.has_dualbuff = 0;
	dd->caps.has_cfb64 = 0;
	dd->caps.has_gcm = 0;
	dd->caps.has_xts = 0;
	dd->caps.has_authenc = 0;
	dd->caps.max_burst_size = 1;

	/* keep only major version number */
	switch (dd->hw_version & 0xff0) {
	case 0x500:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.has_xts = 1;
		dd->caps.has_authenc = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x200:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x130:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x120:
		break;
	default:
		dev_warn(dd->dev,
			 "Unmanaged aes version, set minimum capabilities\n");
		break;
	}
}
#if defined(CONFIG_OF)
static const struct of_device_id atmel_aes_dt_ids[] = {
	{ .compatible = "atmel,at91sam9g46-aes" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, atmel_aes_dt_ids);
#endif
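/*
 * Probe: map the controller, grab its IRQ and clock, read the hardware
 * version to derive the capabilities, allocate the internal buffer and the
 * DMA channels, then expose the supported algorithms to the crypto API.
 */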
static int atmel_aes_probe(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;
	struct device *dev = &pdev->dev;
	struct resource *aes_res;
	int err;

	aes_dd = devm_kzalloc(&pdev->dev, sizeof(*aes_dd), GFP_KERNEL);
	if (!aes_dd)
		return -ENOMEM;

	aes_dd->dev = dev;

	platform_set_drvdata(pdev, aes_dd);

	INIT_LIST_HEAD(&aes_dd->list);
	spin_lock_init(&aes_dd->lock);

	tasklet_init(&aes_dd->done_task, atmel_aes_done_task,
		     (unsigned long)aes_dd);
	tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task,
		     (unsigned long)aes_dd);

	crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH);

	/* Get the base address */
	aes_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!aes_res) {
		dev_err(dev, "no MEM resource info\n");
		err = -ENODEV;
		goto err_tasklet_kill;
	}
	aes_dd->phys_base = aes_res->start;

	/* Get the IRQ */
	aes_dd->irq = platform_get_irq(pdev, 0);
	if (aes_dd->irq < 0) {
		err = aes_dd->irq;
		goto err_tasklet_kill;
	}

	err = devm_request_irq(&pdev->dev, aes_dd->irq, atmel_aes_irq,
			       IRQF_SHARED, "atmel-aes", aes_dd);
	if (err) {
		dev_err(dev, "unable to request aes irq.\n");
		goto err_tasklet_kill;
	}

	/* Initializing the clock */
	aes_dd->iclk = devm_clk_get(&pdev->dev, "aes_clk");
	if (IS_ERR(aes_dd->iclk)) {
		dev_err(dev, "clock initialization failed.\n");
		err = PTR_ERR(aes_dd->iclk);
		goto err_tasklet_kill;
	}

	aes_dd->io_base = devm_ioremap_resource(&pdev->dev, aes_res);
	if (IS_ERR(aes_dd->io_base)) {
		dev_err(dev, "can't ioremap\n");
		err = PTR_ERR(aes_dd->io_base);
		goto err_tasklet_kill;
	}

	err = clk_prepare(aes_dd->iclk);
	if (err)
		goto err_tasklet_kill;

	err = atmel_aes_hw_version_init(aes_dd);
	if (err)
		goto err_iclk_unprepare;

	atmel_aes_get_cap(aes_dd);

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (aes_dd->caps.has_authenc && !atmel_sha_authenc_is_ready()) {
		err = -EPROBE_DEFER;
		goto err_iclk_unprepare;
	}
#endif

	err = atmel_aes_buff_init(aes_dd);
	if (err)
		goto err_iclk_unprepare;

	err = atmel_aes_dma_init(aes_dd);
	if (err)
		goto err_buff_cleanup;

	spin_lock(&atmel_aes.lock);
	list_add_tail(&aes_dd->list, &atmel_aes.dev_list);
	spin_unlock(&atmel_aes.lock);

	err = atmel_aes_register_algs(aes_dd);
	if (err)
		goto err_algs;

	dev_info(dev, "Atmel AES - Using %s, %s for DMA transfers\n",
		 dma_chan_name(aes_dd->src.chan),
		 dma_chan_name(aes_dd->dst.chan));

	return 0;

err_algs:
	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);
	atmel_aes_dma_cleanup(aes_dd);
err_buff_cleanup:
	atmel_aes_buff_cleanup(aes_dd);
err_iclk_unprepare:
	clk_unprepare(aes_dd->iclk);
err_tasklet_kill:
	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	return err;
}
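/* Remove: tear down in roughly the reverse order of probe. */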
static int atmel_aes_remove(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;

	aes_dd = platform_get_drvdata(pdev);

	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);

	atmel_aes_unregister_algs(aes_dd);

	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	atmel_aes_dma_cleanup(aes_dd);
	atmel_aes_buff_cleanup(aes_dd);

	clk_unprepare(aes_dd->iclk);

	return 0;
}
static struct platform_driver atmel_aes_driver = {
	.probe		= atmel_aes_probe,
	.remove		= atmel_aes_remove,
	.driver		= {
		.name		= "atmel_aes",
		.of_match_table	= of_match_ptr(atmel_aes_dt_ids),
	},
};

module_platform_driver(atmel_aes_driver);

MODULE_DESCRIPTION("Atmel AES hw acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");