// SPDX-License-Identifier: GPL-2.0
/*
 * Support for ATMEL AES HW acceleration.
 *
 * Copyright (c) 2012 Eukréa Electromatique - ATMEL
 * Author: Nicolas Royer <nicolas@eukrea.com>
 *
 * Some ideas are from omap-aes.c driver.
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/hw_random.h>
#include <linux/platform_device.h>

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/mod_devicetable.h>
#include <linux/delay.h>
#include <linux/crypto.h>
#include <crypto/scatterwalk.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include "atmel-aes-regs.h"
#include "atmel-authenc.h"
#define ATMEL_AES_PRIORITY	300

#define ATMEL_AES_BUFFER_ORDER	2
#define ATMEL_AES_BUFFER_SIZE	(PAGE_SIZE << ATMEL_AES_BUFFER_ORDER)

#define SIZE_IN_WORDS(x)	((x) >> 2)

/* Reserve bits [18:16] [14:12] [1:0] for mode (same as for AES_MR) */
#define AES_FLAGS_ENCRYPT	AES_MR_CYPHER_ENC
#define AES_FLAGS_GTAGEN	AES_MR_GTAGEN
#define AES_FLAGS_OPMODE_MASK	(AES_MR_OPMOD_MASK | AES_MR_CFBS_MASK)
#define AES_FLAGS_ECB		AES_MR_OPMOD_ECB
#define AES_FLAGS_CBC		AES_MR_OPMOD_CBC
#define AES_FLAGS_CTR		AES_MR_OPMOD_CTR
#define AES_FLAGS_GCM		AES_MR_OPMOD_GCM
#define AES_FLAGS_XTS		AES_MR_OPMOD_XTS

#define AES_FLAGS_MODE_MASK	(AES_FLAGS_OPMODE_MASK |	\
				 AES_FLAGS_ENCRYPT |		\
				 AES_FLAGS_GTAGEN)

#define AES_FLAGS_BUSY		BIT(3)
#define AES_FLAGS_DUMP_REG	BIT(4)
#define AES_FLAGS_OWN_SHA	BIT(5)

#define AES_FLAGS_PERSISTENT	AES_FLAGS_BUSY

#define ATMEL_AES_QUEUE_LENGTH	50

#define ATMEL_AES_DMA_THRESHOLD		256
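/*
 * Note: the driver-private flags (BUSY, DUMP_REG, OWN_SHA) deliberately sit
 * in bit positions that AES_FLAGS_MODE_MASK does not cover, so dd->flags can
 * double as the mode-register value: atmel_aes_write_ctrl_key() ORs
 * (dd->flags & AES_FLAGS_MODE_MASK) straight into AES_MR.
 */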
struct atmel_aes_caps {
	bool	has_dualbuff;
	bool	has_gcm;
	bool	has_xts;
	bool	has_authenc;
	u32	max_burst_size;
};

struct atmel_aes_dev;

typedef int (*atmel_aes_fn_t)(struct atmel_aes_dev *);

struct atmel_aes_base_ctx {
	struct atmel_aes_dev	*dd;
	atmel_aes_fn_t		start;
	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
	u16			block_size;
	bool			is_aead;
};

struct atmel_aes_ctx {
	struct atmel_aes_base_ctx	base;
};

struct atmel_aes_ctr_ctx {
	struct atmel_aes_base_ctx	base;

	__be32			iv[AES_BLOCK_SIZE / sizeof(u32)];
	size_t			offset;
	struct scatterlist	src[2];
	struct scatterlist	dst[2];
	u32			blocks;
};

struct atmel_aes_gcm_ctx {
	struct atmel_aes_base_ctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];

	__be32			j0[AES_BLOCK_SIZE / sizeof(u32)];
	u32			tag[AES_BLOCK_SIZE / sizeof(u32)];
	__be32			ghash[AES_BLOCK_SIZE / sizeof(u32)];

	size_t			textlen;

	const __be32		*ghash_in;
	__be32			*ghash_out;
	atmel_aes_fn_t		ghash_resume;
};

struct atmel_aes_xts_ctx {
	struct atmel_aes_base_ctx	base;

	u32			key2[AES_KEYSIZE_256 / sizeof(u32)];
	struct crypto_skcipher	*fallback_tfm;
};

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
struct atmel_aes_authenc_ctx {
	struct atmel_aes_base_ctx	base;
	struct atmel_sha_authenc_ctx	*auth;
};
#endif

struct atmel_aes_reqctx {
	unsigned long		mode;
	u8			lastc[AES_BLOCK_SIZE];
	struct skcipher_request fallback_req;
};

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
struct atmel_aes_authenc_reqctx {
	struct atmel_aes_reqctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];
	size_t			textlen;
	u32			digest[SHA512_DIGEST_SIZE / sizeof(u32)];

	/* auth_req MUST be placed last. */
	struct ahash_request	auth_req;
};
#endif

struct atmel_aes_dma {
	struct dma_chan		*chan;
	struct scatterlist	*sg;
	int			nents;
	unsigned int		remainder;
	unsigned int		sg_len;
};

struct atmel_aes_dev {
	struct list_head	list;
	unsigned long		phys_base;
	void __iomem		*io_base;

	struct crypto_async_request	*areq;
	struct atmel_aes_base_ctx	*ctx;

	bool			is_async;
	atmel_aes_fn_t		resume;
	atmel_aes_fn_t		cpu_transfer_complete;

	struct device		*dev;
	struct clk		*iclk;
	int			irq;

	unsigned long		flags;

	spinlock_t		lock;
	struct crypto_queue	queue;

	struct tasklet_struct	done_task;
	struct tasklet_struct	queue_task;

	size_t			total;
	size_t			datalen;
	u32			*data;

	struct atmel_aes_dma	src;
	struct atmel_aes_dma	dst;

	size_t			buflen;
	void			*buf;
	struct scatterlist	aligned_sg;
	struct scatterlist	*real_dst;

	struct atmel_aes_caps	caps;

	u32			hw_version;
};

struct atmel_aes_drv {
	struct list_head	dev_list;
	spinlock_t		lock;
};

static struct atmel_aes_drv atmel_aes = {
	.dev_list = LIST_HEAD_INIT(atmel_aes.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(atmel_aes.lock),
};
#ifdef VERBOSE_DEBUG
static const char *atmel_aes_reg_name(u32 offset, char *tmp, size_t sz)
{
	switch (offset) {
	case AES_KEYWR(0) ... AES_KEYWR(7):
		snprintf(tmp, sz, "KEYWR[%u]", (offset - AES_KEYWR(0)) >> 2);
		break;

	case AES_IDATAR(0) ... AES_IDATAR(3):
		snprintf(tmp, sz, "IDATAR[%u]", (offset - AES_IDATAR(0)) >> 2);
		break;

	case AES_ODATAR(0) ... AES_ODATAR(3):
		snprintf(tmp, sz, "ODATAR[%u]", (offset - AES_ODATAR(0)) >> 2);
		break;

	case AES_IVR(0) ... AES_IVR(3):
		snprintf(tmp, sz, "IVR[%u]", (offset - AES_IVR(0)) >> 2);
		break;

	case AES_GHASHR(0) ... AES_GHASHR(3):
		snprintf(tmp, sz, "GHASHR[%u]", (offset - AES_GHASHR(0)) >> 2);
		break;

	case AES_TAGR(0) ... AES_TAGR(3):
		snprintf(tmp, sz, "TAGR[%u]", (offset - AES_TAGR(0)) >> 2);
		break;

	case AES_GCMHR(0) ... AES_GCMHR(3):
		snprintf(tmp, sz, "GCMHR[%u]", (offset - AES_GCMHR(0)) >> 2);
		break;

	case AES_TWR(0) ... AES_TWR(3):
		snprintf(tmp, sz, "TWR[%u]", (offset - AES_TWR(0)) >> 2);
		break;

	case AES_ALPHAR(0) ... AES_ALPHAR(3):
		snprintf(tmp, sz, "ALPHAR[%u]", (offset - AES_ALPHAR(0)) >> 2);
		break;

	default:
		snprintf(tmp, sz, "0x%02x", offset);
		break;
	}

	return tmp;
}
#endif /* VERBOSE_DEBUG */
/* Shared functions */

static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
{
	u32 value = readl_relaxed(dd->io_base + offset);

#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "read 0x%08x from %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	return value;
}

static inline void atmel_aes_write(struct atmel_aes_dev *dd,
				   u32 offset, u32 value)
{
#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "write 0x%08x into %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	writel_relaxed(value, dd->io_base + offset);
}

static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
			     u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		*value = atmel_aes_read(dd, offset);
}

static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
			      const u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		atmel_aes_write(dd, offset, *value);
}

static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
					void *value)
{
	atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset,
					 const void *value)
{
	atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd,
						atmel_aes_fn_t resume)
{
	u32 isr = atmel_aes_read(dd, AES_ISR);

	if (unlikely(isr & AES_INT_DATARDY))
		return resume(dd);

	dd->resume = resume;
	atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
	return -EINPROGRESS;
}
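/*
 * Most of this driver is written as a resume-callback state machine: a step
 * that cannot finish synchronously stores its continuation in dd->resume,
 * unmasks AES_INT_DATARDY and returns -EINPROGRESS; the interrupt handler
 * then schedules done_task, which invokes dd->resume() to pick up where the
 * transfer left off.
 */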
static inline size_t atmel_aes_padlen(size_t len, size_t block_size)
{
	len &= block_size - 1;
	return len ? block_size - len : 0;
}
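/*
 * For example, atmel_aes_padlen(20, AES_BLOCK_SIZE) returns 12: a 20-byte
 * buffer must grow to the next 16-byte boundary (32 bytes) before it can be
 * fed block by block to the engine. Already-aligned lengths return 0.
 */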
static struct atmel_aes_dev *atmel_aes_dev_alloc(struct atmel_aes_base_ctx *ctx)
{
	struct atmel_aes_dev *aes_dd;

	spin_lock_bh(&atmel_aes.lock);
	/* One AES IP per SoC. */
	aes_dd = list_first_entry_or_null(&atmel_aes.dev_list,
					  struct atmel_aes_dev, list);
	spin_unlock_bh(&atmel_aes.lock);

	return aes_dd;
}
static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
	int err;

	err = clk_enable(dd->iclk);
	if (err)
		return err;

	atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
	atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);

	return 0;
}

static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd)
{
	return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff;
}

static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
{
	int err;

	err = atmel_aes_hw_init(dd);
	if (err)
		return err;

	dd->hw_version = atmel_aes_get_version(dd);

	dev_info(dd->dev, "version: 0x%x\n", dd->hw_version);

	clk_disable(dd->iclk);

	return 0;
}
static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd,
				      const struct atmel_aes_reqctx *rctx)
{
	/* Clear all but persistent flags and set request flags. */
	dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode;
}

static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd)
{
	return (dd->flags & AES_FLAGS_ENCRYPT);
}
#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
#endif

static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

	if (req->cryptlen < ivsize)
		return;

	if (rctx->mode & AES_FLAGS_ENCRYPT)
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->cryptlen - ivsize, ivsize, 0);
	else
		memcpy(req->iv, rctx->lastc, ivsize);
}
static inline struct atmel_aes_ctr_ctx *
atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
}

static void atmel_aes_ctr_update_req_iv(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	int i;

	/*
	 * The CTR transfer works in fragments of data of maximum 1 MByte
	 * because of the 16 bit CTR counter embedded in the IP. When reaching
	 * here, ctx->blocks contains the number of blocks of the last fragment
	 * processed, so there is no need to explicitly cast it to u16.
	 */
	for (i = 0; i < ctx->blocks; i++)
		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);

	memcpy(req->iv, ctx->iv, ivsize);
}
static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->ctx->is_aead)
		atmel_aes_authenc_complete(dd, err);
#endif

	clk_disable(dd->iclk);
	dd->flags &= ~AES_FLAGS_BUSY;

	if (!err && !dd->ctx->is_aead &&
	    (rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB) {
		if ((rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_CTR)
			atmel_aes_set_iv_as_last_ciphertext_block(dd);
		else
			atmel_aes_ctr_update_req_iv(dd);
	}

	if (dd->is_async)
		crypto_request_complete(dd->areq, err);

	tasklet_schedule(&dd->queue_task);

	return err;
}
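/*
 * IV chaining note: for CBC-style modes the next IV is the last ciphertext
 * block (copied above from req->dst on encryption, or from the lastc
 * snapshot taken before an in-place decryption), while CTR instead carries
 * the incremented counter forward; ECB has no IV to update.
 */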
static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma,
				     const __be32 *iv, const u32 *key,
				     int keylen)
{
	u32 valmr = 0;

	/* MR register must be set before IV registers */
	if (keylen == AES_KEYSIZE_128)
		valmr |= AES_MR_KEYSIZE_128;
	else if (keylen == AES_KEYSIZE_192)
		valmr |= AES_MR_KEYSIZE_192;
	else
		valmr |= AES_MR_KEYSIZE_256;

	valmr |= dd->flags & AES_FLAGS_MODE_MASK;

	if (use_dma) {
		valmr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			valmr |= AES_MR_DUALBUFF;
	} else {
		valmr |= AES_MR_SMOD_AUTO;
	}

	atmel_aes_write(dd, AES_MR, valmr);

	atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen));

	if (iv && (valmr & AES_MR_OPMOD_MASK) != AES_MR_OPMOD_ECB)
		atmel_aes_write_block(dd, AES_IVR(0), iv);
}

static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
					const __be32 *iv)
{
	atmel_aes_write_ctrl_key(dd, use_dma, iv,
				 dd->ctx->key, dd->ctx->keylen);
}
/* CPU transfer */

static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd)
{
	int err = 0;
	u32 isr;

	for (;;) {
		atmel_aes_read_block(dd, AES_ODATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		if (dd->datalen < AES_BLOCK_SIZE)
			break;

		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_cpu_transfer;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				 dd->buf, dd->total))
		err = -EINVAL;

	if (err)
		return atmel_aes_complete(dd, err);

	return dd->cpu_transfer_complete(dd);
}

static int atmel_aes_cpu_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE);

	if (unlikely(len == 0))
		return -EINVAL;

	sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);

	dd->total = len;
	dd->real_dst = dst;
	dd->cpu_transfer_complete = resume;
	dd->datalen = len + padlen;
	dd->data = (u32 *)dd->buf;
	atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);

	return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer);
}
static void atmel_aes_dma_callback(void *data);

static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd,
				    struct scatterlist *sg,
				    size_t len,
				    struct atmel_aes_dma *dma)
{
	int nents;

	if (!IS_ALIGNED(len, dd->ctx->block_size))
		return false;

	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return false;

		if (len <= sg->length) {
			if (!IS_ALIGNED(len, dd->ctx->block_size))
				return false;

			dma->nents = nents + 1;
			dma->remainder = sg->length - len;
			sg->length = len;
			return true;
		}

		if (!IS_ALIGNED(sg->length, dd->ctx->block_size))
			return false;

		len -= sg->length;
	}

	return false;
}

static inline void atmel_aes_restore_sg(const struct atmel_aes_dma *dma)
{
	struct scatterlist *sg = dma->sg;
	int nents = dma->nents;

	if (!dma->remainder)
		return;

	while (--nents > 0 && sg)
		sg = sg_next(sg);

	if (!sg)
		return;

	/* Undo the trimming done by atmel_aes_check_aligned(). */
	sg->length += dma->remainder;
}
static int atmel_aes_map(struct atmel_aes_dev *dd,
			 struct scatterlist *src,
			 struct scatterlist *dst,
			 size_t len)
{
	bool src_aligned, dst_aligned;
	size_t padlen;

	dd->total = len;
	dd->src.sg = src;
	dd->dst.sg = dst;
	dd->real_dst = dst;

	src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src);
	if (src == dst)
		dst_aligned = src_aligned;
	else
		dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst);
	if (!src_aligned || !dst_aligned) {
		padlen = atmel_aes_padlen(len, dd->ctx->block_size);

		if (dd->buflen < len + padlen)
			return -ENOMEM;

		if (!src_aligned) {
			sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);
			dd->src.sg = &dd->aligned_sg;
			dd->src.nents = 1;
			dd->src.remainder = 0;
		}

		if (!dst_aligned) {
			dd->dst.sg = &dd->aligned_sg;
			dd->dst.nents = 1;
			dd->dst.remainder = 0;
		}

		sg_init_table(&dd->aligned_sg, 1);
		sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen);
	}

	if (dd->src.sg == dd->dst.sg) {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_BIDIRECTIONAL);
		dd->dst.sg_len = dd->src.sg_len;
		if (!dd->src.sg_len)
			return -EFAULT;
	} else {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_TO_DEVICE);
		if (!dd->src.sg_len)
			return -EFAULT;

		dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
					    DMA_FROM_DEVICE);
		if (!dd->dst.sg_len) {
			dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
				     DMA_TO_DEVICE);
			return -EFAULT;
		}
	}

	return 0;
}
static void atmel_aes_unmap(struct atmel_aes_dev *dd)
{
	if (dd->src.sg == dd->dst.sg) {
		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_BIDIRECTIONAL);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	} else {
		dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents,
			     DMA_FROM_DEVICE);

		if (dd->dst.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->dst);

		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_TO_DEVICE);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	}

	if (dd->dst.sg == &dd->aligned_sg)
		sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				    dd->buf, dd->total);
}
*dd
,
790 enum dma_slave_buswidth addr_width
,
791 enum dma_transfer_direction dir
,
794 struct dma_async_tx_descriptor
*desc
;
795 struct dma_slave_config config
;
796 dma_async_tx_callback callback
;
797 struct atmel_aes_dma
*dma
;
800 memset(&config
, 0, sizeof(config
));
801 config
.src_addr_width
= addr_width
;
802 config
.dst_addr_width
= addr_width
;
803 config
.src_maxburst
= maxburst
;
804 config
.dst_maxburst
= maxburst
;
810 config
.dst_addr
= dd
->phys_base
+ AES_IDATAR(0);
815 callback
= atmel_aes_dma_callback
;
816 config
.src_addr
= dd
->phys_base
+ AES_ODATAR(0);
823 err
= dmaengine_slave_config(dma
->chan
, &config
);
827 desc
= dmaengine_prep_slave_sg(dma
->chan
, dma
->sg
, dma
->sg_len
, dir
,
828 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
832 desc
->callback
= callback
;
833 desc
->callback_param
= dd
;
834 dmaengine_submit(desc
);
835 dma_async_issue_pending(dma
->chan
);
840 static int atmel_aes_dma_start(struct atmel_aes_dev
*dd
,
841 struct scatterlist
*src
,
842 struct scatterlist
*dst
,
844 atmel_aes_fn_t resume
)
846 enum dma_slave_buswidth addr_width
;
850 switch (dd
->ctx
->block_size
) {
852 addr_width
= DMA_SLAVE_BUSWIDTH_4_BYTES
;
853 maxburst
= dd
->caps
.max_burst_size
;
861 err
= atmel_aes_map(dd
, src
, dst
, len
);
867 /* Set output DMA transfer first */
868 err
= atmel_aes_dma_transfer_start(dd
, addr_width
, DMA_DEV_TO_MEM
,
873 /* Then set input DMA transfer */
874 err
= atmel_aes_dma_transfer_start(dd
, addr_width
, DMA_MEM_TO_DEV
,
877 goto output_transfer_stop
;
881 output_transfer_stop
:
882 dmaengine_terminate_sync(dd
->dst
.chan
);
886 return atmel_aes_complete(dd
, err
);
static void atmel_aes_dma_callback(void *data)
{
	struct atmel_aes_dev *dd = data;

	atmel_aes_unmap(dd);
	dd->is_async = true;
	(void)dd->resume(dd);
}
static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
				  struct crypto_async_request *new_areq)
{
	struct crypto_async_request *areq, *backlog;
	struct atmel_aes_base_ctx *ctx;
	unsigned long flags;
	bool start_async;
	int err, ret = 0;

	spin_lock_irqsave(&dd->lock, flags);
	if (new_areq)
		ret = crypto_enqueue_request(&dd->queue, new_areq);
	if (dd->flags & AES_FLAGS_BUSY) {
		spin_unlock_irqrestore(&dd->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&dd->queue);
	areq = crypto_dequeue_request(&dd->queue);
	if (areq)
		dd->flags |= AES_FLAGS_BUSY;
	spin_unlock_irqrestore(&dd->lock, flags);

	if (!areq)
		return ret;

	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);

	ctx = crypto_tfm_ctx(areq->tfm);

	dd->areq = areq;
	dd->ctx = ctx;
	start_async = (areq != new_areq);
	dd->is_async = start_async;

	/* WARNING: ctx->start() MAY change dd->is_async. */
	err = ctx->start(dd);
	return (start_async) ? ret : err;
}
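/*
 * Return-value convention: when the new request was queued and ends up being
 * served asynchronously, the caller gets the crypto_enqueue_request() status
 * (-EINPROGRESS, or -EBUSY for a backlogged request); only a synchronous
 * start reports the ctx->start() error directly.
 */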
/* AES async block ciphers */

static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd)
{
	return atmel_aes_complete(dd, 0);
}

static int atmel_aes_start(struct atmel_aes_dev *dd)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD ||
			dd->ctx->block_size != AES_BLOCK_SIZE);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst,
					   req->cryptlen,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
				   atmel_aes_transfer_complete);
}
static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct scatterlist *src, *dst;
	size_t datalen;
	u32 ctr;
	u16 start, end;
	bool use_dma, fragmented = false;

	/* Check for transfer completion. */
	ctx->offset += dd->total;
	if (ctx->offset >= req->cryptlen)
		return atmel_aes_transfer_complete(dd);

	/* Compute data length. */
	datalen = req->cryptlen - ctx->offset;
	ctx->blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
	ctr = be32_to_cpu(ctx->iv[3]);

	/* Check 16bit counter overflow. */
	start = ctr & 0xffff;
	end = start + ctx->blocks - 1;

	if (ctx->blocks >> 16 || end < start) {
		ctr |= 0xffff;
		datalen = AES_BLOCK_SIZE * (0x10000 - start);
		fragmented = true;
	}

	use_dma = (datalen >= ATMEL_AES_DMA_THRESHOLD);

	/* Jump to offset. */
	src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset));

	/* Configure hardware. */
	atmel_aes_write_ctrl(dd, use_dma, ctx->iv);
	if (unlikely(fragmented)) {
		/*
		 * Increment the counter manually to cope with the hardware
		 * counter overflow.
		 */
		ctx->iv[3] = cpu_to_be32(ctr);
		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
	}

	if (use_dma)
		return atmel_aes_dma_start(dd, src, dst, datalen,
					   atmel_aes_ctr_transfer);

	return atmel_aes_cpu_start(dd, src, dst, datalen,
				   atmel_aes_ctr_transfer);
}
*dd
)
1028 struct atmel_aes_ctr_ctx
*ctx
= atmel_aes_ctr_ctx_cast(dd
->ctx
);
1029 struct skcipher_request
*req
= skcipher_request_cast(dd
->areq
);
1030 struct atmel_aes_reqctx
*rctx
= skcipher_request_ctx(req
);
1033 atmel_aes_set_mode(dd
, rctx
);
1035 err
= atmel_aes_hw_init(dd
);
1037 return atmel_aes_complete(dd
, err
);
1039 memcpy(ctx
->iv
, req
->iv
, AES_BLOCK_SIZE
);
1042 return atmel_aes_ctr_transfer(dd
);
static int atmel_aes_xts_fallback(struct skcipher_request *req, bool enc)
{
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));

	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, req->src, req->dst,
				   req->cryptlen, req->iv);

	return enc ? crypto_skcipher_encrypt(&rctx->fallback_req) :
		     crypto_skcipher_decrypt(&rctx->fallback_req);
}
static int atmel_aes_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct atmel_aes_reqctx *rctx;
	u32 opmode = mode & AES_FLAGS_OPMODE_MASK;

	if (opmode == AES_FLAGS_XTS) {
		if (req->cryptlen < XTS_BLOCK_SIZE)
			return -EINVAL;

		if (!IS_ALIGNED(req->cryptlen, XTS_BLOCK_SIZE))
			return atmel_aes_xts_fallback(req,
						      mode & AES_FLAGS_ENCRYPT);
	}

	/*
	 * ECB, CBC or CTR mode require the plaintext and ciphertext
	 * to have a positive integer length.
	 */
	if (!req->cryptlen && opmode != AES_FLAGS_XTS)
		return 0;

	if ((opmode == AES_FLAGS_ECB || opmode == AES_FLAGS_CBC) &&
	    !IS_ALIGNED(req->cryptlen, crypto_skcipher_blocksize(skcipher)))
		return -EINVAL;

	ctx->block_size = AES_BLOCK_SIZE;
	ctx->is_aead = false;

	rctx = skcipher_request_ctx(req);
	rctx->mode = mode;

	if (opmode != AES_FLAGS_ECB &&
	    !(mode & AES_FLAGS_ENCRYPT)) {
		unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

		if (req->cryptlen >= ivsize)
			scatterwalk_map_and_copy(rctx->lastc, req->src,
						 req->cryptlen - ivsize,
						 ivsize, 0);
	}

	return atmel_aes_handle_queue(ctx->dd, &req->base);
}
static int atmel_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}
static int atmel_aes_ecb_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ecb_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB);
}

static int atmel_aes_cbc_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cbc_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC);
}

static int atmel_aes_ctr_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ctr_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR);
}
static int atmel_aes_init_tfm(struct crypto_skcipher *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct atmel_aes_dev *dd;

	dd = atmel_aes_dev_alloc(&ctx->base);
	if (!dd)
		return -ENODEV;

	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.dd = dd;
	ctx->base.start = atmel_aes_start;

	return 0;
}

static int atmel_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct atmel_aes_dev *dd;

	dd = atmel_aes_dev_alloc(&ctx->base);
	if (!dd)
		return -ENODEV;

	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.dd = dd;
	ctx->base.start = atmel_aes_ctr_start;

	return 0;
}
static struct skcipher_alg aes_algs[] = {
{
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "atmel-ecb-aes",
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_ecb_encrypt,
	.decrypt		= atmel_aes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "atmel-cbc-aes",
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cbc_encrypt,
	.decrypt		= atmel_aes_cbc_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "atmel-ctr-aes",
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctr_ctx),

	.init			= atmel_aes_ctr_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_ctr_encrypt,
	.decrypt		= atmel_aes_ctr_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
};
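/*
 * Users never call these handlers directly; they reach them through the
 * generic crypto API. A minimal sketch (error handling elided):
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, len, iv);
 *	crypto_skcipher_encrypt(req);	// may return -EINPROGRESS
 *
 * With ATMEL_AES_PRIORITY at 300, "atmel-cbc-aes" is preferred over the
 * generic software implementation whenever this driver is loaded.
 */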
/* gcm aead functions */

static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const __be32 *ghash_in, __be32 *ghash_out,
			       atmel_aes_fn_t resume);
static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);

static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);

static inline struct atmel_aes_gcm_ctx *
atmel_aes_gcm_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_gcm_ctx, base);
}
static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const __be32 *ghash_in, __be32 *ghash_out,
			       atmel_aes_fn_t resume)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	dd->data = (u32 *)data;
	dd->datalen = datalen;
	ctx->ghash_in = ghash_in;
	ctx->ghash_out = ghash_out;
	ctx->ghash_resume = resume;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init);
}
static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	/* Set the data length. */
	atmel_aes_write(dd, AES_AADLENR, dd->total);
	atmel_aes_write(dd, AES_CLENR, 0);

	/* If needed, overwrite the GCM Intermediate Hash Word Registers. */
	if (ctx->ghash_in)
		atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in);

	return atmel_aes_gcm_ghash_finalize(dd);
}
static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	u32 isr;

	/* Write data into the Input Data Registers. */
	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_ghash_finalize;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	/* Read the computed hash from GHASHRx. */
	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out);

	return ctx->ghash_resume(dd);
}
static int atmel_aes_gcm_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_reqctx *rctx = aead_request_ctx(req);
	size_t ivsize = crypto_aead_ivsize(tfm);
	size_t datalen, padlen;
	const void *iv = req->iv;
	u8 *data = dd->buf;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	if (likely(ivsize == GCM_AES_IV_SIZE)) {
		memcpy(ctx->j0, iv, ivsize);
		ctx->j0[3] = cpu_to_be32(1);
		return atmel_aes_gcm_process(dd);
	}

	padlen = atmel_aes_padlen(ivsize, AES_BLOCK_SIZE);
	datalen = ivsize + padlen + AES_BLOCK_SIZE;
	if (datalen > dd->buflen)
		return atmel_aes_complete(dd, -EINVAL);

	memcpy(data, iv, ivsize);
	memset(data + ivsize, 0, padlen + sizeof(u64));
	((__be64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen,
				   NULL, ctx->j0, atmel_aes_gcm_process);
}
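/*
 * This matches the J0 construction of NIST SP 800-38D: for the common 96-bit
 * IV, J0 = IV || 0^31 || 1 (the cpu_to_be32(1) above); for any other IV
 * length, J0 = GHASH(IV padded to a block boundary || 0^64 || [len(IV)]_64),
 * which is exactly what the buffer assembled just above feeds to the GHASH
 * pass.
 */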
static int atmel_aes_gcm_process(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 authsize;

	/* Compute text length. */
	authsize = crypto_aead_authsize(tfm);
	ctx->textlen = req->cryptlen - (enc ? 0 : authsize);

	/*
	 * According to tcrypt test suite, the GCM Automatic Tag Generation
	 * fails when both the message and its associated data are empty.
	 */
	if (likely(req->assoclen != 0 || ctx->textlen != 0))
		dd->flags |= AES_FLAGS_GTAGEN;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length);
}
static int atmel_aes_gcm_length(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	__be32 j0_lsw, *j0 = ctx->j0;
	size_t padlen;

	/* Write incr32(J0) into IV. */
	j0_lsw = j0[3];
	be32_add_cpu(&j0[3], 1);
	atmel_aes_write_block(dd, AES_IVR(0), j0);
	j0[3] = j0_lsw;

	/* Set aad and text lengths. */
	atmel_aes_write(dd, AES_AADLENR, req->assoclen);
	atmel_aes_write(dd, AES_CLENR, ctx->textlen);

	/* Check whether AAD are present. */
	if (unlikely(req->assoclen == 0)) {
		dd->datalen = 0;
		return atmel_aes_gcm_data(dd);
	}

	/* Copy assoc data and add padding. */
	padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);
	if (unlikely(req->assoclen + padlen > dd->buflen))
		return atmel_aes_complete(dd, -EINVAL);
	sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);

	/* Write assoc data into the Input Data register. */
	dd->data = (u32 *)dd->buf;
	dd->datalen = req->assoclen + padlen;
	return atmel_aes_gcm_data(dd);
}
static int atmel_aes_gcm_data(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	bool use_dma = (ctx->textlen >= ATMEL_AES_DMA_THRESHOLD);
	struct scatterlist *src, *dst;
	u32 isr, mr;

	/* Write AAD first. */
	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_data;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	/* All AAD consumed; with no text to process, go straight to the tag. */
	if (unlikely(ctx->textlen == 0))
		return atmel_aes_gcm_tag_init(dd);

	/* Prepare src and dst scatter lists to transfer cipher/plain texts */
	src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));

	if (use_dma) {
		/* Update the Mode Register for DMA transfers. */
		mr = atmel_aes_read(dd, AES_MR);
		mr &= ~(AES_MR_SMOD_MASK | AES_MR_DUALBUFF);
		mr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			mr |= AES_MR_DUALBUFF;
		atmel_aes_write(dd, AES_MR, mr);

		return atmel_aes_dma_start(dd, src, dst, ctx->textlen,
					   atmel_aes_gcm_tag_init);
	}

	return atmel_aes_cpu_start(dd, src, dst, ctx->textlen,
				   atmel_aes_gcm_tag_init);
}
static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	__be64 *data = dd->buf;

	if (likely(dd->flags & AES_FLAGS_GTAGEN)) {
		if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) {
			dd->resume = atmel_aes_gcm_tag_init;
			atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY);
			return -EINPROGRESS;
		}

		return atmel_aes_gcm_finalize(dd);
	}

	/* Read the GCM Intermediate Hash Word Registers. */
	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash);

	data[0] = cpu_to_be64(req->assoclen * 8);
	data[1] = cpu_to_be64(ctx->textlen * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE,
				   ctx->ghash, ctx->ghash, atmel_aes_gcm_tag);
}
static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	unsigned long flags;

	/*
	 * Change mode to CTR to complete the tag generation.
	 * Use J0 as Initialization Vector.
	 */
	flags = dd->flags;
	dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN);
	dd->flags |= AES_FLAGS_CTR;
	atmel_aes_write_ctrl(dd, false, ctx->j0);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize);
}
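/*
 * In SP 800-38D terms this computes T = GCTR(J0, S): the final GHASH value S
 * is run through a single block of AES-CTR keyed with J0 as the counter
 * block, which yields the same tag the hardware would have produced itself
 * had automatic tag generation (GTAGEN) been usable for this request.
 */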
static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 offset, authsize, itag[4], *otag = ctx->tag;
	int err;

	/* Read the computed tag. */
	if (likely(dd->flags & AES_FLAGS_GTAGEN))
		atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag);
	else
		atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag);

	offset = req->assoclen + ctx->textlen;
	authsize = crypto_aead_authsize(tfm);
	if (enc) {
		scatterwalk_map_and_copy(otag, req->dst, offset, authsize, 1);
		err = 0;
	} else {
		scatterwalk_map_and_copy(itag, req->src, offset, authsize, 0);
		err = crypto_memneq(itag, otag, authsize) ? -EBADMSG : 0;
	}

	return atmel_aes_complete(dd, err);
}
static int atmel_aes_gcm_crypt(struct aead_request *req,
			       unsigned long mode)
{
	struct atmel_aes_base_ctx *ctx;
	struct atmel_aes_reqctx *rctx;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	ctx->block_size = AES_BLOCK_SIZE;
	ctx->is_aead = true;

	rctx = aead_request_ctx(req);
	rctx->mode = AES_FLAGS_GCM | mode;

	return atmel_aes_handle_queue(ctx->dd, &req->base);
}
*tfm
, const u8
*key
,
1544 unsigned int keylen
)
1546 struct atmel_aes_base_ctx
*ctx
= crypto_aead_ctx(tfm
);
1548 if (keylen
!= AES_KEYSIZE_256
&&
1549 keylen
!= AES_KEYSIZE_192
&&
1550 keylen
!= AES_KEYSIZE_128
)
1553 memcpy(ctx
->key
, key
, keylen
);
1554 ctx
->keylen
= keylen
;
1559 static int atmel_aes_gcm_setauthsize(struct crypto_aead
*tfm
,
1560 unsigned int authsize
)
1562 return crypto_gcm_check_authsize(authsize
);
1565 static int atmel_aes_gcm_encrypt(struct aead_request
*req
)
1567 return atmel_aes_gcm_crypt(req
, AES_FLAGS_ENCRYPT
);
1570 static int atmel_aes_gcm_decrypt(struct aead_request
*req
)
1572 return atmel_aes_gcm_crypt(req
, 0);
1575 static int atmel_aes_gcm_init(struct crypto_aead
*tfm
)
1577 struct atmel_aes_gcm_ctx
*ctx
= crypto_aead_ctx(tfm
);
1578 struct atmel_aes_dev
*dd
;
1580 dd
= atmel_aes_dev_alloc(&ctx
->base
);
1584 crypto_aead_set_reqsize(tfm
, sizeof(struct atmel_aes_reqctx
));
1586 ctx
->base
.start
= atmel_aes_gcm_start
;
1591 static struct aead_alg aes_gcm_alg
= {
1592 .setkey
= atmel_aes_gcm_setkey
,
1593 .setauthsize
= atmel_aes_gcm_setauthsize
,
1594 .encrypt
= atmel_aes_gcm_encrypt
,
1595 .decrypt
= atmel_aes_gcm_decrypt
,
1596 .init
= atmel_aes_gcm_init
,
1597 .ivsize
= GCM_AES_IV_SIZE
,
1598 .maxauthsize
= AES_BLOCK_SIZE
,
1601 .cra_name
= "gcm(aes)",
1602 .cra_driver_name
= "atmel-gcm-aes",
1604 .cra_ctxsize
= sizeof(struct atmel_aes_gcm_ctx
),
/* xts functions */

static inline struct atmel_aes_xts_ctx *
atmel_aes_xts_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_xts_ctx, base);
}

static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);
static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	unsigned long flags;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	/* Compute the tweak value from req->iv with ecb(aes). */
	flags = dd->flags;
	dd->flags &= ~AES_FLAGS_MODE_MASK;
	dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
	atmel_aes_write_ctrl_key(dd, false, NULL,
				 ctx->key2, ctx->base.keylen);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), req->iv);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data);
}
static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD);
	u32 tweak[AES_BLOCK_SIZE / sizeof(u32)];
	static const __le32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
	u8 *tweak_bytes = (u8 *)tweak;
	int i;

	/* Read the computed ciphered tweak value. */
	atmel_aes_read_block(dd, AES_ODATAR(0), tweak);

	/*
	 * Hardware quirk:
	 * the order of the ciphered tweak bytes needs to be reversed before
	 * writing them into the ODATARx registers.
	 */
	for (i = 0; i < AES_BLOCK_SIZE/2; ++i)
		swap(tweak_bytes[i], tweak_bytes[AES_BLOCK_SIZE - 1 - i]);

	/* Process the data. */
	atmel_aes_write_ctrl(dd, use_dma, NULL);
	atmel_aes_write_block(dd, AES_TWR(0), tweak);
	atmel_aes_write_block(dd, AES_ALPHAR(0), one);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst,
					   req->cryptlen,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
				   atmel_aes_transfer_complete);
}
static int atmel_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
				unsigned int keylen)
{
	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm, tfm->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
	if (err)
		return err;

	memcpy(ctx->base.key, key, keylen/2);
	memcpy(ctx->key2, key + keylen/2, keylen/2);
	ctx->base.keylen = keylen/2;

	return 0;
}
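/*
 * Per the XTS construction (IEEE P1619), the supplied key is really two AES
 * keys of equal length: the first half (ctx->base.key) encrypts the data
 * units, while the second half (ctx->key2) encrypts the tweak in
 * atmel_aes_xts_start(). xts_verify_key() additionally rejects malformed
 * keys (and, in FIPS mode, identical halves).
 */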
static int atmel_aes_xts_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_XTS | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_xts_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_XTS);
}
static int atmel_aes_xts_init_tfm(struct crypto_skcipher *tfm)
{
	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct atmel_aes_dev *dd;
	const char *tfm_name = crypto_tfm_alg_name(&tfm->base);

	dd = atmel_aes_dev_alloc(&ctx->base);
	if (!dd)
		return -ENODEV;

	ctx->fallback_tfm = crypto_alloc_skcipher(tfm_name, 0,
						  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm))
		return PTR_ERR(ctx->fallback_tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx) +
				    crypto_skcipher_reqsize(ctx->fallback_tfm));
	ctx->base.dd = dd;
	ctx->base.start = atmel_aes_xts_start;

	return 0;
}

static void atmel_aes_xts_exit_tfm(struct crypto_skcipher *tfm)
{
	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback_tfm);
}
static struct skcipher_alg aes_xts_alg = {
	.base.cra_name		= "xts(aes)",
	.base.cra_driver_name	= "atmel-xts-aes",
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_xts_ctx),
	.base.cra_flags		= CRYPTO_ALG_NEED_FALLBACK,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= atmel_aes_xts_setkey,
	.encrypt		= atmel_aes_xts_encrypt,
	.decrypt		= atmel_aes_xts_decrypt,
	.init			= atmel_aes_xts_init_tfm,
	.exit			= atmel_aes_xts_exit_tfm,
};
#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
/* authenc aead functions */

static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
				  bool is_async);
static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
				      bool is_async);
static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
				   bool is_async);

static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	if (err && (dd->flags & AES_FLAGS_OWN_SHA))
		atmel_sha_authenc_abort(&rctx->auth_req);
	dd->flags &= ~AES_FLAGS_OWN_SHA;
}
*dd
)
1782 struct aead_request
*req
= aead_request_cast(dd
->areq
);
1783 struct atmel_aes_authenc_reqctx
*rctx
= aead_request_ctx(req
);
1784 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
1785 struct atmel_aes_authenc_ctx
*ctx
= crypto_aead_ctx(tfm
);
1788 atmel_aes_set_mode(dd
, &rctx
->base
);
1790 err
= atmel_aes_hw_init(dd
);
1792 return atmel_aes_complete(dd
, err
);
1794 return atmel_sha_authenc_schedule(&rctx
->auth_req
, ctx
->auth
,
1795 atmel_aes_authenc_init
, dd
);
1798 static int atmel_aes_authenc_init(struct atmel_aes_dev
*dd
, int err
,
1801 struct aead_request
*req
= aead_request_cast(dd
->areq
);
1802 struct atmel_aes_authenc_reqctx
*rctx
= aead_request_ctx(req
);
1805 dd
->is_async
= true;
1807 return atmel_aes_complete(dd
, err
);
1809 /* If here, we've got the ownership of the SHA device. */
1810 dd
->flags
|= AES_FLAGS_OWN_SHA
;
1812 /* Configure the SHA device. */
1813 return atmel_sha_authenc_init(&rctx
->auth_req
,
1814 req
->src
, req
->assoclen
,
1816 atmel_aes_authenc_transfer
, dd
);
static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
				      bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	bool enc = atmel_aes_is_encrypt(dd);
	struct scatterlist *src, *dst;
	__be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
	u32 emr;

	if (is_async)
		dd->is_async = true;
	if (err)
		return atmel_aes_complete(dd, err);

	/* Prepare src and dst scatter-lists to transfer cipher/plain texts. */
	src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
	dst = src;

	if (req->src != req->dst)
		dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);

	/* Configure the AES device. */
	memcpy(iv, req->iv, sizeof(iv));

	/*
	 * Here we always set the 2nd parameter of atmel_aes_write_ctrl() to
	 * 'true' even if the data transfer is actually performed by the CPU (so
	 * not by the DMA) because we must force the AES_MR_SMOD bitfield to the
	 * value AES_MR_SMOD_IDATAR0. Indeed, both AES_MR_SMOD and SHA_MR_SMOD
	 * must be set to *_MR_SMOD_IDATAR0.
	 */
	atmel_aes_write_ctrl(dd, true, iv);
	emr = AES_EMR_PLIPEN;
	if (!enc)
		emr |= AES_EMR_PLIPD;
	atmel_aes_write(dd, AES_EMR, emr);

	/* Transfer data. */
	return atmel_aes_dma_start(dd, src, dst, rctx->textlen,
				   atmel_aes_authenc_digest);
}
static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	/* atmel_sha_authenc_final() releases the SHA device. */
	dd->flags &= ~AES_FLAGS_OWN_SHA;
	return atmel_sha_authenc_final(&rctx->auth_req,
				       rctx->digest, sizeof(rctx->digest),
				       atmel_aes_authenc_final, dd);
}
static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
				   bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 idigest[SHA512_DIGEST_SIZE / sizeof(u32)], *odigest = rctx->digest;
	u32 offs, authsize;

	if (is_async)
		dd->is_async = true;
	if (err)
		goto complete;

	offs = req->assoclen + rctx->textlen;
	authsize = crypto_aead_authsize(tfm);
	if (enc) {
		scatterwalk_map_and_copy(odigest, req->dst, offs, authsize, 1);
	} else {
		scatterwalk_map_and_copy(idigest, req->src, offs, authsize, 0);
		if (crypto_memneq(idigest, odigest, authsize))
			err = -EBADMSG;
	}

complete:
	return atmel_aes_complete(dd, err);
}
static int atmel_aes_authenc_setkey(struct crypto_aead *tfm, const u8 *key,
				    unsigned int keylen)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_authenc_keys keys;
	int err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->base.key))
		goto badkey;

	/* Save auth key. */
	err = atmel_sha_authenc_setkey(ctx->auth,
				       keys.authkey, keys.authkeylen,
				       crypto_aead_get_flags(tfm));
	if (err) {
		memzero_explicit(&keys, sizeof(keys));
		return err;
	}

	ctx->base.keylen = keys.enckeylen;
	memcpy(ctx->base.key, keys.enckey, keys.enckeylen);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
static int atmel_aes_authenc_init_tfm(struct crypto_aead *tfm,
				      unsigned long auth_mode)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int auth_reqsize = atmel_sha_authenc_get_reqsize();
	struct atmel_aes_dev *dd;

	dd = atmel_aes_dev_alloc(&ctx->base);
	if (!dd)
		return -ENODEV;

	ctx->auth = atmel_sha_authenc_spawn(auth_mode);
	if (IS_ERR(ctx->auth))
		return PTR_ERR(ctx->auth);

	crypto_aead_set_reqsize(tfm, (sizeof(struct atmel_aes_authenc_reqctx) +
				      auth_reqsize));
	ctx->base.dd = dd;
	ctx->base.start = atmel_aes_authenc_start;

	return 0;
}

static int atmel_aes_authenc_hmac_sha1_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA1);
}

static int atmel_aes_authenc_hmac_sha224_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA224);
}

static int atmel_aes_authenc_hmac_sha256_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA256);
}

static int atmel_aes_authenc_hmac_sha384_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA384);
}

static int atmel_aes_authenc_hmac_sha512_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA512);
}
static void atmel_aes_authenc_exit_tfm(struct crypto_aead *tfm)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);

	atmel_sha_authenc_free(ctx->auth);
}
static int atmel_aes_authenc_crypt(struct aead_request *req,
				   unsigned long mode)
{
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
	u32 authsize = crypto_aead_authsize(tfm);
	bool enc = (mode & AES_FLAGS_ENCRYPT);

	/* Compute text length. */
	if (!enc && req->cryptlen < authsize)
		return -EINVAL;
	rctx->textlen = req->cryptlen - (enc ? 0 : authsize);

	/*
	 * Currently, empty messages are not supported yet:
	 * the SHA auto-padding can be used only on non-empty messages.
	 * Hence a special case needs to be implemented for empty message.
	 */
	if (!rctx->textlen && !req->assoclen)
		return -EINVAL;

	rctx->base.mode = mode;
	ctx->block_size = AES_BLOCK_SIZE;
	ctx->is_aead = true;

	return atmel_aes_handle_queue(ctx->dd, &req->base);
}

static int atmel_aes_authenc_cbc_aes_encrypt(struct aead_request *req)
{
	return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_authenc_cbc_aes_decrypt(struct aead_request *req)
{
	return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC);
}
static struct aead_alg aes_authenc_algs[] = {
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha1_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA1_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha1),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha1-cbc-aes",
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha224_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA224_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha224),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha224-cbc-aes",
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha256_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA256_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha256),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha256-cbc-aes",
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha384_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA384_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha384),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha384-cbc-aes",
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha512_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA512_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha512),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha512-cbc-aes",
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
	},
},
};
#endif /* CONFIG_CRYPTO_DEV_ATMEL_AUTHENC */
/* Probe functions */

static int atmel_aes_buff_init(struct atmel_aes_dev *dd)
{
	dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER);
	dd->buflen = ATMEL_AES_BUFFER_SIZE;
	dd->buflen &= ~(AES_BLOCK_SIZE - 1);

	if (!dd->buf) {
		dev_err(dd->dev, "unable to alloc pages.\n");
		return -ENOMEM;
	}

	return 0;
}

static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd)
{
	/* Free the whole allocation, not just its first page. */
	free_pages((unsigned long)dd->buf, ATMEL_AES_BUFFER_ORDER);
}
static int atmel_aes_dma_init(struct atmel_aes_dev *dd)
{
	int ret;

	/* Try to grab 2 DMA channels */
	dd->src.chan = dma_request_chan(dd->dev, "tx");
	if (IS_ERR(dd->src.chan)) {
		ret = PTR_ERR(dd->src.chan);
		goto err_dma_in;
	}

	dd->dst.chan = dma_request_chan(dd->dev, "rx");
	if (IS_ERR(dd->dst.chan)) {
		ret = PTR_ERR(dd->dst.chan);
		goto err_dma_out;
	}

	return 0;

err_dma_out:
	dma_release_channel(dd->src.chan);
err_dma_in:
	dev_err(dd->dev, "no DMA channel available\n");
	return ret;
}

static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
{
	dma_release_channel(dd->dst.chan);
	dma_release_channel(dd->src.chan);
}
static void atmel_aes_queue_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	atmel_aes_handle_queue(dd, NULL);
}

static void atmel_aes_done_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	dd->is_async = true;
	(void)dd->resume(dd);
}
static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
{
	struct atmel_aes_dev *aes_dd = dev_id;
	u32 reg;

	reg = atmel_aes_read(aes_dd, AES_ISR);
	if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
		atmel_aes_write(aes_dd, AES_IDR, reg);
		if (AES_FLAGS_BUSY & aes_dd->flags)
			tasklet_schedule(&aes_dd->done_task);
		else
			dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n");
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
{
	int i;

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->caps.has_authenc)
		for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++)
			crypto_unregister_aead(&aes_authenc_algs[i]);
#endif

	if (dd->caps.has_xts)
		crypto_unregister_skcipher(&aes_xts_alg);

	if (dd->caps.has_gcm)
		crypto_unregister_aead(&aes_gcm_alg);

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
		crypto_unregister_skcipher(&aes_algs[i]);
}
static void atmel_aes_crypto_alg_init(struct crypto_alg *alg)
{
	alg->cra_flags |= CRYPTO_ALG_ASYNC;
	alg->cra_alignmask = 0xf;
	alg->cra_priority = ATMEL_AES_PRIORITY;
	alg->cra_module = THIS_MODULE;
}
static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
{
	int err, i, j;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		atmel_aes_crypto_alg_init(&aes_algs[i].base);

		err = crypto_register_skcipher(&aes_algs[i]);
		if (err)
			goto err_aes_algs;
	}

	if (dd->caps.has_gcm) {
		atmel_aes_crypto_alg_init(&aes_gcm_alg.base);

		err = crypto_register_aead(&aes_gcm_alg);
		if (err)
			goto err_aes_gcm_alg;
	}

	if (dd->caps.has_xts) {
		atmel_aes_crypto_alg_init(&aes_xts_alg.base);

		err = crypto_register_skcipher(&aes_xts_alg);
		if (err)
			goto err_aes_xts_alg;
	}

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->caps.has_authenc) {
		for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++) {
			atmel_aes_crypto_alg_init(&aes_authenc_algs[i].base);

			err = crypto_register_aead(&aes_authenc_algs[i]);
			if (err)
				goto err_aes_authenc_alg;
		}
	}
#endif

	return 0;

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	/* i = ARRAY_SIZE(aes_authenc_algs); */
err_aes_authenc_alg:
	for (j = 0; j < i; j++)
		crypto_unregister_aead(&aes_authenc_algs[j]);
	crypto_unregister_skcipher(&aes_xts_alg);
#endif
err_aes_xts_alg:
	crypto_unregister_aead(&aes_gcm_alg);
err_aes_gcm_alg:
	i = ARRAY_SIZE(aes_algs);
err_aes_algs:
	for (j = 0; j < i; j++)
		crypto_unregister_skcipher(&aes_algs[j]);

	return err;
}
static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
{
	dd->caps.has_dualbuff = 0;
	dd->caps.has_gcm = 0;
	dd->caps.has_xts = 0;
	dd->caps.has_authenc = 0;
	dd->caps.max_burst_size = 1;

	/* keep only major version number */
	switch (dd->hw_version & 0xff0) {
	case 0x700:
	case 0x600:
	case 0x500:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_gcm = 1;
		dd->caps.has_xts = 1;
		dd->caps.has_authenc = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x200:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_gcm = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x130:
		dd->caps.has_dualbuff = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x120:
		break;
	default:
		dev_warn(dd->dev,
			 "Unmanaged aes version, set minimum capabilities\n");
		break;
	}
}
static const struct of_device_id atmel_aes_dt_ids[] = {
	{ .compatible = "atmel,at91sam9g46-aes" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, atmel_aes_dt_ids);
static int atmel_aes_probe(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;
	struct device *dev = &pdev->dev;
	struct resource *aes_res;
	int err;

	aes_dd = devm_kzalloc(&pdev->dev, sizeof(*aes_dd), GFP_KERNEL);
	if (!aes_dd)
		return -ENOMEM;

	aes_dd->dev = dev;

	platform_set_drvdata(pdev, aes_dd);

	INIT_LIST_HEAD(&aes_dd->list);
	spin_lock_init(&aes_dd->lock);

	tasklet_init(&aes_dd->done_task, atmel_aes_done_task,
		     (unsigned long)aes_dd);
	tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task,
		     (unsigned long)aes_dd);

	crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH);

	aes_dd->io_base = devm_platform_get_and_ioremap_resource(pdev, 0, &aes_res);
	if (IS_ERR(aes_dd->io_base)) {
		err = PTR_ERR(aes_dd->io_base);
		goto err_tasklet_kill;
	}
	aes_dd->phys_base = aes_res->start;

	/* Get the IRQ */
	aes_dd->irq = platform_get_irq(pdev, 0);
	if (aes_dd->irq < 0) {
		err = aes_dd->irq;
		goto err_tasklet_kill;
	}

	err = devm_request_irq(&pdev->dev, aes_dd->irq, atmel_aes_irq,
			       IRQF_SHARED, "atmel-aes", aes_dd);
	if (err) {
		dev_err(dev, "unable to request aes irq.\n");
		goto err_tasklet_kill;
	}

	/* Initializing the clock */
	aes_dd->iclk = devm_clk_get_prepared(&pdev->dev, "aes_clk");
	if (IS_ERR(aes_dd->iclk)) {
		dev_err(dev, "clock initialization failed.\n");
		err = PTR_ERR(aes_dd->iclk);
		goto err_tasklet_kill;
	}

	err = atmel_aes_hw_version_init(aes_dd);
	if (err)
		goto err_tasklet_kill;

	atmel_aes_get_cap(aes_dd);

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (aes_dd->caps.has_authenc && !atmel_sha_authenc_is_ready()) {
		err = -EPROBE_DEFER;
		goto err_tasklet_kill;
	}
#endif

	err = atmel_aes_buff_init(aes_dd);
	if (err)
		goto err_tasklet_kill;

	err = atmel_aes_dma_init(aes_dd);
	if (err)
		goto err_buff_cleanup;

	spin_lock(&atmel_aes.lock);
	list_add_tail(&aes_dd->list, &atmel_aes.dev_list);
	spin_unlock(&atmel_aes.lock);

	err = atmel_aes_register_algs(aes_dd);
	if (err)
		goto err_algs;

	dev_info(dev, "Atmel AES - Using %s, %s for DMA transfers\n",
		 dma_chan_name(aes_dd->src.chan),
		 dma_chan_name(aes_dd->dst.chan));

	return 0;

err_algs:
	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);
	atmel_aes_dma_cleanup(aes_dd);
err_buff_cleanup:
	atmel_aes_buff_cleanup(aes_dd);
err_tasklet_kill:
	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	return err;
}
static void atmel_aes_remove(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;

	aes_dd = platform_get_drvdata(pdev);

	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);

	atmel_aes_unregister_algs(aes_dd);

	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	atmel_aes_dma_cleanup(aes_dd);
	atmel_aes_buff_cleanup(aes_dd);
}
static struct platform_driver atmel_aes_driver = {
	.probe		= atmel_aes_probe,
	.remove		= atmel_aes_remove,
	.driver		= {
		.name		= "atmel_aes",
		.of_match_table	= atmel_aes_dt_ids,
	},
};

module_platform_driver(atmel_aes_driver);

MODULE_DESCRIPTION("Atmel AES hw acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");