/*
 * Driver for ARTPEC-6 crypto block using the kernel asynchronous crypto api.
 *
 * Copyright (C) 2014-2017  Axis Communications AB
 */
#define pr_fmt(fmt)     KBUILD_MODNAME ": " fmt
#include <linux/bitfield.h>
#include <linux/crypto.h>
#include <linux/debugfs.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/fault-inject.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha.h>
#include <crypto/xts.h>
/* Max length of a line in all cache levels for Artpec SoCs. */
#define ARTPEC_CACHE_LINE_MAX	32

#define PDMA_OUT_CFG		0x0000
#define PDMA_OUT_BUF_CFG	0x0004
#define PDMA_OUT_CMD		0x0008
#define PDMA_OUT_DESCRQ_PUSH	0x0010
#define PDMA_OUT_DESCRQ_STAT	0x0014

#define A6_PDMA_IN_CFG		0x0028
#define A6_PDMA_IN_BUF_CFG	0x002c
#define A6_PDMA_IN_CMD		0x0030
#define A6_PDMA_IN_STATQ_PUSH	0x0038
#define A6_PDMA_IN_DESCRQ_PUSH	0x0044
#define A6_PDMA_IN_DESCRQ_STAT	0x0048
#define A6_PDMA_INTR_MASK	0x0068
#define A6_PDMA_ACK_INTR	0x006c
#define A6_PDMA_MASKED_INTR	0x0074

#define A7_PDMA_IN_CFG		0x002c
#define A7_PDMA_IN_BUF_CFG	0x0030
#define A7_PDMA_IN_CMD		0x0034
#define A7_PDMA_IN_STATQ_PUSH	0x003c
#define A7_PDMA_IN_DESCRQ_PUSH	0x0048
#define A7_PDMA_IN_DESCRQ_STAT	0x004C
#define A7_PDMA_INTR_MASK	0x006c
#define A7_PDMA_ACK_INTR	0x0070
#define A7_PDMA_MASKED_INTR	0x0078

#define PDMA_OUT_CFG_EN			BIT(0)

#define PDMA_OUT_BUF_CFG_DATA_BUF_SIZE	GENMASK(4, 0)
#define PDMA_OUT_BUF_CFG_DESCR_BUF_SIZE	GENMASK(9, 5)

#define PDMA_OUT_CMD_START		BIT(0)
#define A6_PDMA_OUT_CMD_STOP		BIT(3)
#define A7_PDMA_OUT_CMD_STOP		BIT(2)

#define PDMA_OUT_DESCRQ_PUSH_LEN	GENMASK(5, 0)
#define PDMA_OUT_DESCRQ_PUSH_ADDR	GENMASK(31, 6)

#define PDMA_OUT_DESCRQ_STAT_LEVEL	GENMASK(3, 0)
#define PDMA_OUT_DESCRQ_STAT_SIZE	GENMASK(7, 4)

#define PDMA_IN_CFG_EN			BIT(0)

#define PDMA_IN_BUF_CFG_DATA_BUF_SIZE	GENMASK(4, 0)
#define PDMA_IN_BUF_CFG_DESCR_BUF_SIZE	GENMASK(9, 5)
#define PDMA_IN_BUF_CFG_STAT_BUF_SIZE	GENMASK(14, 10)

#define PDMA_IN_CMD_START		BIT(0)
#define A6_PDMA_IN_CMD_FLUSH_STAT	BIT(2)
#define A6_PDMA_IN_CMD_STOP		BIT(3)
#define A7_PDMA_IN_CMD_FLUSH_STAT	BIT(1)
#define A7_PDMA_IN_CMD_STOP		BIT(2)

#define PDMA_IN_STATQ_PUSH_LEN		GENMASK(5, 0)
#define PDMA_IN_STATQ_PUSH_ADDR		GENMASK(31, 6)

#define PDMA_IN_DESCRQ_PUSH_LEN		GENMASK(5, 0)
#define PDMA_IN_DESCRQ_PUSH_ADDR	GENMASK(31, 6)

#define PDMA_IN_DESCRQ_STAT_LEVEL	GENMASK(3, 0)
#define PDMA_IN_DESCRQ_STAT_SIZE	GENMASK(7, 4)

#define A6_PDMA_INTR_MASK_IN_DATA	BIT(2)
#define A6_PDMA_INTR_MASK_IN_EOP	BIT(3)
#define A6_PDMA_INTR_MASK_IN_EOP_FLUSH	BIT(4)

#define A7_PDMA_INTR_MASK_IN_DATA	BIT(3)
#define A7_PDMA_INTR_MASK_IN_EOP	BIT(4)
#define A7_PDMA_INTR_MASK_IN_EOP_FLUSH	BIT(5)

#define A6_CRY_MD_OPER		GENMASK(19, 16)

#define A6_CRY_MD_HASH_SEL_CTX	GENMASK(21, 20)
#define A6_CRY_MD_HASH_HMAC_FIN	BIT(23)

#define A6_CRY_MD_CIPHER_LEN	GENMASK(21, 20)
#define A6_CRY_MD_CIPHER_DECR	BIT(22)
#define A6_CRY_MD_CIPHER_TWEAK	BIT(23)
#define A6_CRY_MD_CIPHER_DSEQ	BIT(24)

#define A7_CRY_MD_OPER		GENMASK(11, 8)

#define A7_CRY_MD_HASH_SEL_CTX	GENMASK(13, 12)
#define A7_CRY_MD_HASH_HMAC_FIN	BIT(15)

#define A7_CRY_MD_CIPHER_LEN	GENMASK(13, 12)
#define A7_CRY_MD_CIPHER_DECR	BIT(14)
#define A7_CRY_MD_CIPHER_TWEAK	BIT(15)
#define A7_CRY_MD_CIPHER_DSEQ	BIT(16)

/* DMA metadata constants */
#define regk_crypto_aes_cbc	0x00000002
#define regk_crypto_aes_ctr	0x00000003
#define regk_crypto_aes_ecb	0x00000001
#define regk_crypto_aes_gcm	0x00000004
#define regk_crypto_aes_xts	0x00000005
#define regk_crypto_cache	0x00000002
#define a6_regk_crypto_dlkey	0x0000000a
#define a7_regk_crypto_dlkey	0x0000000e
#define regk_crypto_ext		0x00000001
#define regk_crypto_hmac_sha1	0x00000007
#define regk_crypto_hmac_sha256	0x00000009
#define regk_crypto_hmac_sha384	0x0000000b
#define regk_crypto_hmac_sha512	0x0000000d
#define regk_crypto_init	0x00000000
#define regk_crypto_key_128	0x00000000
#define regk_crypto_key_192	0x00000001
#define regk_crypto_key_256	0x00000002
#define regk_crypto_null	0x00000000
#define regk_crypto_sha1	0x00000006
#define regk_crypto_sha256	0x00000008
#define regk_crypto_sha384	0x0000000a
#define regk_crypto_sha512	0x0000000c
/* DMA descriptor structures */
struct pdma_descr_ctrl {
	unsigned char short_descr : 1;
	unsigned char pad1 : 1;
	unsigned char eop : 1;
	unsigned char intr : 1;
	unsigned char short_len : 3;
	unsigned char pad2 : 1;
} __packed;

struct pdma_data_descr {
	unsigned int len : 24;
	unsigned int buf : 32;
} __packed;

struct pdma_short_descr {
	unsigned char data[7];
} __packed;

struct pdma_descr {
	struct pdma_descr_ctrl ctrl;
	union {
		struct pdma_data_descr data;
		struct pdma_short_descr shrt;
	};
} __packed;

struct pdma_stat_descr {
	unsigned char pad1 : 1;
	unsigned char pad2 : 1;
	unsigned char eop : 1;
	unsigned char pad3 : 5;
	unsigned int len : 24;
};
/* Each descriptor array can hold max 64 entries */
#define PDMA_DESCR_COUNT	64

#define MODULE_NAME		"Artpec-6 CA"

/* Hash modes (including HMAC variants) */
#define ARTPEC6_CRYPTO_HASH_SHA1	1
#define ARTPEC6_CRYPTO_HASH_SHA256	2
#define ARTPEC6_CRYPTO_HASH_SHA384	3
#define ARTPEC6_CRYPTO_HASH_SHA512	4

#define ARTPEC6_CRYPTO_CIPHER_AES_ECB	1
#define ARTPEC6_CRYPTO_CIPHER_AES_CBC	2
#define ARTPEC6_CRYPTO_CIPHER_AES_CTR	3
#define ARTPEC6_CRYPTO_CIPHER_AES_XTS	5
/* The PDMA is a DMA-engine tightly coupled with a ciphering engine.
 * It operates on a descriptor array with up to 64 descriptor entries.
 * The arrays must be 64 byte aligned in memory.
 *
 * The ciphering unit has no registers and is completely controlled by
 * a 4-byte metadata that is inserted at the beginning of each dma packet.
 *
 * A dma packet is a sequence of descriptors terminated by setting the .eop
 * field in the final descriptor of the packet.
 *
 * Multiple packets are used for providing context data, key data and
 * the plain/ciphertext.
 *
 *   PDMA Descriptors (Array)
 *  +------+------+------+~~+-------+------+----
 *  |  0   |  1   |  2   |~~| 11 EOP|  12  | ....
 *  +--+---+--+---+----+-+~~+-------+----+-+----
 *
 *  __|__  +-------++-------++-------+ +----+
 * | MD  | |Payload||Payload||Payload| | MD |
 * +-----+ +-------++-------++-------+ +----+
 */
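
/* As an illustration (a sketch based on the packet layouts documented for
 * the request-preparation helpers further down, not an exhaustive list):
 * a cipher request is typically sent as a key packet, [KEY_MD][KEY] with
 * EOP on the key descriptor, followed by a data packet, [CIPHER_MD][IV]
 * [data_0]...[data_n] with EOP on the last data descriptor, while the in
 * channel receives a 4-byte metadata word (discarded into a scratch pad
 * buffer) followed by the result data.
 */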
struct artpec6_crypto_bounce_buffer {
	struct list_head list;
	size_t length;
	struct scatterlist *sg;
	size_t offset;
	/* buf is aligned to ARTPEC_CACHE_LINE_MAX and
	 * holds up to ARTPEC_CACHE_LINE_MAX bytes data.
	 */
	void *buf;
};

struct artpec6_crypto_dma_map {
	dma_addr_t dma_addr;
	size_t size;
	enum dma_data_direction dir;
};

struct artpec6_crypto_dma_descriptors {
	struct pdma_descr out[PDMA_DESCR_COUNT] __aligned(64);
	struct pdma_descr in[PDMA_DESCR_COUNT] __aligned(64);
	u32 stat[PDMA_DESCR_COUNT] __aligned(64);
	struct list_head bounce_buffers;
	/* Enough maps for all out/in buffers, and all three descr. arrays */
	struct artpec6_crypto_dma_map maps[PDMA_DESCR_COUNT * 2 + 2];
	dma_addr_t out_dma_addr;
	dma_addr_t in_dma_addr;
	dma_addr_t stat_dma_addr;
	size_t out_cnt;
	size_t in_cnt;
	size_t map_count;
};

enum artpec6_crypto_variant {
	ARTPEC6_CRYPTO,
	ARTPEC7_CRYPTO,
};

struct artpec6_crypto {
	void __iomem *base;
	spinlock_t queue_lock;
	struct list_head queue; /* waiting for pdma fifo space */
	struct list_head pending; /* submitted to pdma fifo */
	struct tasklet_struct task;
	struct kmem_cache *dma_cache;
	int pending_count;
	struct timer_list timer;
	enum artpec6_crypto_variant variant;
	void *pad_buffer; /* cache-aligned block padding buffer */
};

enum artpec6_crypto_hash_flags {
	HASH_FLAG_INIT_CTX = 2,
	HASH_FLAG_UPDATE = 4,
	HASH_FLAG_FINALIZE = 8,
	HASH_FLAG_HMAC = 16,
	HASH_FLAG_UPDATE_KEY = 32,
};

struct artpec6_crypto_req_common {
	struct list_head list;
	struct artpec6_crypto_dma_descriptors *dma;
	struct crypto_async_request *req;
	void (*complete)(struct crypto_async_request *req);
	gfp_t gfp_flags;
};

struct artpec6_hash_request_context {
	char partial_buffer[SHA512_BLOCK_SIZE];
	char partial_buffer_out[SHA512_BLOCK_SIZE];
	char key_buffer[SHA512_BLOCK_SIZE];
	char pad_buffer[SHA512_BLOCK_SIZE + 32];
	unsigned char digeststate[SHA512_DIGEST_SIZE];
	size_t partial_bytes;
	u64 digcnt;
	u32 key_md;
	u32 hash_md;
	enum artpec6_crypto_hash_flags hash_flags;
	struct artpec6_crypto_req_common common;
};

struct artpec6_hash_export_state {
	char partial_buffer[SHA512_BLOCK_SIZE];
	unsigned char digeststate[SHA512_DIGEST_SIZE];
	size_t partial_bytes;
	unsigned int hash_flags;
};

struct artpec6_hashalg_context {
	char hmac_key[SHA512_BLOCK_SIZE];
	size_t hmac_key_length;
	struct crypto_shash *child_hash;
};

struct artpec6_crypto_request_context {
	u32 cipher_md;
	bool decrypt;
	struct artpec6_crypto_req_common common;
};

struct artpec6_cryptotfm_context {
	unsigned char aes_key[2*AES_MAX_KEY_SIZE];
	size_t key_length;
	u32 key_md;
	int crypto_type;
	struct crypto_skcipher *fallback;
};

struct artpec6_crypto_aead_hw_ctx {
	__be64 aad_length_bits;
	__be64 text_length_bits;
	__u8 J0[AES_BLOCK_SIZE];
} __packed;

struct artpec6_crypto_aead_req_ctx {
	struct artpec6_crypto_aead_hw_ctx hw_ctx;
	u32 cipher_md;
	bool decrypt;
	struct artpec6_crypto_req_common common;
	__u8 decryption_tag[AES_BLOCK_SIZE] ____cacheline_aligned;
};
350 /* The crypto framework makes it hard to avoid this global. */
351 static struct device
*artpec6_crypto_dev
;
353 #ifdef CONFIG_FAULT_INJECTION
354 static DECLARE_FAULT_ATTR(artpec6_crypto_fail_status_read
);
355 static DECLARE_FAULT_ATTR(artpec6_crypto_fail_dma_array_full
);
359 ARTPEC6_CRYPTO_PREPARE_HASH_NO_START
,
360 ARTPEC6_CRYPTO_PREPARE_HASH_START
,
363 static int artpec6_crypto_prepare_aead(struct aead_request
*areq
);
364 static int artpec6_crypto_prepare_crypto(struct skcipher_request
*areq
);
365 static int artpec6_crypto_prepare_hash(struct ahash_request
*areq
);
368 artpec6_crypto_complete_crypto(struct crypto_async_request
*req
);
370 artpec6_crypto_complete_cbc_encrypt(struct crypto_async_request
*req
);
372 artpec6_crypto_complete_cbc_decrypt(struct crypto_async_request
*req
);
374 artpec6_crypto_complete_aead(struct crypto_async_request
*req
);
376 artpec6_crypto_complete_hash(struct crypto_async_request
*req
);
379 artpec6_crypto_common_destroy(struct artpec6_crypto_req_common
*common
);
382 artpec6_crypto_start_dma(struct artpec6_crypto_req_common
*common
);
384 struct artpec6_crypto_walk
{
385 struct scatterlist
*sg
;
389 static void artpec6_crypto_walk_init(struct artpec6_crypto_walk
*awalk
,
390 struct scatterlist
*sg
)
396 static size_t artpec6_crypto_walk_advance(struct artpec6_crypto_walk
*awalk
,
399 while (nbytes
&& awalk
->sg
) {
402 WARN_ON(awalk
->offset
> awalk
->sg
->length
);
404 piece
= min(nbytes
, (size_t)awalk
->sg
->length
- awalk
->offset
);
406 awalk
->offset
+= piece
;
407 if (awalk
->offset
== awalk
->sg
->length
) {
408 awalk
->sg
= sg_next(awalk
->sg
);
418 artpec6_crypto_walk_chunklen(const struct artpec6_crypto_walk
*awalk
)
420 WARN_ON(awalk
->sg
->length
== awalk
->offset
);
422 return awalk
->sg
->length
- awalk
->offset
;
426 artpec6_crypto_walk_chunk_phys(const struct artpec6_crypto_walk
*awalk
)
428 return sg_phys(awalk
->sg
) + awalk
->offset
;
432 artpec6_crypto_copy_bounce_buffers(struct artpec6_crypto_req_common
*common
)
434 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
435 struct artpec6_crypto_bounce_buffer
*b
;
436 struct artpec6_crypto_bounce_buffer
*next
;
438 list_for_each_entry_safe(b
, next
, &dma
->bounce_buffers
, list
) {
439 pr_debug("bounce entry %p: %zu bytes @ %zu from %p\n",
440 b
, b
->length
, b
->offset
, b
->buf
);
441 sg_pcopy_from_buffer(b
->sg
,
452 static inline bool artpec6_crypto_busy(void)
454 struct artpec6_crypto
*ac
= dev_get_drvdata(artpec6_crypto_dev
);
455 int fifo_count
= ac
->pending_count
;
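	/* The limit of six outstanding jobs is a driver heuristic; presumably
	 * it keeps the number of in-flight requests below the depth of the
	 * PDMA descriptor-queue push FIFOs (an assumption, not taken from
	 * hardware documentation).
	 */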
457 return fifo_count
> 6;
460 static int artpec6_crypto_submit(struct artpec6_crypto_req_common
*req
)
462 struct artpec6_crypto
*ac
= dev_get_drvdata(artpec6_crypto_dev
);
465 spin_lock_bh(&ac
->queue_lock
);
467 if (!artpec6_crypto_busy()) {
468 list_add_tail(&req
->list
, &ac
->pending
);
469 artpec6_crypto_start_dma(req
);
471 } else if (req
->req
->flags
& CRYPTO_TFM_REQ_MAY_BACKLOG
) {
472 list_add_tail(&req
->list
, &ac
->queue
);
474 artpec6_crypto_common_destroy(req
);
477 spin_unlock_bh(&ac
->queue_lock
);
482 static void artpec6_crypto_start_dma(struct artpec6_crypto_req_common
*common
)
484 struct artpec6_crypto
*ac
= dev_get_drvdata(artpec6_crypto_dev
);
485 enum artpec6_crypto_variant variant
= ac
->variant
;
486 void __iomem
*base
= ac
->base
;
487 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
488 u32 ind
, statd
, outd
;
490 /* Make descriptor content visible to the DMA before starting it. */
493 ind
= FIELD_PREP(PDMA_IN_DESCRQ_PUSH_LEN
, dma
->in_cnt
- 1) |
494 FIELD_PREP(PDMA_IN_DESCRQ_PUSH_ADDR
, dma
->in_dma_addr
>> 6);
496 statd
= FIELD_PREP(PDMA_IN_STATQ_PUSH_LEN
, dma
->in_cnt
- 1) |
497 FIELD_PREP(PDMA_IN_STATQ_PUSH_ADDR
, dma
->stat_dma_addr
>> 6);
499 outd
= FIELD_PREP(PDMA_OUT_DESCRQ_PUSH_LEN
, dma
->out_cnt
- 1) |
500 FIELD_PREP(PDMA_OUT_DESCRQ_PUSH_ADDR
, dma
->out_dma_addr
>> 6);
502 if (variant
== ARTPEC6_CRYPTO
) {
503 writel_relaxed(ind
, base
+ A6_PDMA_IN_DESCRQ_PUSH
);
504 writel_relaxed(statd
, base
+ A6_PDMA_IN_STATQ_PUSH
);
505 writel_relaxed(PDMA_IN_CMD_START
, base
+ A6_PDMA_IN_CMD
);
507 writel_relaxed(ind
, base
+ A7_PDMA_IN_DESCRQ_PUSH
);
508 writel_relaxed(statd
, base
+ A7_PDMA_IN_STATQ_PUSH
);
509 writel_relaxed(PDMA_IN_CMD_START
, base
+ A7_PDMA_IN_CMD
);
512 writel_relaxed(outd
, base
+ PDMA_OUT_DESCRQ_PUSH
);
513 writel_relaxed(PDMA_OUT_CMD_START
, base
+ PDMA_OUT_CMD
);
519 artpec6_crypto_init_dma_operation(struct artpec6_crypto_req_common
*common
)
521 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
526 INIT_LIST_HEAD(&dma
->bounce_buffers
);
529 static bool fault_inject_dma_descr(void)
531 #ifdef CONFIG_FAULT_INJECTION
532 return should_fail(&artpec6_crypto_fail_dma_array_full
, 1);
538 /** artpec6_crypto_setup_out_descr_phys - Setup an out channel with a
541 * @addr: The physical address of the data buffer
542 * @len: The length of the data buffer
543 * @eop: True if this is the last buffer in the packet
545 * @return 0 on success or -ENOSPC if there are no more descriptors available
548 artpec6_crypto_setup_out_descr_phys(struct artpec6_crypto_req_common
*common
,
549 dma_addr_t addr
, size_t len
, bool eop
)
551 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
552 struct pdma_descr
*d
;
554 if (dma
->out_cnt
>= PDMA_DESCR_COUNT
||
555 fault_inject_dma_descr()) {
556 pr_err("No free OUT DMA descriptors available!\n");
560 d
= &dma
->out
[dma
->out_cnt
++];
561 memset(d
, 0, sizeof(*d
));
563 d
->ctrl
.short_descr
= 0;
570 /** artpec6_crypto_setup_out_descr_short - Setup a short out descriptor
572 * @dst: The virtual address of the data
 * @len: The length of the data, must be between 1 and 7 bytes
574 * @eop: True if this is the last buffer in the packet
576 * @return 0 on success
577 * -ENOSPC if no more descriptors are available
578 * -EINVAL if the data length exceeds 7 bytes
581 artpec6_crypto_setup_out_descr_short(struct artpec6_crypto_req_common
*common
,
582 void *dst
, unsigned int len
, bool eop
)
584 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
585 struct pdma_descr
*d
;
587 if (dma
->out_cnt
>= PDMA_DESCR_COUNT
||
588 fault_inject_dma_descr()) {
589 pr_err("No free OUT DMA descriptors available!\n");
591 } else if (len
> 7 || len
< 1) {
594 d
= &dma
->out
[dma
->out_cnt
++];
595 memset(d
, 0, sizeof(*d
));
597 d
->ctrl
.short_descr
= 1;
598 d
->ctrl
.short_len
= len
;
600 memcpy(d
->shrt
.data
, dst
, len
);
604 static int artpec6_crypto_dma_map_page(struct artpec6_crypto_req_common
*common
,
605 struct page
*page
, size_t offset
,
607 enum dma_data_direction dir
,
608 dma_addr_t
*dma_addr_out
)
610 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
611 struct device
*dev
= artpec6_crypto_dev
;
612 struct artpec6_crypto_dma_map
*map
;
617 if (dma
->map_count
>= ARRAY_SIZE(dma
->maps
))
620 dma_addr
= dma_map_page(dev
, page
, offset
, size
, dir
);
621 if (dma_mapping_error(dev
, dma_addr
))
624 map
= &dma
->maps
[dma
->map_count
++];
626 map
->dma_addr
= dma_addr
;
629 *dma_addr_out
= dma_addr
;
635 artpec6_crypto_dma_map_single(struct artpec6_crypto_req_common
*common
,
636 void *ptr
, size_t size
,
637 enum dma_data_direction dir
,
638 dma_addr_t
*dma_addr_out
)
640 struct page
*page
= virt_to_page(ptr
);
641 size_t offset
= (uintptr_t)ptr
& ~PAGE_MASK
;
643 return artpec6_crypto_dma_map_page(common
, page
, offset
, size
, dir
,
648 artpec6_crypto_dma_map_descs(struct artpec6_crypto_req_common
*common
)
650 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
653 ret
= artpec6_crypto_dma_map_single(common
, dma
->in
,
654 sizeof(dma
->in
[0]) * dma
->in_cnt
,
655 DMA_TO_DEVICE
, &dma
->in_dma_addr
);
659 ret
= artpec6_crypto_dma_map_single(common
, dma
->out
,
660 sizeof(dma
->out
[0]) * dma
->out_cnt
,
661 DMA_TO_DEVICE
, &dma
->out_dma_addr
);
665 /* We only read one stat descriptor */
666 dma
->stat
[dma
->in_cnt
- 1] = 0;
	/* Map DMA_BIDIRECTIONAL since we need our zeroing of the stat
	 * descriptor to be flushed out to memory before the DMA starts.
	 */
672 return artpec6_crypto_dma_map_single(common
,
673 dma
->stat
+ dma
->in_cnt
- 1,
674 sizeof(dma
->stat
[0]),
676 &dma
->stat_dma_addr
);
680 artpec6_crypto_dma_unmap_all(struct artpec6_crypto_req_common
*common
)
682 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
683 struct device
*dev
= artpec6_crypto_dev
;
686 for (i
= 0; i
< dma
->map_count
; i
++) {
687 struct artpec6_crypto_dma_map
*map
= &dma
->maps
[i
];
689 dma_unmap_page(dev
, map
->dma_addr
, map
->size
, map
->dir
);
695 /** artpec6_crypto_setup_out_descr - Setup an out descriptor
697 * @dst: The virtual address of the data
698 * @len: The length of the data
699 * @eop: True if this is the last buffer in the packet
700 * @use_short: If this is true and the data length is 7 bytes or less then
701 * a short descriptor will be used
703 * @return 0 on success
704 * Any errors from artpec6_crypto_setup_out_descr_short() or
705 * setup_out_descr_phys()
708 artpec6_crypto_setup_out_descr(struct artpec6_crypto_req_common
*common
,
709 void *dst
, unsigned int len
, bool eop
,
712 if (use_short
&& len
< 7) {
713 return artpec6_crypto_setup_out_descr_short(common
, dst
, len
,
719 ret
= artpec6_crypto_dma_map_single(common
, dst
, len
,
725 return artpec6_crypto_setup_out_descr_phys(common
, dma_addr
,
730 /** artpec6_crypto_setup_in_descr_phys - Setup an in channel with a
733 * @addr: The physical address of the data buffer
734 * @len: The length of the data buffer
735 * @intr: True if an interrupt should be fired after HW processing of this
740 artpec6_crypto_setup_in_descr_phys(struct artpec6_crypto_req_common
*common
,
741 dma_addr_t addr
, unsigned int len
, bool intr
)
743 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
744 struct pdma_descr
*d
;
746 if (dma
->in_cnt
>= PDMA_DESCR_COUNT
||
747 fault_inject_dma_descr()) {
748 pr_err("No free IN DMA descriptors available!\n");
751 d
= &dma
->in
[dma
->in_cnt
++];
752 memset(d
, 0, sizeof(*d
));
760 /** artpec6_crypto_setup_in_descr - Setup an in channel descriptor
 * @buffer: The virtual address of the data buffer
763 * @len: The length of the data buffer
764 * @last: If this is the last data buffer in the request (i.e. an interrupt
767 * Short descriptors are not used for the in channel
770 artpec6_crypto_setup_in_descr(struct artpec6_crypto_req_common
*common
,
771 void *buffer
, unsigned int len
, bool last
)
776 ret
= artpec6_crypto_dma_map_single(common
, buffer
, len
,
777 DMA_FROM_DEVICE
, &dma_addr
);
781 return artpec6_crypto_setup_in_descr_phys(common
, dma_addr
, len
, last
);
784 static struct artpec6_crypto_bounce_buffer
*
785 artpec6_crypto_alloc_bounce(gfp_t flags
)
788 size_t alloc_size
= sizeof(struct artpec6_crypto_bounce_buffer
) +
789 2 * ARTPEC_CACHE_LINE_MAX
;
790 struct artpec6_crypto_bounce_buffer
*bbuf
= kzalloc(alloc_size
, flags
);
796 bbuf
->buf
= PTR_ALIGN(base
, ARTPEC_CACHE_LINE_MAX
);
800 static int setup_bounce_buffer_in(struct artpec6_crypto_req_common
*common
,
801 struct artpec6_crypto_walk
*walk
, size_t size
)
803 struct artpec6_crypto_bounce_buffer
*bbuf
;
806 bbuf
= artpec6_crypto_alloc_bounce(common
->gfp_flags
);
812 bbuf
->offset
= walk
->offset
;
814 ret
= artpec6_crypto_setup_in_descr(common
, bbuf
->buf
, size
, false);
820 pr_debug("BOUNCE %zu offset %zu\n", size
, walk
->offset
);
821 list_add_tail(&bbuf
->list
, &common
->dma
->bounce_buffers
);
826 artpec6_crypto_setup_sg_descrs_in(struct artpec6_crypto_req_common
*common
,
827 struct artpec6_crypto_walk
*walk
,
834 while (walk
->sg
&& count
) {
835 chunk
= min(count
, artpec6_crypto_walk_chunklen(walk
));
836 addr
= artpec6_crypto_walk_chunk_phys(walk
);
		/* When destination buffers are not aligned to the cache line
		 * size we need bounce buffers. The DMA-API requires that the
		 * entire cache line is owned by the DMA buffer, and this also
		 * holds when coherent DMA is used.
		 */
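		/* Hypothetical example, assuming the 32-byte cache line above:
		 * a 100-byte destination fragment at physical address 0x1012
		 * becomes a 14-byte bounce buffer up to the 0x1020 boundary,
		 * a 64-byte directly mapped chunk, and a trailing 22-byte
		 * bounce buffer.
		 */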
843 if (!IS_ALIGNED(addr
, ARTPEC_CACHE_LINE_MAX
)) {
844 chunk
= min_t(dma_addr_t
, chunk
,
845 ALIGN(addr
, ARTPEC_CACHE_LINE_MAX
) -
848 pr_debug("CHUNK-b %pad:%zu\n", &addr
, chunk
);
849 ret
= setup_bounce_buffer_in(common
, walk
, chunk
);
850 } else if (chunk
< ARTPEC_CACHE_LINE_MAX
) {
851 pr_debug("CHUNK-b %pad:%zu\n", &addr
, chunk
);
852 ret
= setup_bounce_buffer_in(common
, walk
, chunk
);
856 chunk
= chunk
& ~(ARTPEC_CACHE_LINE_MAX
-1);
858 pr_debug("CHUNK %pad:%zu\n", &addr
, chunk
);
860 ret
= artpec6_crypto_dma_map_page(common
,
870 ret
= artpec6_crypto_setup_in_descr_phys(common
,
878 count
= count
- chunk
;
879 artpec6_crypto_walk_advance(walk
, chunk
);
883 pr_err("EOL unexpected %zu bytes left\n", count
);
885 return count
? -EINVAL
: 0;
889 artpec6_crypto_setup_sg_descrs_out(struct artpec6_crypto_req_common
*common
,
890 struct artpec6_crypto_walk
*walk
,
897 while (walk
->sg
&& count
) {
898 chunk
= min(count
, artpec6_crypto_walk_chunklen(walk
));
899 addr
= artpec6_crypto_walk_chunk_phys(walk
);
901 pr_debug("OUT-CHUNK %pad:%zu\n", &addr
, chunk
);
906 chunk
= min_t(size_t, chunk
, (4-(addr
&3)));
908 sg_pcopy_to_buffer(walk
->sg
, 1, buf
, chunk
,
911 ret
= artpec6_crypto_setup_out_descr_short(common
, buf
,
917 ret
= artpec6_crypto_dma_map_page(common
,
927 ret
= artpec6_crypto_setup_out_descr_phys(common
,
935 count
= count
- chunk
;
936 artpec6_crypto_walk_advance(walk
, chunk
);
940 pr_err("EOL unexpected %zu bytes left\n", count
);
942 return count
? -EINVAL
: 0;
946 /** artpec6_crypto_terminate_out_descrs - Set the EOP on the last out descriptor
948 * If the out descriptor list is non-empty, then the eop flag on the
949 * last used out descriptor will be set.
951 * @return 0 on success
952 * -EINVAL if the out descriptor is empty or has overflown
955 artpec6_crypto_terminate_out_descrs(struct artpec6_crypto_req_common
*common
)
957 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
958 struct pdma_descr
*d
;
	if (!dma->out_cnt || dma->out_cnt > PDMA_DESCR_COUNT) {
		pr_err("%s: OUT descriptor list is %s\n",
		       MODULE_NAME, dma->out_cnt ? "full" : "empty");
967 d
= &dma
->out
[dma
->out_cnt
-1];
973 /** artpec6_crypto_terminate_in_descrs - Set the interrupt flag on the last
976 * See artpec6_crypto_terminate_out_descrs() for return values
979 artpec6_crypto_terminate_in_descrs(struct artpec6_crypto_req_common
*common
)
981 struct artpec6_crypto_dma_descriptors
*dma
= common
->dma
;
982 struct pdma_descr
*d
;
	if (!dma->in_cnt || dma->in_cnt > PDMA_DESCR_COUNT) {
		pr_err("%s: IN descriptor list is %s\n",
		       MODULE_NAME, dma->in_cnt ? "full" : "empty");
990 d
= &dma
->in
[dma
->in_cnt
-1];
995 /** create_hash_pad - Create a Secure Hash conformant pad
997 * @dst: The destination buffer to write the pad. Must be at least 64 bytes
 * @dgstlen: The total number of bytes hashed so far, used to size the pad
 * @bitcount: The total number of bits hashed so far, stored in the pad trailer
1001 * @return The total number of padding bytes written to @dst
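 *
 * For example, with the standard FIPS 180-4 padding rules: after hashing
 * 3 bytes with SHA-256 this writes one 0x80 byte, 52 zero bytes and the
 * 8-byte big-endian bit count (61 bytes in total), so the padded message
 * fills exactly one 64-byte block.
 */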
1004 create_hash_pad(int oper
, unsigned char *dst
, u64 dgstlen
, u64 bitcount
)
1006 unsigned int mod
, target
, diff
, pad_bytes
, size_bytes
;
1007 __be64 bits
= __cpu_to_be64(bitcount
);
1010 case regk_crypto_sha1
:
1011 case regk_crypto_sha256
:
1012 case regk_crypto_hmac_sha1
:
1013 case regk_crypto_hmac_sha256
:
1026 diff
= dgstlen
& (mod
- 1);
1027 pad_bytes
= diff
> target
? target
+ mod
- diff
: target
- diff
;
1029 memset(dst
+ 1, 0, pad_bytes
);
1032 if (size_bytes
== 16) {
1033 memset(dst
+ 1 + pad_bytes
, 0, 8);
1034 memcpy(dst
+ 1 + pad_bytes
+ 8, &bits
, 8);
1036 memcpy(dst
+ 1 + pad_bytes
, &bits
, 8);
1039 return pad_bytes
+ size_bytes
+ 1;
1042 static int artpec6_crypto_common_init(struct artpec6_crypto_req_common
*common
,
1043 struct crypto_async_request
*parent
,
1044 void (*complete
)(struct crypto_async_request
*req
),
1045 struct scatterlist
*dstsg
, unsigned int nbytes
)
1048 struct artpec6_crypto
*ac
= dev_get_drvdata(artpec6_crypto_dev
);
1050 flags
= (parent
->flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ?
1051 GFP_KERNEL
: GFP_ATOMIC
;
1053 common
->gfp_flags
= flags
;
1054 common
->dma
= kmem_cache_alloc(ac
->dma_cache
, flags
);
1058 common
->req
= parent
;
1059 common
->complete
= complete
;
1064 artpec6_crypto_bounce_destroy(struct artpec6_crypto_dma_descriptors
*dma
)
1066 struct artpec6_crypto_bounce_buffer
*b
;
1067 struct artpec6_crypto_bounce_buffer
*next
;
1069 list_for_each_entry_safe(b
, next
, &dma
->bounce_buffers
, list
) {
1075 artpec6_crypto_common_destroy(struct artpec6_crypto_req_common
*common
)
1077 struct artpec6_crypto
*ac
= dev_get_drvdata(artpec6_crypto_dev
);
1079 artpec6_crypto_dma_unmap_all(common
);
1080 artpec6_crypto_bounce_destroy(common
->dma
);
1081 kmem_cache_free(ac
->dma_cache
, common
->dma
);
1087 * Ciphering functions.
1089 static int artpec6_crypto_encrypt(struct skcipher_request
*req
)
1091 struct crypto_skcipher
*cipher
= crypto_skcipher_reqtfm(req
);
1092 struct artpec6_cryptotfm_context
*ctx
= crypto_skcipher_ctx(cipher
);
1093 struct artpec6_crypto_request_context
*req_ctx
= NULL
;
1094 void (*complete
)(struct crypto_async_request
*req
);
1097 req_ctx
= skcipher_request_ctx(req
);
1099 switch (ctx
->crypto_type
) {
1100 case ARTPEC6_CRYPTO_CIPHER_AES_CBC
:
1101 case ARTPEC6_CRYPTO_CIPHER_AES_ECB
:
1102 case ARTPEC6_CRYPTO_CIPHER_AES_XTS
:
1103 req_ctx
->decrypt
= 0;
1109 switch (ctx
->crypto_type
) {
1110 case ARTPEC6_CRYPTO_CIPHER_AES_CBC
:
1111 complete
= artpec6_crypto_complete_cbc_encrypt
;
1114 complete
= artpec6_crypto_complete_crypto
;
1118 ret
= artpec6_crypto_common_init(&req_ctx
->common
,
1121 req
->dst
, req
->cryptlen
);
1125 ret
= artpec6_crypto_prepare_crypto(req
);
1127 artpec6_crypto_common_destroy(&req_ctx
->common
);
1131 return artpec6_crypto_submit(&req_ctx
->common
);
1134 static int artpec6_crypto_decrypt(struct skcipher_request
*req
)
1137 struct crypto_skcipher
*cipher
= crypto_skcipher_reqtfm(req
);
1138 struct artpec6_cryptotfm_context
*ctx
= crypto_skcipher_ctx(cipher
);
1139 struct artpec6_crypto_request_context
*req_ctx
= NULL
;
1140 void (*complete
)(struct crypto_async_request
*req
);
1142 req_ctx
= skcipher_request_ctx(req
);
1144 switch (ctx
->crypto_type
) {
1145 case ARTPEC6_CRYPTO_CIPHER_AES_CBC
:
1146 case ARTPEC6_CRYPTO_CIPHER_AES_ECB
:
1147 case ARTPEC6_CRYPTO_CIPHER_AES_XTS
:
1148 req_ctx
->decrypt
= 1;
1155 switch (ctx
->crypto_type
) {
1156 case ARTPEC6_CRYPTO_CIPHER_AES_CBC
:
1157 complete
= artpec6_crypto_complete_cbc_decrypt
;
1160 complete
= artpec6_crypto_complete_crypto
;
1164 ret
= artpec6_crypto_common_init(&req_ctx
->common
, &req
->base
,
1166 req
->dst
, req
->cryptlen
);
1170 ret
= artpec6_crypto_prepare_crypto(req
);
1172 artpec6_crypto_common_destroy(&req_ctx
->common
);
1176 return artpec6_crypto_submit(&req_ctx
->common
);
1180 artpec6_crypto_ctr_crypt(struct skcipher_request
*req
, bool encrypt
)
1182 struct crypto_skcipher
*cipher
= crypto_skcipher_reqtfm(req
);
1183 struct artpec6_cryptotfm_context
*ctx
= crypto_skcipher_ctx(cipher
);
1184 size_t iv_len
= crypto_skcipher_ivsize(cipher
);
1185 unsigned int counter
= be32_to_cpup((__be32
*)
1186 (req
->iv
+ iv_len
- 4));
1187 unsigned int nblks
= ALIGN(req
->cryptlen
, AES_BLOCK_SIZE
) /
	/* The hardware uses only the last 32-bits as the counter while the
	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
	 * the whole IV is a counter. So fall back if the counter is going to
	 * overflow.
	 */
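	/* For example, with a cryptlen of 64 bytes (nblks = 4) any IV whose
	 * low 32-bit word is 0xfffffffc or above would wrap the counter, so
	 * the request is handed to the software fallback below.
	 */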
1196 if (counter
+ nblks
< counter
) {
1199 pr_debug("counter %x will overflow (nblks %u), falling back\n",
1200 counter
, counter
+ nblks
);
1202 ret
= crypto_skcipher_setkey(ctx
->fallback
, ctx
->aes_key
,
1208 SKCIPHER_REQUEST_ON_STACK(subreq
, ctx
->fallback
);
1210 skcipher_request_set_tfm(subreq
, ctx
->fallback
);
1211 skcipher_request_set_callback(subreq
, req
->base
.flags
,
1213 skcipher_request_set_crypt(subreq
, req
->src
, req
->dst
,
1214 req
->cryptlen
, req
->iv
);
1215 ret
= encrypt
? crypto_skcipher_encrypt(subreq
)
1216 : crypto_skcipher_decrypt(subreq
);
1217 skcipher_request_zero(subreq
);
1222 return encrypt
? artpec6_crypto_encrypt(req
)
1223 : artpec6_crypto_decrypt(req
);
1226 static int artpec6_crypto_ctr_encrypt(struct skcipher_request
*req
)
1228 return artpec6_crypto_ctr_crypt(req
, true);
1231 static int artpec6_crypto_ctr_decrypt(struct skcipher_request
*req
)
1233 return artpec6_crypto_ctr_crypt(req
, false);
1239 static int artpec6_crypto_aead_init(struct crypto_aead
*tfm
)
1241 struct artpec6_cryptotfm_context
*tfm_ctx
= crypto_aead_ctx(tfm
);
1243 memset(tfm_ctx
, 0, sizeof(*tfm_ctx
));
1245 crypto_aead_set_reqsize(tfm
,
1246 sizeof(struct artpec6_crypto_aead_req_ctx
));
1251 static int artpec6_crypto_aead_set_key(struct crypto_aead
*tfm
, const u8
*key
,
1254 struct artpec6_cryptotfm_context
*ctx
= crypto_tfm_ctx(&tfm
->base
);
1256 if (len
!= 16 && len
!= 24 && len
!= 32) {
1257 crypto_aead_set_flags(tfm
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
1261 ctx
->key_length
= len
;
1263 memcpy(ctx
->aes_key
, key
, len
);
1267 static int artpec6_crypto_aead_encrypt(struct aead_request
*req
)
1270 struct artpec6_crypto_aead_req_ctx
*req_ctx
= aead_request_ctx(req
);
1272 req_ctx
->decrypt
= false;
1273 ret
= artpec6_crypto_common_init(&req_ctx
->common
, &req
->base
,
1274 artpec6_crypto_complete_aead
,
1279 ret
= artpec6_crypto_prepare_aead(req
);
1281 artpec6_crypto_common_destroy(&req_ctx
->common
);
1285 return artpec6_crypto_submit(&req_ctx
->common
);
1288 static int artpec6_crypto_aead_decrypt(struct aead_request
*req
)
1291 struct artpec6_crypto_aead_req_ctx
*req_ctx
= aead_request_ctx(req
);
1293 req_ctx
->decrypt
= true;
1294 if (req
->cryptlen
< AES_BLOCK_SIZE
)
1297 ret
= artpec6_crypto_common_init(&req_ctx
->common
,
1299 artpec6_crypto_complete_aead
,
1304 ret
= artpec6_crypto_prepare_aead(req
);
1306 artpec6_crypto_common_destroy(&req_ctx
->common
);
1310 return artpec6_crypto_submit(&req_ctx
->common
);
1313 static int artpec6_crypto_prepare_hash(struct ahash_request
*areq
)
1315 struct artpec6_hashalg_context
*ctx
= crypto_tfm_ctx(areq
->base
.tfm
);
1316 struct artpec6_hash_request_context
*req_ctx
= ahash_request_ctx(areq
);
1317 size_t digestsize
= crypto_ahash_digestsize(crypto_ahash_reqtfm(areq
));
1318 size_t contextsize
= digestsize
== SHA384_DIGEST_SIZE
?
1319 SHA512_DIGEST_SIZE
: digestsize
;
1320 size_t blocksize
= crypto_tfm_alg_blocksize(
1321 crypto_ahash_tfm(crypto_ahash_reqtfm(areq
)));
1322 struct artpec6_crypto_req_common
*common
= &req_ctx
->common
;
1323 struct artpec6_crypto
*ac
= dev_get_drvdata(artpec6_crypto_dev
);
1324 enum artpec6_crypto_variant variant
= ac
->variant
;
1326 bool ext_ctx
= false;
1327 bool run_hw
= false;
1330 artpec6_crypto_init_dma_operation(common
);
	/* Upload HMAC key, must be the first packet */
1333 if (req_ctx
->hash_flags
& HASH_FLAG_HMAC
) {
1334 if (variant
== ARTPEC6_CRYPTO
) {
1335 req_ctx
->key_md
= FIELD_PREP(A6_CRY_MD_OPER
,
1336 a6_regk_crypto_dlkey
);
1338 req_ctx
->key_md
= FIELD_PREP(A7_CRY_MD_OPER
,
1339 a7_regk_crypto_dlkey
);
1342 /* Copy and pad up the key */
1343 memcpy(req_ctx
->key_buffer
, ctx
->hmac_key
,
1344 ctx
->hmac_key_length
);
1345 memset(req_ctx
->key_buffer
+ ctx
->hmac_key_length
, 0,
1346 blocksize
- ctx
->hmac_key_length
);
1348 error
= artpec6_crypto_setup_out_descr(common
,
1349 (void *)&req_ctx
->key_md
,
1350 sizeof(req_ctx
->key_md
), false, false);
1354 error
= artpec6_crypto_setup_out_descr(common
,
1355 req_ctx
->key_buffer
, blocksize
,
1361 if (!(req_ctx
->hash_flags
& HASH_FLAG_INIT_CTX
)) {
1362 /* Restore context */
1363 sel_ctx
= regk_crypto_ext
;
1366 sel_ctx
= regk_crypto_init
;
1369 if (variant
== ARTPEC6_CRYPTO
) {
1370 req_ctx
->hash_md
&= ~A6_CRY_MD_HASH_SEL_CTX
;
1371 req_ctx
->hash_md
|= FIELD_PREP(A6_CRY_MD_HASH_SEL_CTX
, sel_ctx
);
1373 /* If this is the final round, set the final flag */
1374 if (req_ctx
->hash_flags
& HASH_FLAG_FINALIZE
)
1375 req_ctx
->hash_md
|= A6_CRY_MD_HASH_HMAC_FIN
;
1377 req_ctx
->hash_md
&= ~A7_CRY_MD_HASH_SEL_CTX
;
1378 req_ctx
->hash_md
|= FIELD_PREP(A7_CRY_MD_HASH_SEL_CTX
, sel_ctx
);
1380 /* If this is the final round, set the final flag */
1381 if (req_ctx
->hash_flags
& HASH_FLAG_FINALIZE
)
1382 req_ctx
->hash_md
|= A7_CRY_MD_HASH_HMAC_FIN
;
1385 /* Setup up metadata descriptors */
1386 error
= artpec6_crypto_setup_out_descr(common
,
1387 (void *)&req_ctx
->hash_md
,
1388 sizeof(req_ctx
->hash_md
), false, false);
1392 error
= artpec6_crypto_setup_in_descr(common
, ac
->pad_buffer
, 4, false);
1397 error
= artpec6_crypto_setup_out_descr(common
,
1398 req_ctx
->digeststate
,
1399 contextsize
, false, false);
1405 if (req_ctx
->hash_flags
& HASH_FLAG_UPDATE
) {
1406 size_t done_bytes
= 0;
1407 size_t total_bytes
= areq
->nbytes
+ req_ctx
->partial_bytes
;
1408 size_t ready_bytes
= round_down(total_bytes
, blocksize
);
1409 struct artpec6_crypto_walk walk
;
1411 run_hw
= ready_bytes
> 0;
1412 if (req_ctx
->partial_bytes
&& ready_bytes
) {
		/* We have a partial buffer and will send at least some bytes
		 * to the HW. Empty this partial buffer before tackling
		 * the rest of the data.
		 */
1417 memcpy(req_ctx
->partial_buffer_out
,
1418 req_ctx
->partial_buffer
,
1419 req_ctx
->partial_bytes
);
1421 error
= artpec6_crypto_setup_out_descr(common
,
1422 req_ctx
->partial_buffer_out
,
1423 req_ctx
->partial_bytes
,
1428 /* Reset partial buffer */
1429 done_bytes
+= req_ctx
->partial_bytes
;
1430 req_ctx
->partial_bytes
= 0;
1433 artpec6_crypto_walk_init(&walk
, areq
->src
);
1435 error
= artpec6_crypto_setup_sg_descrs_out(common
, &walk
,
1442 size_t sg_skip
= ready_bytes
- done_bytes
;
1443 size_t sg_rem
= areq
->nbytes
- sg_skip
;
1445 sg_pcopy_to_buffer(areq
->src
, sg_nents(areq
->src
),
1446 req_ctx
->partial_buffer
+
1447 req_ctx
->partial_bytes
,
1450 req_ctx
->partial_bytes
+= sg_rem
;
1453 req_ctx
->digcnt
+= ready_bytes
;
1454 req_ctx
->hash_flags
&= ~(HASH_FLAG_UPDATE
);
1458 if (req_ctx
->hash_flags
& HASH_FLAG_FINALIZE
) {
1459 bool needtrim
= contextsize
!= digestsize
;
1460 size_t hash_pad_len
;
1464 if (variant
== ARTPEC6_CRYPTO
)
1465 oper
= FIELD_GET(A6_CRY_MD_OPER
, req_ctx
->hash_md
);
1467 oper
= FIELD_GET(A7_CRY_MD_OPER
, req_ctx
->hash_md
);
1469 /* Write out the partial buffer if present */
1470 if (req_ctx
->partial_bytes
) {
1471 memcpy(req_ctx
->partial_buffer_out
,
1472 req_ctx
->partial_buffer
,
1473 req_ctx
->partial_bytes
);
1474 error
= artpec6_crypto_setup_out_descr(common
,
1475 req_ctx
->partial_buffer_out
,
1476 req_ctx
->partial_bytes
,
1481 req_ctx
->digcnt
+= req_ctx
->partial_bytes
;
1482 req_ctx
->partial_bytes
= 0;
1485 if (req_ctx
->hash_flags
& HASH_FLAG_HMAC
)
1486 digest_bits
= 8 * (req_ctx
->digcnt
+ blocksize
);
1488 digest_bits
= 8 * req_ctx
->digcnt
;
1490 /* Add the hash pad */
1491 hash_pad_len
= create_hash_pad(oper
, req_ctx
->pad_buffer
,
1492 req_ctx
->digcnt
, digest_bits
);
1493 error
= artpec6_crypto_setup_out_descr(common
,
1494 req_ctx
->pad_buffer
,
1495 hash_pad_len
, false,
1497 req_ctx
->digcnt
= 0;
1502 /* Descriptor for the final result */
1503 error
= artpec6_crypto_setup_in_descr(common
, areq
->result
,
1510 /* Discard the extra context bytes for SHA-384 */
1511 error
= artpec6_crypto_setup_in_descr(common
,
1512 req_ctx
->partial_buffer
,
1513 digestsize
- contextsize
, true);
1518 } else { /* This is not the final operation for this request */
1520 return ARTPEC6_CRYPTO_PREPARE_HASH_NO_START
;
1522 /* Save the result to the context */
1523 error
= artpec6_crypto_setup_in_descr(common
,
1524 req_ctx
->digeststate
,
1525 contextsize
, false);
1531 req_ctx
->hash_flags
&= ~(HASH_FLAG_INIT_CTX
| HASH_FLAG_UPDATE
|
1532 HASH_FLAG_FINALIZE
);
1534 error
= artpec6_crypto_terminate_in_descrs(common
);
1538 error
= artpec6_crypto_terminate_out_descrs(common
);
1542 error
= artpec6_crypto_dma_map_descs(common
);
1546 return ARTPEC6_CRYPTO_PREPARE_HASH_START
;
1550 static int artpec6_crypto_aes_ecb_init(struct crypto_skcipher
*tfm
)
1552 struct artpec6_cryptotfm_context
*ctx
= crypto_skcipher_ctx(tfm
);
1554 tfm
->reqsize
= sizeof(struct artpec6_crypto_request_context
);
1555 ctx
->crypto_type
= ARTPEC6_CRYPTO_CIPHER_AES_ECB
;
1560 static int artpec6_crypto_aes_ctr_init(struct crypto_skcipher
*tfm
)
1562 struct artpec6_cryptotfm_context
*ctx
= crypto_skcipher_ctx(tfm
);
1564 ctx
->fallback
= crypto_alloc_skcipher(crypto_tfm_alg_name(&tfm
->base
),
1567 CRYPTO_ALG_NEED_FALLBACK
);
1568 if (IS_ERR(ctx
->fallback
))
1569 return PTR_ERR(ctx
->fallback
);
1571 tfm
->reqsize
= sizeof(struct artpec6_crypto_request_context
);
1572 ctx
->crypto_type
= ARTPEC6_CRYPTO_CIPHER_AES_CTR
;
1577 static int artpec6_crypto_aes_cbc_init(struct crypto_skcipher
*tfm
)
1579 struct artpec6_cryptotfm_context
*ctx
= crypto_skcipher_ctx(tfm
);
1581 tfm
->reqsize
= sizeof(struct artpec6_crypto_request_context
);
1582 ctx
->crypto_type
= ARTPEC6_CRYPTO_CIPHER_AES_CBC
;
1587 static int artpec6_crypto_aes_xts_init(struct crypto_skcipher
*tfm
)
1589 struct artpec6_cryptotfm_context
*ctx
= crypto_skcipher_ctx(tfm
);
1591 tfm
->reqsize
= sizeof(struct artpec6_crypto_request_context
);
1592 ctx
->crypto_type
= ARTPEC6_CRYPTO_CIPHER_AES_XTS
;
1597 static void artpec6_crypto_aes_exit(struct crypto_skcipher
*tfm
)
1599 struct artpec6_cryptotfm_context
*ctx
= crypto_skcipher_ctx(tfm
);
1601 memset(ctx
, 0, sizeof(*ctx
));
1604 static void artpec6_crypto_aes_ctr_exit(struct crypto_skcipher
*tfm
)
1606 struct artpec6_cryptotfm_context
*ctx
= crypto_skcipher_ctx(tfm
);
1608 crypto_free_skcipher(ctx
->fallback
);
1609 artpec6_crypto_aes_exit(tfm
);
1613 artpec6_crypto_cipher_set_key(struct crypto_skcipher
*cipher
, const u8
*key
,
1614 unsigned int keylen
)
1616 struct artpec6_cryptotfm_context
*ctx
=
1617 crypto_skcipher_ctx(cipher
);
1625 crypto_skcipher_set_flags(cipher
,
1626 CRYPTO_TFM_RES_BAD_KEY_LEN
);
1630 memcpy(ctx
->aes_key
, key
, keylen
);
1631 ctx
->key_length
= keylen
;
1636 artpec6_crypto_xts_set_key(struct crypto_skcipher
*cipher
, const u8
*key
,
1637 unsigned int keylen
)
1639 struct artpec6_cryptotfm_context
*ctx
=
1640 crypto_skcipher_ctx(cipher
);
1643 ret
= xts_check_key(&cipher
->base
, key
, keylen
);
1653 crypto_skcipher_set_flags(cipher
,
1654 CRYPTO_TFM_RES_BAD_KEY_LEN
);
1658 memcpy(ctx
->aes_key
, key
, keylen
);
1659 ctx
->key_length
= keylen
;
/** artpec6_crypto_prepare_crypto - Prepare an async block cipher crypto request
 *
 * @areq: The asynchronous request to process
1667 * @return 0 if the dma job was successfully prepared
1670 * This function sets up the PDMA descriptors for a block cipher request.
1672 * The required padding is added for AES-CTR using a statically defined
1675 * The PDMA descriptor list will be as follows:
1677 * OUT: [KEY_MD][KEY][EOP]<CIPHER_MD>[IV]<data_0>...[data_n][AES-CTR_pad]<eop>
1678 * IN: <CIPHER_MD><data_0>...[data_n]<intr>
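 *
 * As a concrete illustration (a sketch, not an additional mode): for an
 * AES-CBC-128 encryption of 32 bytes the OUT list holds the key metadata
 * word, the 16-byte key (EOP), the cipher metadata word, the 16-byte IV
 * and the 32 bytes of plaintext (EOP on the last data descriptor), while
 * the IN list holds a discarded 4-byte metadata word followed by the 32
 * bytes of ciphertext with the interrupt flag on the last descriptor.
 */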
1681 static int artpec6_crypto_prepare_crypto(struct skcipher_request
*areq
)
1684 struct artpec6_crypto_walk walk
;
1685 struct crypto_skcipher
*cipher
= crypto_skcipher_reqtfm(areq
);
1686 struct artpec6_cryptotfm_context
*ctx
= crypto_skcipher_ctx(cipher
);
1687 struct artpec6_crypto_request_context
*req_ctx
= NULL
;
1688 size_t iv_len
= crypto_skcipher_ivsize(cipher
);
1689 struct artpec6_crypto
*ac
= dev_get_drvdata(artpec6_crypto_dev
);
1690 enum artpec6_crypto_variant variant
= ac
->variant
;
1691 struct artpec6_crypto_req_common
*common
;
1692 bool cipher_decr
= false;
1694 u32 cipher_len
= 0; /* Same as regk_crypto_key_128 for NULL crypto */
1697 req_ctx
= skcipher_request_ctx(areq
);
1698 common
= &req_ctx
->common
;
1700 artpec6_crypto_init_dma_operation(common
);
1702 if (variant
== ARTPEC6_CRYPTO
)
1703 ctx
->key_md
= FIELD_PREP(A6_CRY_MD_OPER
, a6_regk_crypto_dlkey
);
1705 ctx
->key_md
= FIELD_PREP(A7_CRY_MD_OPER
, a7_regk_crypto_dlkey
);
1707 ret
= artpec6_crypto_setup_out_descr(common
, (void *)&ctx
->key_md
,
1708 sizeof(ctx
->key_md
), false, false);
1712 ret
= artpec6_crypto_setup_out_descr(common
, ctx
->aes_key
,
1713 ctx
->key_length
, true, false);
1717 req_ctx
->cipher_md
= 0;
1719 if (ctx
->crypto_type
== ARTPEC6_CRYPTO_CIPHER_AES_XTS
)
1720 cipher_klen
= ctx
->key_length
/2;
1722 cipher_klen
= ctx
->key_length
;
1725 switch (cipher_klen
) {
1727 cipher_len
= regk_crypto_key_128
;
1730 cipher_len
= regk_crypto_key_192
;
1733 cipher_len
= regk_crypto_key_256
;
1736 pr_err("%s: Invalid key length %d!\n",
1737 MODULE_NAME
, ctx
->key_length
);
1741 switch (ctx
->crypto_type
) {
1742 case ARTPEC6_CRYPTO_CIPHER_AES_ECB
:
1743 oper
= regk_crypto_aes_ecb
;
1744 cipher_decr
= req_ctx
->decrypt
;
1747 case ARTPEC6_CRYPTO_CIPHER_AES_CBC
:
1748 oper
= regk_crypto_aes_cbc
;
1749 cipher_decr
= req_ctx
->decrypt
;
1752 case ARTPEC6_CRYPTO_CIPHER_AES_CTR
:
1753 oper
= regk_crypto_aes_ctr
;
1754 cipher_decr
= false;
1757 case ARTPEC6_CRYPTO_CIPHER_AES_XTS
:
1758 oper
= regk_crypto_aes_xts
;
1759 cipher_decr
= req_ctx
->decrypt
;
1761 if (variant
== ARTPEC6_CRYPTO
)
1762 req_ctx
->cipher_md
|= A6_CRY_MD_CIPHER_DSEQ
;
1764 req_ctx
->cipher_md
|= A7_CRY_MD_CIPHER_DSEQ
;
1768 pr_err("%s: Invalid cipher mode %d!\n",
1769 MODULE_NAME
, ctx
->crypto_type
);
1773 if (variant
== ARTPEC6_CRYPTO
) {
1774 req_ctx
->cipher_md
|= FIELD_PREP(A6_CRY_MD_OPER
, oper
);
1775 req_ctx
->cipher_md
|= FIELD_PREP(A6_CRY_MD_CIPHER_LEN
,
1778 req_ctx
->cipher_md
|= A6_CRY_MD_CIPHER_DECR
;
1780 req_ctx
->cipher_md
|= FIELD_PREP(A7_CRY_MD_OPER
, oper
);
1781 req_ctx
->cipher_md
|= FIELD_PREP(A7_CRY_MD_CIPHER_LEN
,
1784 req_ctx
->cipher_md
|= A7_CRY_MD_CIPHER_DECR
;
1787 ret
= artpec6_crypto_setup_out_descr(common
,
1788 &req_ctx
->cipher_md
,
1789 sizeof(req_ctx
->cipher_md
),
1794 ret
= artpec6_crypto_setup_in_descr(common
, ac
->pad_buffer
, 4, false);
1799 ret
= artpec6_crypto_setup_out_descr(common
, areq
->iv
, iv_len
,
1805 artpec6_crypto_walk_init(&walk
, areq
->src
);
1806 ret
= artpec6_crypto_setup_sg_descrs_out(common
, &walk
, areq
->cryptlen
);
1811 artpec6_crypto_walk_init(&walk
, areq
->dst
);
1812 ret
= artpec6_crypto_setup_sg_descrs_in(common
, &walk
, areq
->cryptlen
);
1816 /* CTR-mode padding required by the HW. */
1817 if (ctx
->crypto_type
== ARTPEC6_CRYPTO_CIPHER_AES_CTR
||
1818 ctx
->crypto_type
== ARTPEC6_CRYPTO_CIPHER_AES_XTS
) {
1819 size_t pad
= ALIGN(areq
->cryptlen
, AES_BLOCK_SIZE
) -
1823 ret
= artpec6_crypto_setup_out_descr(common
,
1829 ret
= artpec6_crypto_setup_in_descr(common
,
1830 ac
->pad_buffer
, pad
,
1837 ret
= artpec6_crypto_terminate_out_descrs(common
);
1841 ret
= artpec6_crypto_terminate_in_descrs(common
);
1845 return artpec6_crypto_dma_map_descs(common
);
1848 static int artpec6_crypto_prepare_aead(struct aead_request
*areq
)
1852 size_t input_length
;
1853 struct artpec6_cryptotfm_context
*ctx
= crypto_tfm_ctx(areq
->base
.tfm
);
1854 struct artpec6_crypto_aead_req_ctx
*req_ctx
= aead_request_ctx(areq
);
1855 struct crypto_aead
*cipher
= crypto_aead_reqtfm(areq
);
1856 struct artpec6_crypto_req_common
*common
= &req_ctx
->common
;
1857 struct artpec6_crypto
*ac
= dev_get_drvdata(artpec6_crypto_dev
);
1858 enum artpec6_crypto_variant variant
= ac
->variant
;
1861 artpec6_crypto_init_dma_operation(common
);
1864 if (variant
== ARTPEC6_CRYPTO
) {
1865 ctx
->key_md
= FIELD_PREP(A6_CRY_MD_OPER
,
1866 a6_regk_crypto_dlkey
);
1868 ctx
->key_md
= FIELD_PREP(A7_CRY_MD_OPER
,
1869 a7_regk_crypto_dlkey
);
1871 ret
= artpec6_crypto_setup_out_descr(common
, (void *)&ctx
->key_md
,
1872 sizeof(ctx
->key_md
), false, false);
1876 ret
= artpec6_crypto_setup_out_descr(common
, ctx
->aes_key
,
1877 ctx
->key_length
, true, false);
1881 req_ctx
->cipher_md
= 0;
1883 switch (ctx
->key_length
) {
1885 md_cipher_len
= regk_crypto_key_128
;
1888 md_cipher_len
= regk_crypto_key_192
;
1891 md_cipher_len
= regk_crypto_key_256
;
1897 if (variant
== ARTPEC6_CRYPTO
) {
1898 req_ctx
->cipher_md
|= FIELD_PREP(A6_CRY_MD_OPER
,
1899 regk_crypto_aes_gcm
);
1900 req_ctx
->cipher_md
|= FIELD_PREP(A6_CRY_MD_CIPHER_LEN
,
1902 if (req_ctx
->decrypt
)
1903 req_ctx
->cipher_md
|= A6_CRY_MD_CIPHER_DECR
;
1905 req_ctx
->cipher_md
|= FIELD_PREP(A7_CRY_MD_OPER
,
1906 regk_crypto_aes_gcm
);
1907 req_ctx
->cipher_md
|= FIELD_PREP(A7_CRY_MD_CIPHER_LEN
,
1909 if (req_ctx
->decrypt
)
1910 req_ctx
->cipher_md
|= A7_CRY_MD_CIPHER_DECR
;
1913 ret
= artpec6_crypto_setup_out_descr(common
,
1914 (void *) &req_ctx
->cipher_md
,
1915 sizeof(req_ctx
->cipher_md
), false,
1920 ret
= artpec6_crypto_setup_in_descr(common
, ac
->pad_buffer
, 4, false);
1924 /* For the decryption, cryptlen includes the tag. */
1925 input_length
= areq
->cryptlen
;
1926 if (req_ctx
->decrypt
)
1927 input_length
-= AES_BLOCK_SIZE
;
1929 /* Prepare the context buffer */
1930 req_ctx
->hw_ctx
.aad_length_bits
=
1931 __cpu_to_be64(8*areq
->assoclen
);
1933 req_ctx
->hw_ctx
.text_length_bits
=
1934 __cpu_to_be64(8*input_length
);
1936 memcpy(req_ctx
->hw_ctx
.J0
, areq
->iv
, crypto_aead_ivsize(cipher
));
	/* The HW omits the initial increment of the counter field. */
1938 memcpy(req_ctx
->hw_ctx
.J0
+ GCM_AES_IV_SIZE
, "\x00\x00\x00\x01", 4);
1940 ret
= artpec6_crypto_setup_out_descr(common
, &req_ctx
->hw_ctx
,
1941 sizeof(struct artpec6_crypto_aead_hw_ctx
), false, false);
1946 struct artpec6_crypto_walk walk
;
1948 artpec6_crypto_walk_init(&walk
, areq
->src
);
1950 /* Associated data */
1951 count
= areq
->assoclen
;
1952 ret
= artpec6_crypto_setup_sg_descrs_out(common
, &walk
, count
);
1956 if (!IS_ALIGNED(areq
->assoclen
, 16)) {
1957 size_t assoc_pad
= 16 - (areq
->assoclen
% 16);
1958 /* The HW mandates zero padding here */
1959 ret
= artpec6_crypto_setup_out_descr(common
,
1967 /* Data to crypto */
1968 count
= input_length
;
1969 ret
= artpec6_crypto_setup_sg_descrs_out(common
, &walk
, count
);
1973 if (!IS_ALIGNED(input_length
, 16)) {
1974 size_t crypto_pad
= 16 - (input_length
% 16);
1975 /* The HW mandates zero padding here */
1976 ret
= artpec6_crypto_setup_out_descr(common
,
1986 /* Data from crypto */
1988 struct artpec6_crypto_walk walk
;
1989 size_t output_len
= areq
->cryptlen
;
1991 if (req_ctx
->decrypt
)
1992 output_len
-= AES_BLOCK_SIZE
;
1994 artpec6_crypto_walk_init(&walk
, areq
->dst
);
1996 /* skip associated data in the output */
1997 count
= artpec6_crypto_walk_advance(&walk
, areq
->assoclen
);
2002 ret
= artpec6_crypto_setup_sg_descrs_in(common
, &walk
, count
);
2006 /* Put padding between the cryptotext and the auth tag */
2007 if (!IS_ALIGNED(output_len
, 16)) {
2008 size_t crypto_pad
= 16 - (output_len
% 16);
2010 ret
= artpec6_crypto_setup_in_descr(common
,
2017 /* The authentication tag shall follow immediately after
2018 * the output ciphertext. For decryption it is put in a context
	 * buffer for later comparison against the input tag.
	 */
2021 count
= AES_BLOCK_SIZE
;
2023 if (req_ctx
->decrypt
) {
2024 ret
= artpec6_crypto_setup_in_descr(common
,
2025 req_ctx
->decryption_tag
, count
, false);
2030 ret
= artpec6_crypto_setup_sg_descrs_in(common
, &walk
,
2038 ret
= artpec6_crypto_terminate_in_descrs(common
);
2042 ret
= artpec6_crypto_terminate_out_descrs(common
);
2046 return artpec6_crypto_dma_map_descs(common
);
2049 static void artpec6_crypto_process_queue(struct artpec6_crypto
*ac
)
2051 struct artpec6_crypto_req_common
*req
;
2053 while (!list_empty(&ac
->queue
) && !artpec6_crypto_busy()) {
2054 req
= list_first_entry(&ac
->queue
,
2055 struct artpec6_crypto_req_common
,
2057 list_move_tail(&req
->list
, &ac
->pending
);
2058 artpec6_crypto_start_dma(req
);
2060 req
->req
->complete(req
->req
, -EINPROGRESS
);
	/*
	 * In some cases, the hardware can raise an in_eop_flush interrupt
	 * before actually updating the status, so we have a timer which will
	 * recheck the status on timeout. Since the cases are expected to be
	 * very rare, we use a relatively large timeout value. There should be
	 * no noticeable negative effect if we time out spuriously.
	 */
2070 if (ac
->pending_count
)
2071 mod_timer(&ac
->timer
, jiffies
+ msecs_to_jiffies(100));
2073 del_timer(&ac
->timer
);
2076 static void artpec6_crypto_timeout(struct timer_list
*t
)
2078 struct artpec6_crypto
*ac
= from_timer(ac
, t
, timer
);
2080 dev_info_ratelimited(artpec6_crypto_dev
, "timeout\n");
2082 tasklet_schedule(&ac
->task
);
2085 static void artpec6_crypto_task(unsigned long data
)
2087 struct artpec6_crypto
*ac
= (struct artpec6_crypto
*)data
;
2088 struct artpec6_crypto_req_common
*req
;
2089 struct artpec6_crypto_req_common
*n
;
2091 if (list_empty(&ac
->pending
)) {
2092 pr_debug("Spurious IRQ\n");
2096 spin_lock_bh(&ac
->queue_lock
);
2098 list_for_each_entry_safe(req
, n
, &ac
->pending
, list
) {
2099 struct artpec6_crypto_dma_descriptors
*dma
= req
->dma
;
2102 dma_sync_single_for_cpu(artpec6_crypto_dev
, dma
->stat_dma_addr
,
2103 sizeof(dma
->stat
[0]),
2106 stat
= req
->dma
->stat
[req
->dma
->in_cnt
-1];
2108 /* A non-zero final status descriptor indicates
2109 * this job has finished.
2111 pr_debug("Request %p status is %X\n", req
, stat
);
2115 /* Allow testing of timeout handling with fault injection */
2116 #ifdef CONFIG_FAULT_INJECTION
2117 if (should_fail(&artpec6_crypto_fail_status_read
, 1))
2121 pr_debug("Completing request %p\n", req
);
2123 list_del(&req
->list
);
2125 artpec6_crypto_dma_unmap_all(req
);
2126 artpec6_crypto_copy_bounce_buffers(req
);
2128 ac
->pending_count
--;
2129 artpec6_crypto_common_destroy(req
);
2130 req
->complete(req
->req
);
2133 artpec6_crypto_process_queue(ac
);
2135 spin_unlock_bh(&ac
->queue_lock
);
2138 static void artpec6_crypto_complete_crypto(struct crypto_async_request
*req
)
2140 req
->complete(req
, 0);
2144 artpec6_crypto_complete_cbc_decrypt(struct crypto_async_request
*req
)
2146 struct skcipher_request
*cipher_req
= container_of(req
,
2147 struct skcipher_request
, base
);
2149 scatterwalk_map_and_copy(cipher_req
->iv
, cipher_req
->src
,
2150 cipher_req
->cryptlen
- AES_BLOCK_SIZE
,
2152 req
->complete(req
, 0);
2156 artpec6_crypto_complete_cbc_encrypt(struct crypto_async_request
*req
)
2158 struct skcipher_request
*cipher_req
= container_of(req
,
2159 struct skcipher_request
, base
);
2161 scatterwalk_map_and_copy(cipher_req
->iv
, cipher_req
->dst
,
2162 cipher_req
->cryptlen
- AES_BLOCK_SIZE
,
2164 req
->complete(req
, 0);
2167 static void artpec6_crypto_complete_aead(struct crypto_async_request
*req
)
2171 /* Verify GCM hashtag. */
2172 struct aead_request
*areq
= container_of(req
,
2173 struct aead_request
, base
);
2174 struct artpec6_crypto_aead_req_ctx
*req_ctx
= aead_request_ctx(areq
);
2176 if (req_ctx
->decrypt
) {
2177 u8 input_tag
[AES_BLOCK_SIZE
];
2179 sg_pcopy_to_buffer(areq
->src
,
2180 sg_nents(areq
->src
),
2183 areq
->assoclen
+ areq
->cryptlen
-
2186 if (memcmp(req_ctx
->decryption_tag
,
2189 pr_debug("***EBADMSG:\n");
2190 print_hex_dump_debug("ref:", DUMP_PREFIX_ADDRESS
, 32, 1,
2191 input_tag
, AES_BLOCK_SIZE
, true);
2192 print_hex_dump_debug("out:", DUMP_PREFIX_ADDRESS
, 32, 1,
2193 req_ctx
->decryption_tag
,
2194 AES_BLOCK_SIZE
, true);
2200 req
->complete(req
, result
);
2203 static void artpec6_crypto_complete_hash(struct crypto_async_request
*req
)
2205 req
->complete(req
, 0);
2209 /*------------------- Hash functions -----------------------------------------*/
2211 artpec6_crypto_hash_set_key(struct crypto_ahash
*tfm
,
2212 const u8
*key
, unsigned int keylen
)
2214 struct artpec6_hashalg_context
*tfm_ctx
= crypto_tfm_ctx(&tfm
->base
);
2219 pr_err("Invalid length (%d) of HMAC key\n",
2224 memset(tfm_ctx
->hmac_key
, 0, sizeof(tfm_ctx
->hmac_key
));
2226 blocksize
= crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm
));
2228 if (keylen
> blocksize
) {
2229 SHASH_DESC_ON_STACK(hdesc
, tfm_ctx
->child_hash
);
2231 hdesc
->tfm
= tfm_ctx
->child_hash
;
2232 hdesc
->flags
= crypto_ahash_get_flags(tfm
) &
2233 CRYPTO_TFM_REQ_MAY_SLEEP
;
2235 tfm_ctx
->hmac_key_length
= blocksize
;
2236 ret
= crypto_shash_digest(hdesc
, key
, keylen
,
2242 memcpy(tfm_ctx
->hmac_key
, key
, keylen
);
2243 tfm_ctx
->hmac_key_length
= keylen
;
2250 artpec6_crypto_init_hash(struct ahash_request
*req
, u8 type
, int hmac
)
2252 struct artpec6_crypto
*ac
= dev_get_drvdata(artpec6_crypto_dev
);
2253 enum artpec6_crypto_variant variant
= ac
->variant
;
2254 struct artpec6_hash_request_context
*req_ctx
= ahash_request_ctx(req
);
2257 memset(req_ctx
, 0, sizeof(*req_ctx
));
2259 req_ctx
->hash_flags
= HASH_FLAG_INIT_CTX
;
2261 req_ctx
->hash_flags
|= (HASH_FLAG_HMAC
| HASH_FLAG_UPDATE_KEY
);
2264 case ARTPEC6_CRYPTO_HASH_SHA1
:
2265 oper
= hmac
? regk_crypto_hmac_sha1
: regk_crypto_sha1
;
2267 case ARTPEC6_CRYPTO_HASH_SHA256
:
2268 oper
= hmac
? regk_crypto_hmac_sha256
: regk_crypto_sha256
;
2270 case ARTPEC6_CRYPTO_HASH_SHA384
:
2271 oper
= hmac
? regk_crypto_hmac_sha384
: regk_crypto_sha384
;
2273 case ARTPEC6_CRYPTO_HASH_SHA512
:
2274 oper
= hmac
? regk_crypto_hmac_sha512
: regk_crypto_sha512
;
2278 pr_err("%s: Unsupported hash type 0x%x\n", MODULE_NAME
, type
);
2282 if (variant
== ARTPEC6_CRYPTO
)
2283 req_ctx
->hash_md
= FIELD_PREP(A6_CRY_MD_OPER
, oper
);
2285 req_ctx
->hash_md
= FIELD_PREP(A7_CRY_MD_OPER
, oper
);
2290 static int artpec6_crypto_prepare_submit_hash(struct ahash_request
*req
)
2292 struct artpec6_hash_request_context
*req_ctx
= ahash_request_ctx(req
);
2295 if (!req_ctx
->common
.dma
) {
2296 ret
= artpec6_crypto_common_init(&req_ctx
->common
,
2298 artpec6_crypto_complete_hash
,
2305 ret
= artpec6_crypto_prepare_hash(req
);
2307 case ARTPEC6_CRYPTO_PREPARE_HASH_START
:
2308 ret
= artpec6_crypto_submit(&req_ctx
->common
);
2311 case ARTPEC6_CRYPTO_PREPARE_HASH_NO_START
:
2316 artpec6_crypto_common_destroy(&req_ctx
->common
);
2323 static int artpec6_crypto_hash_final(struct ahash_request
*req
)
2325 struct artpec6_hash_request_context
*req_ctx
= ahash_request_ctx(req
);
2327 req_ctx
->hash_flags
|= HASH_FLAG_FINALIZE
;
2329 return artpec6_crypto_prepare_submit_hash(req
);
2332 static int artpec6_crypto_hash_update(struct ahash_request
*req
)
2334 struct artpec6_hash_request_context
*req_ctx
= ahash_request_ctx(req
);
2336 req_ctx
->hash_flags
|= HASH_FLAG_UPDATE
;
2338 return artpec6_crypto_prepare_submit_hash(req
);
2341 static int artpec6_crypto_sha1_init(struct ahash_request
*req
)
2343 return artpec6_crypto_init_hash(req
, ARTPEC6_CRYPTO_HASH_SHA1
, 0);
2346 static int artpec6_crypto_sha1_digest(struct ahash_request
*req
)
2348 struct artpec6_hash_request_context
*req_ctx
= ahash_request_ctx(req
);
2350 artpec6_crypto_init_hash(req
, ARTPEC6_CRYPTO_HASH_SHA1
, 0);
2352 req_ctx
->hash_flags
|= HASH_FLAG_UPDATE
| HASH_FLAG_FINALIZE
;
2354 return artpec6_crypto_prepare_submit_hash(req
);
2357 static int artpec6_crypto_sha256_init(struct ahash_request
*req
)
2359 return artpec6_crypto_init_hash(req
, ARTPEC6_CRYPTO_HASH_SHA256
, 0);
2362 static int artpec6_crypto_sha256_digest(struct ahash_request
*req
)
2364 struct artpec6_hash_request_context
*req_ctx
= ahash_request_ctx(req
);
2366 artpec6_crypto_init_hash(req
, ARTPEC6_CRYPTO_HASH_SHA256
, 0);
2367 req_ctx
->hash_flags
|= HASH_FLAG_UPDATE
| HASH_FLAG_FINALIZE
;
2369 return artpec6_crypto_prepare_submit_hash(req
);
static int __maybe_unused
artpec6_crypto_sha384_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA384, 0);
}

static int __maybe_unused
artpec6_crypto_sha384_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA384, 0);

	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int artpec6_crypto_sha512_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA512, 0);
}

static int artpec6_crypto_sha512_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA512, 0);

	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int artpec6_crypto_hmac_sha256_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA256, 1);
}

static int __maybe_unused
artpec6_crypto_hmac_sha384_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA384, 1);
}

static int artpec6_crypto_hmac_sha512_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA512, 1);
}

static int artpec6_crypto_hmac_sha256_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA256, 1);

	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int __maybe_unused
artpec6_crypto_hmac_sha384_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA384, 1);

	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int artpec6_crypto_hmac_sha512_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA512, 1);

	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

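/*
 * Transform-level setup. The HMAC variants also allocate a software shash of
 * the underlying digest, which artpec6_crypto_hash_set_key() uses to shorten
 * keys longer than the block size.
 */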
static int artpec6_crypto_ahash_init_common(struct crypto_tfm *tfm,
					    const char *base_hash_name)
{
	struct artpec6_hashalg_context *tfm_ctx = crypto_tfm_ctx(tfm);

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct artpec6_hash_request_context));
	memset(tfm_ctx, 0, sizeof(*tfm_ctx));

	if (base_hash_name) {
		struct crypto_shash *child;

		child = crypto_alloc_shash(base_hash_name, 0,
					   CRYPTO_ALG_NEED_FALLBACK);

		if (IS_ERR(child))
			return PTR_ERR(child);

		tfm_ctx->child_hash = child;
	}

	return 0;
}

static int artpec6_crypto_ahash_init(struct crypto_tfm *tfm)
{
	return artpec6_crypto_ahash_init_common(tfm, NULL);
}

static int artpec6_crypto_ahash_init_hmac_sha256(struct crypto_tfm *tfm)
{
	return artpec6_crypto_ahash_init_common(tfm, "sha256");
}

static int __maybe_unused
artpec6_crypto_ahash_init_hmac_sha384(struct crypto_tfm *tfm)
{
	return artpec6_crypto_ahash_init_common(tfm, "sha384");
}

static int artpec6_crypto_ahash_init_hmac_sha512(struct crypto_tfm *tfm)
{
	return artpec6_crypto_ahash_init_common(tfm, "sha512");
}

static void artpec6_crypto_ahash_exit(struct crypto_tfm *tfm)
{
	struct artpec6_hashalg_context *tfm_ctx = crypto_tfm_ctx(tfm);

	if (tfm_ctx->child_hash)
		crypto_free_shash(tfm_ctx->child_hash);

	memset(tfm_ctx->hmac_key, 0, sizeof(tfm_ctx->hmac_key));
	tfm_ctx->hmac_key_length = 0;
}

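/*
 * Export/import serialise the partial hash state so a request can be
 * suspended and resumed. The operation code is stored in a variant-neutral
 * form and re-encoded for the running hardware variant on import.
 */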
static int artpec6_crypto_hash_export(struct ahash_request *req, void *out)
{
	const struct artpec6_hash_request_context *ctx = ahash_request_ctx(req);
	struct artpec6_hash_export_state *state = out;
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	enum artpec6_crypto_variant variant = ac->variant;

	BUILD_BUG_ON(sizeof(state->partial_buffer) !=
		     sizeof(ctx->partial_buffer));
	BUILD_BUG_ON(sizeof(state->digeststate) != sizeof(ctx->digeststate));

	state->digcnt = ctx->digcnt;
	state->partial_bytes = ctx->partial_bytes;
	state->hash_flags = ctx->hash_flags;

	if (variant == ARTPEC6_CRYPTO)
		state->oper = FIELD_GET(A6_CRY_MD_OPER, ctx->hash_md);
	else
		state->oper = FIELD_GET(A7_CRY_MD_OPER, ctx->hash_md);

	memcpy(state->partial_buffer, ctx->partial_buffer,
	       sizeof(state->partial_buffer));
	memcpy(state->digeststate, ctx->digeststate,
	       sizeof(state->digeststate));

	return 0;
}

static int artpec6_crypto_hash_import(struct ahash_request *req, const void *in)
{
	struct artpec6_hash_request_context *ctx = ahash_request_ctx(req);
	const struct artpec6_hash_export_state *state = in;
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	enum artpec6_crypto_variant variant = ac->variant;

	memset(ctx, 0, sizeof(*ctx));

	ctx->digcnt = state->digcnt;
	ctx->partial_bytes = state->partial_bytes;
	ctx->hash_flags = state->hash_flags;

	if (variant == ARTPEC6_CRYPTO)
		ctx->hash_md = FIELD_PREP(A6_CRY_MD_OPER, state->oper);
	else
		ctx->hash_md = FIELD_PREP(A7_CRY_MD_OPER, state->oper);

	memcpy(ctx->partial_buffer, state->partial_buffer,
	       sizeof(state->partial_buffer));
	memcpy(ctx->digeststate, state->digeststate,
	       sizeof(state->digeststate));

	return 0;
}

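/*
 * Partition the PDMA internal buffer memory between the OUT and IN channels,
 * enable both channels and unmask the data and EOP flush interrupts.
 */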
static int init_crypto_hw(struct artpec6_crypto *ac)
{
	enum artpec6_crypto_variant variant = ac->variant;
	void __iomem *base = ac->base;
	u32 out_descr_buf_size;
	u32 out_data_buf_size;
	u32 in_data_buf_size;
	u32 in_descr_buf_size;
	u32 in_stat_buf_size;
	u32 in, out;

	/*
	 * The PDMA unit contains 1984 bytes of internal memory for the OUT
	 * channels and 1024 bytes for the IN channel. This is an elastic
	 * memory used to internally store the descriptors and data. The
	 * values are specified in 64 byte increments. Trustzone buffers are
	 * not used at this stage.
	 */
	out_data_buf_size = 16;		/* 1024 bytes for data */
	out_descr_buf_size = 15;	/* 960 bytes for descriptors */
	in_data_buf_size = 8;		/* 512 bytes for data */
	in_descr_buf_size = 4;		/* 256 bytes for descriptors */
	in_stat_buf_size = 4;		/* 256 bytes for stat descrs */

	BUILD_BUG_ON_MSG((out_data_buf_size
				+ out_descr_buf_size) * 64 > 1984,
			 "Invalid OUT configuration");

	BUILD_BUG_ON_MSG((in_data_buf_size
				+ in_descr_buf_size
				+ in_stat_buf_size) * 64 > 1024,
			 "Invalid IN configuration");

	in = FIELD_PREP(PDMA_IN_BUF_CFG_DATA_BUF_SIZE, in_data_buf_size) |
	     FIELD_PREP(PDMA_IN_BUF_CFG_DESCR_BUF_SIZE, in_descr_buf_size) |
	     FIELD_PREP(PDMA_IN_BUF_CFG_STAT_BUF_SIZE, in_stat_buf_size);

	out = FIELD_PREP(PDMA_OUT_BUF_CFG_DATA_BUF_SIZE, out_data_buf_size) |
	      FIELD_PREP(PDMA_OUT_BUF_CFG_DESCR_BUF_SIZE, out_descr_buf_size);

	writel_relaxed(out, base + PDMA_OUT_BUF_CFG);
	writel_relaxed(PDMA_OUT_CFG_EN, base + PDMA_OUT_CFG);

	if (variant == ARTPEC6_CRYPTO) {
		writel_relaxed(in, base + A6_PDMA_IN_BUF_CFG);
		writel_relaxed(PDMA_IN_CFG_EN, base + A6_PDMA_IN_CFG);
		writel_relaxed(A6_PDMA_INTR_MASK_IN_DATA |
			       A6_PDMA_INTR_MASK_IN_EOP_FLUSH,
			       base + A6_PDMA_INTR_MASK);
	} else {
		writel_relaxed(in, base + A7_PDMA_IN_BUF_CFG);
		writel_relaxed(PDMA_IN_CFG_EN, base + A7_PDMA_IN_CFG);
		writel_relaxed(A7_PDMA_INTR_MASK_IN_DATA |
			       A7_PDMA_INTR_MASK_IN_EOP_FLUSH,
			       base + A7_PDMA_INTR_MASK);
	}

	return 0;
}

static void artpec6_crypto_disable_hw(struct artpec6_crypto *ac)
{
	enum artpec6_crypto_variant variant = ac->variant;
	void __iomem *base = ac->base;

	if (variant == ARTPEC6_CRYPTO) {
		writel_relaxed(A6_PDMA_IN_CMD_STOP, base + A6_PDMA_IN_CMD);
		writel_relaxed(0, base + A6_PDMA_IN_CFG);
		writel_relaxed(A6_PDMA_OUT_CMD_STOP, base + PDMA_OUT_CMD);
	} else {
		writel_relaxed(A7_PDMA_IN_CMD_STOP, base + A7_PDMA_IN_CMD);
		writel_relaxed(0, base + A7_PDMA_IN_CFG);
		writel_relaxed(A7_PDMA_OUT_CMD_STOP, base + PDMA_OUT_CMD);
	}

	writel_relaxed(0, base + PDMA_OUT_CFG);
}

static irqreturn_t artpec6_crypto_irq(int irq, void *dev_id)
{
	struct artpec6_crypto *ac = dev_id;
	enum artpec6_crypto_variant variant = ac->variant;
	void __iomem *base = ac->base;
	u32 mask_in_data, mask_in_eop_flush;
	u32 in_cmd_flush_stat, in_cmd_reg;
	u32 ack_intr_reg;
	u32 ack = 0;
	u32 intr;

	if (variant == ARTPEC6_CRYPTO) {
		intr = readl_relaxed(base + A6_PDMA_MASKED_INTR);
		mask_in_data = A6_PDMA_INTR_MASK_IN_DATA;
		mask_in_eop_flush = A6_PDMA_INTR_MASK_IN_EOP_FLUSH;
		in_cmd_flush_stat = A6_PDMA_IN_CMD_FLUSH_STAT;
		in_cmd_reg = A6_PDMA_IN_CMD;
		ack_intr_reg = A6_PDMA_ACK_INTR;
	} else {
		intr = readl_relaxed(base + A7_PDMA_MASKED_INTR);
		mask_in_data = A7_PDMA_INTR_MASK_IN_DATA;
		mask_in_eop_flush = A7_PDMA_INTR_MASK_IN_EOP_FLUSH;
		in_cmd_flush_stat = A7_PDMA_IN_CMD_FLUSH_STAT;
		in_cmd_reg = A7_PDMA_IN_CMD;
		ack_intr_reg = A7_PDMA_ACK_INTR;
	}

	/* We get two interrupt notifications from each job.
	 * The in_data interrupt means all data was sent to memory; we then
	 * request a status flush command to write the per-job status to its
	 * status vector. This ensures that the tasklet can detect exactly
	 * how many submitted jobs have finished.
	 */
	if (intr & mask_in_data)
		ack |= mask_in_data;

	if (intr & mask_in_eop_flush)
		ack |= mask_in_eop_flush;
	else
		writel_relaxed(in_cmd_flush_stat, base + in_cmd_reg);

	writel_relaxed(ack, base + ack_intr_reg);

	if (intr & mask_in_eop_flush)
		tasklet_schedule(&ac->task);

	return IRQ_HANDLED;
}

/*------------------- Algorithm definitions ----------------------------------*/

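/*
 * hash_algos are registered on both variants; the SHA-384/512 based
 * artpec7_hash_algos below are only registered when running on ARTPEC-7.
 */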
static struct ahash_alg hash_algos[] = {
	/* SHA-1 */
	{
		.init = artpec6_crypto_sha1_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_sha1_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.halg.digestsize = SHA1_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "sha1",
			.cra_driver_name = "artpec-sha1",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA1_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},
	/* SHA-256 */
	{
		.init = artpec6_crypto_sha256_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_sha256_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.halg.digestsize = SHA256_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "sha256",
			.cra_driver_name = "artpec-sha256",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA256_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},
	/* HMAC SHA-256 */
	{
		.init = artpec6_crypto_hmac_sha256_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_hmac_sha256_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.setkey = artpec6_crypto_hash_set_key,
		.halg.digestsize = SHA256_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "hmac(sha256)",
			.cra_driver_name = "artpec-hmac-sha256",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA256_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init_hmac_sha256,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},
};

static struct ahash_alg artpec7_hash_algos[] = {
	/* SHA-384 */
	{
		.init = artpec6_crypto_sha384_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_sha384_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.halg.digestsize = SHA384_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "sha384",
			.cra_driver_name = "artpec-sha384",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA384_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},
	/* HMAC SHA-384 */
	{
		.init = artpec6_crypto_hmac_sha384_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_hmac_sha384_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.setkey = artpec6_crypto_hash_set_key,
		.halg.digestsize = SHA384_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "hmac(sha384)",
			.cra_driver_name = "artpec-hmac-sha384",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA384_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init_hmac_sha384,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},
	/* SHA-512 */
	{
		.init = artpec6_crypto_sha512_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_sha512_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.halg.digestsize = SHA512_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "sha512",
			.cra_driver_name = "artpec-sha512",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA512_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},
	/* HMAC SHA-512 */
	{
		.init = artpec6_crypto_hmac_sha512_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_hmac_sha512_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.setkey = artpec6_crypto_hash_set_key,
		.halg.digestsize = SHA512_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "hmac(sha512)",
			.cra_driver_name = "artpec-hmac-sha512",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA512_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init_hmac_sha512,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},
};

static struct skcipher_alg crypto_algos[] = {
	/* AES - ECB */
	{
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "artpec6-ecb-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = artpec6_crypto_cipher_set_key,
		.encrypt = artpec6_crypto_encrypt,
		.decrypt = artpec6_crypto_decrypt,
		.init = artpec6_crypto_aes_ecb_init,
		.exit = artpec6_crypto_aes_exit,
	},
	/* AES - CTR */
	{
		.base = {
			.cra_name = "ctr(aes)",
			.cra_driver_name = "artpec6-ctr-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = artpec6_crypto_cipher_set_key,
		.encrypt = artpec6_crypto_ctr_encrypt,
		.decrypt = artpec6_crypto_ctr_decrypt,
		.init = artpec6_crypto_aes_ctr_init,
		.exit = artpec6_crypto_aes_ctr_exit,
	},
	/* AES - CBC */
	{
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "artpec6-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = artpec6_crypto_cipher_set_key,
		.encrypt = artpec6_crypto_encrypt,
		.decrypt = artpec6_crypto_decrypt,
		.init = artpec6_crypto_aes_cbc_init,
		.exit = artpec6_crypto_aes_exit,
	},
	/* AES - XTS */
	{
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "artpec6-xts-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = 2*AES_MIN_KEY_SIZE,
		.max_keysize = 2*AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = artpec6_crypto_xts_set_key,
		.encrypt = artpec6_crypto_encrypt,
		.decrypt = artpec6_crypto_decrypt,
		.init = artpec6_crypto_aes_xts_init,
		.exit = artpec6_crypto_aes_exit,
	},
};

static struct aead_alg aead_algos[] = {
	{
		.init = artpec6_crypto_aead_init,
		.setkey = artpec6_crypto_aead_set_key,
		.encrypt = artpec6_crypto_aead_encrypt,
		.decrypt = artpec6_crypto_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,

		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "artpec-gcm-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		},
	}
};

#ifdef CONFIG_DEBUG_FS

static struct dentry *dbgfs_root;

static void artpec6_crypto_init_debugfs(void)
{
	dbgfs_root = debugfs_create_dir("artpec6_crypto", NULL);

	if (!dbgfs_root || IS_ERR(dbgfs_root)) {
		dbgfs_root = NULL;
		pr_err("%s: Could not initialise debugfs!\n", MODULE_NAME);
		return;
	}

#ifdef CONFIG_FAULT_INJECTION
	fault_create_debugfs_attr("fail_status_read", dbgfs_root,
				  &artpec6_crypto_fail_status_read);

	fault_create_debugfs_attr("fail_dma_array_full", dbgfs_root,
				  &artpec6_crypto_fail_dma_array_full);
#endif
}

static void artpec6_crypto_free_debugfs(void)
{
	if (!dbgfs_root)
		return;

	debugfs_remove_recursive(dbgfs_root);
	dbgfs_root = NULL;
}
#endif

static const struct of_device_id artpec6_crypto_of_match[] = {
	{ .compatible = "axis,artpec6-crypto", .data = (void *)ARTPEC6_CRYPTO },
	{ .compatible = "axis,artpec7-crypto", .data = (void *)ARTPEC7_CRYPTO },
	{}
};
MODULE_DEVICE_TABLE(of, artpec6_crypto_of_match);

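/*
 * Bring up a single crypto block instance: map the registers, set up the
 * DMA descriptor cache, pad buffers and tasklet, enable the hardware and
 * finally register the algorithm tables with the crypto API.
 */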
static int artpec6_crypto_probe(struct platform_device *pdev)
{
	const struct of_device_id *match;
	enum artpec6_crypto_variant variant;
	struct artpec6_crypto *ac;
	struct device *dev = &pdev->dev;
	void __iomem *base;
	struct resource *res;
	int irq;
	int err;

	if (artpec6_crypto_dev)
		return -ENODEV;

	match = of_match_node(artpec6_crypto_of_match, dev->of_node);
	if (!match)
		return -EINVAL;

	variant = (enum artpec6_crypto_variant)match->data;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(base))
		return PTR_ERR(base);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return -ENODEV;

	ac = devm_kzalloc(&pdev->dev, sizeof(struct artpec6_crypto),
			  GFP_KERNEL);
	if (!ac)
		return -ENOMEM;

	platform_set_drvdata(pdev, ac);
	ac->variant = variant;

	spin_lock_init(&ac->queue_lock);
	INIT_LIST_HEAD(&ac->queue);
	INIT_LIST_HEAD(&ac->pending);
	timer_setup(&ac->timer, artpec6_crypto_timeout, 0);

	ac->base = base;

	ac->dma_cache = kmem_cache_create("artpec6_crypto_dma",
		sizeof(struct artpec6_crypto_dma_descriptors),
		64, 0, NULL);
	if (!ac->dma_cache)
		return -ENOMEM;

#ifdef CONFIG_DEBUG_FS
	artpec6_crypto_init_debugfs();
#endif

	tasklet_init(&ac->task, artpec6_crypto_task,
		     (unsigned long)ac);

	ac->pad_buffer = devm_kzalloc(&pdev->dev, 2 * ARTPEC_CACHE_LINE_MAX,
				      GFP_KERNEL);
	if (!ac->pad_buffer)
		return -ENOMEM;
	ac->pad_buffer = PTR_ALIGN(ac->pad_buffer, ARTPEC_CACHE_LINE_MAX);

	ac->zero_buffer = devm_kzalloc(&pdev->dev, 2 * ARTPEC_CACHE_LINE_MAX,
				       GFP_KERNEL);
	if (!ac->zero_buffer)
		return -ENOMEM;
	ac->zero_buffer = PTR_ALIGN(ac->zero_buffer, ARTPEC_CACHE_LINE_MAX);

	err = init_crypto_hw(ac);
	if (err)
		goto free_cache;

	err = devm_request_irq(&pdev->dev, irq, artpec6_crypto_irq, 0,
			       "artpec6-crypto", ac);
	if (err)
		goto disable_hw;

	artpec6_crypto_dev = &pdev->dev;

	err = crypto_register_ahashes(hash_algos, ARRAY_SIZE(hash_algos));
	if (err) {
		dev_err(dev, "Failed to register ahashes\n");
		goto disable_hw;
	}

	if (variant != ARTPEC6_CRYPTO) {
		err = crypto_register_ahashes(artpec7_hash_algos,
					      ARRAY_SIZE(artpec7_hash_algos));
		if (err) {
			dev_err(dev, "Failed to register ahashes\n");
			goto unregister_ahashes;
		}
	}

	err = crypto_register_skciphers(crypto_algos, ARRAY_SIZE(crypto_algos));
	if (err) {
		dev_err(dev, "Failed to register ciphers\n");
		goto unregister_a7_ahashes;
	}

	err = crypto_register_aeads(aead_algos, ARRAY_SIZE(aead_algos));
	if (err) {
		dev_err(dev, "Failed to register aeads\n");
		goto unregister_algs;
	}

	return 0;

unregister_algs:
	crypto_unregister_skciphers(crypto_algos, ARRAY_SIZE(crypto_algos));
unregister_a7_ahashes:
	if (variant != ARTPEC6_CRYPTO)
		crypto_unregister_ahashes(artpec7_hash_algos,
					  ARRAY_SIZE(artpec7_hash_algos));
unregister_ahashes:
	crypto_unregister_ahashes(hash_algos, ARRAY_SIZE(hash_algos));
disable_hw:
	artpec6_crypto_disable_hw(ac);
free_cache:
	kmem_cache_destroy(ac->dma_cache);
	return err;
}

static int artpec6_crypto_remove(struct platform_device *pdev)
{
	struct artpec6_crypto *ac = platform_get_drvdata(pdev);
	int irq = platform_get_irq(pdev, 0);

	crypto_unregister_ahashes(hash_algos, ARRAY_SIZE(hash_algos));
	if (ac->variant != ARTPEC6_CRYPTO)
		crypto_unregister_ahashes(artpec7_hash_algos,
					  ARRAY_SIZE(artpec7_hash_algos));
	crypto_unregister_skciphers(crypto_algos, ARRAY_SIZE(crypto_algos));
	crypto_unregister_aeads(aead_algos, ARRAY_SIZE(aead_algos));

	tasklet_disable(&ac->task);
	devm_free_irq(&pdev->dev, irq, ac);
	tasklet_kill(&ac->task);
	del_timer_sync(&ac->timer);

	artpec6_crypto_disable_hw(ac);

	kmem_cache_destroy(ac->dma_cache);
#ifdef CONFIG_DEBUG_FS
	artpec6_crypto_free_debugfs();
#endif
	return 0;
}

static struct platform_driver artpec6_crypto_driver = {
	.probe   = artpec6_crypto_probe,
	.remove  = artpec6_crypto_remove,
	.driver  = {
		.name  = "artpec6-crypto",
		.owner = THIS_MODULE,
		.of_match_table = artpec6_crypto_of_match,
	},
};

module_platform_driver(artpec6_crypto_driver);

MODULE_AUTHOR("Axis Communications AB");
MODULE_DESCRIPTION("ARTPEC-6 Crypto driver");
MODULE_LICENSE("GPL");