1 // SPDX-License-Identifier: GPL-2.0-or-later
3 * talitos - Freescale Integrated Security Engine (SEC) device driver
5 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
7 * Scatterlist Crypto API glue code copied from files with the following:
8 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
10 * Crypto algorithm registration code copied from hifn driver:
11 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
12 * All rights reserved.
15 #include <linux/kernel.h>
16 #include <linux/module.h>
17 #include <linux/mod_devicetable.h>
18 #include <linux/device.h>
19 #include <linux/interrupt.h>
20 #include <linux/crypto.h>
21 #include <linux/hw_random.h>
22 #include <linux/of_address.h>
23 #include <linux/of_irq.h>
24 #include <linux/of_platform.h>
25 #include <linux/dma-mapping.h>
27 #include <linux/spinlock.h>
28 #include <linux/rtnetlink.h>
29 #include <linux/slab.h>
31 #include <crypto/algapi.h>
32 #include <crypto/aes.h>
33 #include <crypto/internal/des.h>
34 #include <crypto/sha1.h>
35 #include <crypto/sha2.h>
36 #include <crypto/md5.h>
37 #include <crypto/internal/aead.h>
38 #include <crypto/authenc.h>
39 #include <crypto/internal/skcipher.h>
40 #include <crypto/hash.h>
41 #include <crypto/internal/hash.h>
42 #include <crypto/scatterwalk.h>
46 static void to_talitos_ptr(struct talitos_ptr
*ptr
, dma_addr_t dma_addr
,
47 unsigned int len
, bool is_sec1
)
49 ptr
->ptr
= cpu_to_be32(lower_32_bits(dma_addr
));
51 ptr
->len1
= cpu_to_be16(len
);
53 ptr
->len
= cpu_to_be16(len
);
54 ptr
->eptr
= upper_32_bits(dma_addr
);
58 static void copy_talitos_ptr(struct talitos_ptr
*dst_ptr
,
59 struct talitos_ptr
*src_ptr
, bool is_sec1
)
61 dst_ptr
->ptr
= src_ptr
->ptr
;
63 dst_ptr
->len1
= src_ptr
->len1
;
65 dst_ptr
->len
= src_ptr
->len
;
66 dst_ptr
->eptr
= src_ptr
->eptr
;
70 static unsigned short from_talitos_ptr_len(struct talitos_ptr
*ptr
,
74 return be16_to_cpu(ptr
->len1
);
76 return be16_to_cpu(ptr
->len
);
79 static void to_talitos_ptr_ext_set(struct talitos_ptr
*ptr
, u8 val
,
86 static void to_talitos_ptr_ext_or(struct talitos_ptr
*ptr
, u8 val
, bool is_sec1
)
93 * map virtual single (contiguous) pointer to h/w descriptor pointer
95 static void __map_single_talitos_ptr(struct device
*dev
,
96 struct talitos_ptr
*ptr
,
97 unsigned int len
, void *data
,
98 enum dma_data_direction dir
,
101 dma_addr_t dma_addr
= dma_map_single_attrs(dev
, data
, len
, dir
, attrs
);
102 struct talitos_private
*priv
= dev_get_drvdata(dev
);
103 bool is_sec1
= has_ftr_sec1(priv
);
105 to_talitos_ptr(ptr
, dma_addr
, len
, is_sec1
);
108 static void map_single_talitos_ptr(struct device
*dev
,
109 struct talitos_ptr
*ptr
,
110 unsigned int len
, void *data
,
111 enum dma_data_direction dir
)
113 __map_single_talitos_ptr(dev
, ptr
, len
, data
, dir
, 0);
116 static void map_single_talitos_ptr_nosync(struct device
*dev
,
117 struct talitos_ptr
*ptr
,
118 unsigned int len
, void *data
,
119 enum dma_data_direction dir
)
121 __map_single_talitos_ptr(dev
, ptr
, len
, data
, dir
,
122 DMA_ATTR_SKIP_CPU_SYNC
);
126 * unmap bus single (contiguous) h/w descriptor pointer
128 static void unmap_single_talitos_ptr(struct device
*dev
,
129 struct talitos_ptr
*ptr
,
130 enum dma_data_direction dir
)
132 struct talitos_private
*priv
= dev_get_drvdata(dev
);
133 bool is_sec1
= has_ftr_sec1(priv
);
135 dma_unmap_single(dev
, be32_to_cpu(ptr
->ptr
),
136 from_talitos_ptr_len(ptr
, is_sec1
), dir
);
139 static int reset_channel(struct device
*dev
, int ch
)
141 struct talitos_private
*priv
= dev_get_drvdata(dev
);
142 unsigned int timeout
= TALITOS_TIMEOUT
;
143 bool is_sec1
= has_ftr_sec1(priv
);
146 setbits32(priv
->chan
[ch
].reg
+ TALITOS_CCCR_LO
,
147 TALITOS1_CCCR_LO_RESET
);
149 while ((in_be32(priv
->chan
[ch
].reg
+ TALITOS_CCCR_LO
) &
150 TALITOS1_CCCR_LO_RESET
) && --timeout
)
153 setbits32(priv
->chan
[ch
].reg
+ TALITOS_CCCR
,
154 TALITOS2_CCCR_RESET
);
156 while ((in_be32(priv
->chan
[ch
].reg
+ TALITOS_CCCR
) &
157 TALITOS2_CCCR_RESET
) && --timeout
)
162 dev_err(dev
, "failed to reset channel %d\n", ch
);
166 /* set 36-bit addressing, done writeback enable and done IRQ enable */
167 setbits32(priv
->chan
[ch
].reg
+ TALITOS_CCCR_LO
, TALITOS_CCCR_LO_EAE
|
168 TALITOS_CCCR_LO_CDWE
| TALITOS_CCCR_LO_CDIE
);
169 /* enable chaining descriptors */
171 setbits32(priv
->chan
[ch
].reg
+ TALITOS_CCCR_LO
,
174 /* and ICCR writeback, if available */
175 if (priv
->features
& TALITOS_FTR_HW_AUTH_CHECK
)
176 setbits32(priv
->chan
[ch
].reg
+ TALITOS_CCCR_LO
,
177 TALITOS_CCCR_LO_IWSE
);
182 static int reset_device(struct device
*dev
)
184 struct talitos_private
*priv
= dev_get_drvdata(dev
);
185 unsigned int timeout
= TALITOS_TIMEOUT
;
186 bool is_sec1
= has_ftr_sec1(priv
);
187 u32 mcr
= is_sec1
? TALITOS1_MCR_SWR
: TALITOS2_MCR_SWR
;
189 setbits32(priv
->reg
+ TALITOS_MCR
, mcr
);
191 while ((in_be32(priv
->reg
+ TALITOS_MCR
) & mcr
)
196 mcr
= TALITOS_MCR_RCA1
| TALITOS_MCR_RCA3
;
197 setbits32(priv
->reg
+ TALITOS_MCR
, mcr
);
201 dev_err(dev
, "failed to reset device\n");
209 * Reset and initialize the device
211 static int init_device(struct device
*dev
)
213 struct talitos_private
*priv
= dev_get_drvdata(dev
);
215 bool is_sec1
= has_ftr_sec1(priv
);
219 * errata documentation: warning: certain SEC interrupts
220 * are not fully cleared by writing the MCR:SWR bit,
221 * set bit twice to completely reset
223 err
= reset_device(dev
);
227 err
= reset_device(dev
);
232 for (ch
= 0; ch
< priv
->num_channels
; ch
++) {
233 err
= reset_channel(dev
, ch
);
238 /* enable channel done and error interrupts */
240 clrbits32(priv
->reg
+ TALITOS_IMR
, TALITOS1_IMR_INIT
);
241 clrbits32(priv
->reg
+ TALITOS_IMR_LO
, TALITOS1_IMR_LO_INIT
);
242 /* disable parity error check in DEU (erroneous? test vect.) */
243 setbits32(priv
->reg_deu
+ TALITOS_EUICR
, TALITOS1_DEUICR_KPE
);
245 setbits32(priv
->reg
+ TALITOS_IMR
, TALITOS2_IMR_INIT
);
246 setbits32(priv
->reg
+ TALITOS_IMR_LO
, TALITOS2_IMR_LO_INIT
);
249 /* disable integrity check error interrupts (use writeback instead) */
250 if (priv
->features
& TALITOS_FTR_HW_AUTH_CHECK
)
251 setbits32(priv
->reg_mdeu
+ TALITOS_EUICR_LO
,
252 TALITOS_MDEUICR_LO_ICE
);
258 * talitos_submit - submits a descriptor to the device for processing
259 * @dev: the SEC device to be used
260 * @ch: the SEC device channel to be used
261 * @desc: the descriptor to be processed by the device
262 * @callback: whom to call when processing is complete
263 * @context: a handle for use by caller (optional)
265 * desc must contain valid dma-mapped (bus physical) address pointers.
266 * callback must check err and feedback in descriptor header
267 * for device processing status.
269 static int talitos_submit(struct device
*dev
, int ch
, struct talitos_desc
*desc
,
270 void (*callback
)(struct device
*dev
,
271 struct talitos_desc
*desc
,
272 void *context
, int error
),
275 struct talitos_private
*priv
= dev_get_drvdata(dev
);
276 struct talitos_request
*request
;
279 bool is_sec1
= has_ftr_sec1(priv
);
281 spin_lock_irqsave(&priv
->chan
[ch
].head_lock
, flags
);
283 if (!atomic_inc_not_zero(&priv
->chan
[ch
].submit_count
)) {
284 /* h/w fifo is full */
285 spin_unlock_irqrestore(&priv
->chan
[ch
].head_lock
, flags
);
289 head
= priv
->chan
[ch
].head
;
290 request
= &priv
->chan
[ch
].fifo
[head
];
292 /* map descriptor and save caller data */
294 desc
->hdr1
= desc
->hdr
;
295 request
->dma_desc
= dma_map_single(dev
, &desc
->hdr1
,
299 request
->dma_desc
= dma_map_single(dev
, desc
,
303 request
->callback
= callback
;
304 request
->context
= context
;
306 /* increment fifo head */
307 priv
->chan
[ch
].head
= (priv
->chan
[ch
].head
+ 1) & (priv
->fifo_len
- 1);
310 request
->desc
= desc
;
314 out_be32(priv
->chan
[ch
].reg
+ TALITOS_FF
,
315 upper_32_bits(request
->dma_desc
));
316 out_be32(priv
->chan
[ch
].reg
+ TALITOS_FF_LO
,
317 lower_32_bits(request
->dma_desc
));
319 spin_unlock_irqrestore(&priv
->chan
[ch
].head_lock
, flags
);
324 static __be32
get_request_hdr(struct talitos_request
*request
, bool is_sec1
)
326 struct talitos_edesc
*edesc
;
329 return request
->desc
->hdr
;
331 if (!request
->desc
->next_desc
)
332 return request
->desc
->hdr1
;
334 edesc
= container_of(request
->desc
, struct talitos_edesc
, desc
);
336 return ((struct talitos_desc
*)(edesc
->buf
+ edesc
->dma_len
))->hdr1
;
340 * process what was done, notify callback of error if not
342 static void flush_channel(struct device
*dev
, int ch
, int error
, int reset_ch
)
344 struct talitos_private
*priv
= dev_get_drvdata(dev
);
345 struct talitos_request
*request
, saved_req
;
348 bool is_sec1
= has_ftr_sec1(priv
);
350 spin_lock_irqsave(&priv
->chan
[ch
].tail_lock
, flags
);
352 tail
= priv
->chan
[ch
].tail
;
353 while (priv
->chan
[ch
].fifo
[tail
].desc
) {
356 request
= &priv
->chan
[ch
].fifo
[tail
];
358 /* descriptors with their done bits set don't get the error */
360 hdr
= get_request_hdr(request
, is_sec1
);
362 if ((hdr
& DESC_HDR_DONE
) == DESC_HDR_DONE
)
370 dma_unmap_single(dev
, request
->dma_desc
,
374 /* copy entries so we can call callback outside lock */
375 saved_req
.desc
= request
->desc
;
376 saved_req
.callback
= request
->callback
;
377 saved_req
.context
= request
->context
;
379 /* release request entry in fifo */
381 request
->desc
= NULL
;
383 /* increment fifo tail */
384 priv
->chan
[ch
].tail
= (tail
+ 1) & (priv
->fifo_len
- 1);
386 spin_unlock_irqrestore(&priv
->chan
[ch
].tail_lock
, flags
);
388 atomic_dec(&priv
->chan
[ch
].submit_count
);
390 saved_req
.callback(dev
, saved_req
.desc
, saved_req
.context
,
392 /* channel may resume processing in single desc error case */
393 if (error
&& !reset_ch
&& status
== error
)
395 spin_lock_irqsave(&priv
->chan
[ch
].tail_lock
, flags
);
396 tail
= priv
->chan
[ch
].tail
;
399 spin_unlock_irqrestore(&priv
->chan
[ch
].tail_lock
, flags
);
403 * process completed requests for channels that have done status
405 #define DEF_TALITOS1_DONE(name, ch_done_mask) \
406 static void talitos1_done_##name(unsigned long data) \
408 struct device *dev = (struct device *)data; \
409 struct talitos_private *priv = dev_get_drvdata(dev); \
410 unsigned long flags; \
412 if (ch_done_mask & 0x10000000) \
413 flush_channel(dev, 0, 0, 0); \
414 if (ch_done_mask & 0x40000000) \
415 flush_channel(dev, 1, 0, 0); \
416 if (ch_done_mask & 0x00010000) \
417 flush_channel(dev, 2, 0, 0); \
418 if (ch_done_mask & 0x00040000) \
419 flush_channel(dev, 3, 0, 0); \
421 /* At this point, all completed channels have been processed */ \
422 /* Unmask done interrupts for channels completed later on. */ \
423 spin_lock_irqsave(&priv->reg_lock, flags); \
424 clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
425 clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT); \
426 spin_unlock_irqrestore(&priv->reg_lock, flags); \
429 DEF_TALITOS1_DONE(4ch
, TALITOS1_ISR_4CHDONE
)
430 DEF_TALITOS1_DONE(ch0
, TALITOS1_ISR_CH_0_DONE
)
432 #define DEF_TALITOS2_DONE(name, ch_done_mask) \
433 static void talitos2_done_##name(unsigned long data) \
435 struct device *dev = (struct device *)data; \
436 struct talitos_private *priv = dev_get_drvdata(dev); \
437 unsigned long flags; \
439 if (ch_done_mask & 1) \
440 flush_channel(dev, 0, 0, 0); \
441 if (ch_done_mask & (1 << 2)) \
442 flush_channel(dev, 1, 0, 0); \
443 if (ch_done_mask & (1 << 4)) \
444 flush_channel(dev, 2, 0, 0); \
445 if (ch_done_mask & (1 << 6)) \
446 flush_channel(dev, 3, 0, 0); \
448 /* At this point, all completed channels have been processed */ \
449 /* Unmask done interrupts for channels completed later on. */ \
450 spin_lock_irqsave(&priv->reg_lock, flags); \
451 setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
452 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT); \
453 spin_unlock_irqrestore(&priv->reg_lock, flags); \
456 DEF_TALITOS2_DONE(4ch
, TALITOS2_ISR_4CHDONE
)
457 DEF_TALITOS2_DONE(ch0
, TALITOS2_ISR_CH_0_DONE
)
458 DEF_TALITOS2_DONE(ch0_2
, TALITOS2_ISR_CH_0_2_DONE
)
459 DEF_TALITOS2_DONE(ch1_3
, TALITOS2_ISR_CH_1_3_DONE
)
462 * locate current (offending) descriptor
464 static __be32
current_desc_hdr(struct device
*dev
, int ch
)
466 struct talitos_private
*priv
= dev_get_drvdata(dev
);
470 cur_desc
= ((u64
)in_be32(priv
->chan
[ch
].reg
+ TALITOS_CDPR
)) << 32;
471 cur_desc
|= in_be32(priv
->chan
[ch
].reg
+ TALITOS_CDPR_LO
);
474 dev_err(dev
, "CDPR is NULL, giving up search for offending descriptor\n");
478 tail
= priv
->chan
[ch
].tail
;
481 while (priv
->chan
[ch
].fifo
[iter
].dma_desc
!= cur_desc
&&
482 priv
->chan
[ch
].fifo
[iter
].desc
->next_desc
!= cpu_to_be32(cur_desc
)) {
483 iter
= (iter
+ 1) & (priv
->fifo_len
- 1);
485 dev_err(dev
, "couldn't locate current descriptor\n");
490 if (priv
->chan
[ch
].fifo
[iter
].desc
->next_desc
== cpu_to_be32(cur_desc
)) {
491 struct talitos_edesc
*edesc
;
493 edesc
= container_of(priv
->chan
[ch
].fifo
[iter
].desc
,
494 struct talitos_edesc
, desc
);
495 return ((struct talitos_desc
*)
496 (edesc
->buf
+ edesc
->dma_len
))->hdr
;
499 return priv
->chan
[ch
].fifo
[iter
].desc
->hdr
;
503 * user diagnostics; report root cause of error based on execution unit status
505 static void report_eu_error(struct device
*dev
, int ch
, __be32 desc_hdr
)
507 struct talitos_private
*priv
= dev_get_drvdata(dev
);
511 desc_hdr
= cpu_to_be32(in_be32(priv
->chan
[ch
].reg
+ TALITOS_DESCBUF
));
513 switch (desc_hdr
& DESC_HDR_SEL0_MASK
) {
514 case DESC_HDR_SEL0_AFEU
:
515 dev_err(dev
, "AFEUISR 0x%08x_%08x\n",
516 in_be32(priv
->reg_afeu
+ TALITOS_EUISR
),
517 in_be32(priv
->reg_afeu
+ TALITOS_EUISR_LO
));
519 case DESC_HDR_SEL0_DEU
:
520 dev_err(dev
, "DEUISR 0x%08x_%08x\n",
521 in_be32(priv
->reg_deu
+ TALITOS_EUISR
),
522 in_be32(priv
->reg_deu
+ TALITOS_EUISR_LO
));
524 case DESC_HDR_SEL0_MDEUA
:
525 case DESC_HDR_SEL0_MDEUB
:
526 dev_err(dev
, "MDEUISR 0x%08x_%08x\n",
527 in_be32(priv
->reg_mdeu
+ TALITOS_EUISR
),
528 in_be32(priv
->reg_mdeu
+ TALITOS_EUISR_LO
));
530 case DESC_HDR_SEL0_RNG
:
531 dev_err(dev
, "RNGUISR 0x%08x_%08x\n",
532 in_be32(priv
->reg_rngu
+ TALITOS_ISR
),
533 in_be32(priv
->reg_rngu
+ TALITOS_ISR_LO
));
535 case DESC_HDR_SEL0_PKEU
:
536 dev_err(dev
, "PKEUISR 0x%08x_%08x\n",
537 in_be32(priv
->reg_pkeu
+ TALITOS_EUISR
),
538 in_be32(priv
->reg_pkeu
+ TALITOS_EUISR_LO
));
540 case DESC_HDR_SEL0_AESU
:
541 dev_err(dev
, "AESUISR 0x%08x_%08x\n",
542 in_be32(priv
->reg_aesu
+ TALITOS_EUISR
),
543 in_be32(priv
->reg_aesu
+ TALITOS_EUISR_LO
));
545 case DESC_HDR_SEL0_CRCU
:
546 dev_err(dev
, "CRCUISR 0x%08x_%08x\n",
547 in_be32(priv
->reg_crcu
+ TALITOS_EUISR
),
548 in_be32(priv
->reg_crcu
+ TALITOS_EUISR_LO
));
550 case DESC_HDR_SEL0_KEU
:
551 dev_err(dev
, "KEUISR 0x%08x_%08x\n",
552 in_be32(priv
->reg_pkeu
+ TALITOS_EUISR
),
553 in_be32(priv
->reg_pkeu
+ TALITOS_EUISR_LO
));
557 switch (desc_hdr
& DESC_HDR_SEL1_MASK
) {
558 case DESC_HDR_SEL1_MDEUA
:
559 case DESC_HDR_SEL1_MDEUB
:
560 dev_err(dev
, "MDEUISR 0x%08x_%08x\n",
561 in_be32(priv
->reg_mdeu
+ TALITOS_EUISR
),
562 in_be32(priv
->reg_mdeu
+ TALITOS_EUISR_LO
));
564 case DESC_HDR_SEL1_CRCU
:
565 dev_err(dev
, "CRCUISR 0x%08x_%08x\n",
566 in_be32(priv
->reg_crcu
+ TALITOS_EUISR
),
567 in_be32(priv
->reg_crcu
+ TALITOS_EUISR_LO
));
571 for (i
= 0; i
< 8; i
++)
572 dev_err(dev
, "DESCBUF 0x%08x_%08x\n",
573 in_be32(priv
->chan
[ch
].reg
+ TALITOS_DESCBUF
+ 8*i
),
574 in_be32(priv
->chan
[ch
].reg
+ TALITOS_DESCBUF_LO
+ 8*i
));
578 * recover from error interrupts
580 static void talitos_error(struct device
*dev
, u32 isr
, u32 isr_lo
)
582 struct talitos_private
*priv
= dev_get_drvdata(dev
);
583 unsigned int timeout
= TALITOS_TIMEOUT
;
584 int ch
, error
, reset_dev
= 0;
586 bool is_sec1
= has_ftr_sec1(priv
);
587 int reset_ch
= is_sec1
? 1 : 0; /* only SEC2 supports continuation */
589 for (ch
= 0; ch
< priv
->num_channels
; ch
++) {
590 /* skip channels without errors */
592 /* bits 29, 31, 17, 19 */
593 if (!(isr
& (1 << (29 + (ch
& 1) * 2 - (ch
& 2) * 6))))
596 if (!(isr
& (1 << (ch
* 2 + 1))))
602 v_lo
= in_be32(priv
->chan
[ch
].reg
+ TALITOS_CCPSR_LO
);
604 if (v_lo
& TALITOS_CCPSR_LO_DOF
) {
605 dev_err(dev
, "double fetch fifo overflow error\n");
609 if (v_lo
& TALITOS_CCPSR_LO_SOF
) {
610 /* h/w dropped descriptor */
611 dev_err(dev
, "single fetch fifo overflow error\n");
614 if (v_lo
& TALITOS_CCPSR_LO_MDTE
)
615 dev_err(dev
, "master data transfer error\n");
616 if (v_lo
& TALITOS_CCPSR_LO_SGDLZ
)
617 dev_err(dev
, is_sec1
? "pointer not complete error\n"
618 : "s/g data length zero error\n");
619 if (v_lo
& TALITOS_CCPSR_LO_FPZ
)
620 dev_err(dev
, is_sec1
? "parity error\n"
621 : "fetch pointer zero error\n");
622 if (v_lo
& TALITOS_CCPSR_LO_IDH
)
623 dev_err(dev
, "illegal descriptor header error\n");
624 if (v_lo
& TALITOS_CCPSR_LO_IEU
)
625 dev_err(dev
, is_sec1
? "static assignment error\n"
626 : "invalid exec unit error\n");
627 if (v_lo
& TALITOS_CCPSR_LO_EU
)
628 report_eu_error(dev
, ch
, current_desc_hdr(dev
, ch
));
630 if (v_lo
& TALITOS_CCPSR_LO_GB
)
631 dev_err(dev
, "gather boundary error\n");
632 if (v_lo
& TALITOS_CCPSR_LO_GRL
)
633 dev_err(dev
, "gather return/length error\n");
634 if (v_lo
& TALITOS_CCPSR_LO_SB
)
635 dev_err(dev
, "scatter boundary error\n");
636 if (v_lo
& TALITOS_CCPSR_LO_SRL
)
637 dev_err(dev
, "scatter return/length error\n");
640 flush_channel(dev
, ch
, error
, reset_ch
);
643 reset_channel(dev
, ch
);
645 setbits32(priv
->chan
[ch
].reg
+ TALITOS_CCCR
,
647 setbits32(priv
->chan
[ch
].reg
+ TALITOS_CCCR_LO
, 0);
648 while ((in_be32(priv
->chan
[ch
].reg
+ TALITOS_CCCR
) &
649 TALITOS2_CCCR_CONT
) && --timeout
)
652 dev_err(dev
, "failed to restart channel %d\n",
658 if (reset_dev
|| (is_sec1
&& isr
& ~TALITOS1_ISR_4CHERR
) ||
659 (!is_sec1
&& isr
& ~TALITOS2_ISR_4CHERR
) || isr_lo
) {
660 if (is_sec1
&& (isr_lo
& TALITOS1_ISR_TEA_ERR
))
661 dev_err(dev
, "TEA error: ISR 0x%08x_%08x\n",
664 dev_err(dev
, "done overflow, internal time out, or "
665 "rngu error: ISR 0x%08x_%08x\n", isr
, isr_lo
);
667 /* purge request queues */
668 for (ch
= 0; ch
< priv
->num_channels
; ch
++)
669 flush_channel(dev
, ch
, -EIO
, 1);
671 /* reset and reinitialize the device */
676 #define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
677 static irqreturn_t talitos1_interrupt_##name(int irq, void *data) \
679 struct device *dev = data; \
680 struct talitos_private *priv = dev_get_drvdata(dev); \
682 unsigned long flags; \
684 spin_lock_irqsave(&priv->reg_lock, flags); \
685 isr = in_be32(priv->reg + TALITOS_ISR); \
686 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
687 /* Acknowledge interrupt */ \
688 out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
689 out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
691 if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) { \
692 spin_unlock_irqrestore(&priv->reg_lock, flags); \
693 talitos_error(dev, isr & ch_err_mask, isr_lo); \
696 if (likely(isr & ch_done_mask)) { \
697 /* mask further done interrupts. */ \
698 setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
699 /* done_task will unmask done interrupts at exit */ \
700 tasklet_schedule(&priv->done_task[tlet]); \
702 spin_unlock_irqrestore(&priv->reg_lock, flags); \
705 return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
709 DEF_TALITOS1_INTERRUPT(4ch
, TALITOS1_ISR_4CHDONE
, TALITOS1_ISR_4CHERR
, 0)
711 #define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
712 static irqreturn_t talitos2_interrupt_##name(int irq, void *data) \
714 struct device *dev = data; \
715 struct talitos_private *priv = dev_get_drvdata(dev); \
717 unsigned long flags; \
719 spin_lock_irqsave(&priv->reg_lock, flags); \
720 isr = in_be32(priv->reg + TALITOS_ISR); \
721 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
722 /* Acknowledge interrupt */ \
723 out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
724 out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
726 if (unlikely(isr & ch_err_mask || isr_lo)) { \
727 spin_unlock_irqrestore(&priv->reg_lock, flags); \
728 talitos_error(dev, isr & ch_err_mask, isr_lo); \
731 if (likely(isr & ch_done_mask)) { \
732 /* mask further done interrupts. */ \
733 clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
734 /* done_task will unmask done interrupts at exit */ \
735 tasklet_schedule(&priv->done_task[tlet]); \
737 spin_unlock_irqrestore(&priv->reg_lock, flags); \
740 return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
744 DEF_TALITOS2_INTERRUPT(4ch
, TALITOS2_ISR_4CHDONE
, TALITOS2_ISR_4CHERR
, 0)
745 DEF_TALITOS2_INTERRUPT(ch0_2
, TALITOS2_ISR_CH_0_2_DONE
, TALITOS2_ISR_CH_0_2_ERR
,
747 DEF_TALITOS2_INTERRUPT(ch1_3
, TALITOS2_ISR_CH_1_3_DONE
, TALITOS2_ISR_CH_1_3_ERR
,
753 static int talitos_rng_data_present(struct hwrng
*rng
, int wait
)
755 struct device
*dev
= (struct device
*)rng
->priv
;
756 struct talitos_private
*priv
= dev_get_drvdata(dev
);
760 for (i
= 0; i
< 20; i
++) {
761 ofl
= in_be32(priv
->reg_rngu
+ TALITOS_EUSR_LO
) &
762 TALITOS_RNGUSR_LO_OFL
;
771 static int talitos_rng_data_read(struct hwrng
*rng
, u32
*data
)
773 struct device
*dev
= (struct device
*)rng
->priv
;
774 struct talitos_private
*priv
= dev_get_drvdata(dev
);
776 /* rng fifo requires 64-bit accesses */
777 *data
= in_be32(priv
->reg_rngu
+ TALITOS_EU_FIFO
);
778 *data
= in_be32(priv
->reg_rngu
+ TALITOS_EU_FIFO_LO
);
783 static int talitos_rng_init(struct hwrng
*rng
)
785 struct device
*dev
= (struct device
*)rng
->priv
;
786 struct talitos_private
*priv
= dev_get_drvdata(dev
);
787 unsigned int timeout
= TALITOS_TIMEOUT
;
789 setbits32(priv
->reg_rngu
+ TALITOS_EURCR_LO
, TALITOS_RNGURCR_LO_SR
);
790 while (!(in_be32(priv
->reg_rngu
+ TALITOS_EUSR_LO
)
791 & TALITOS_RNGUSR_LO_RD
)
795 dev_err(dev
, "failed to reset rng hw\n");
799 /* start generating */
800 setbits32(priv
->reg_rngu
+ TALITOS_EUDSR_LO
, 0);
805 static int talitos_register_rng(struct device
*dev
)
807 struct talitos_private
*priv
= dev_get_drvdata(dev
);
810 priv
->rng
.name
= dev_driver_string(dev
);
811 priv
->rng
.init
= talitos_rng_init
;
812 priv
->rng
.data_present
= talitos_rng_data_present
;
813 priv
->rng
.data_read
= talitos_rng_data_read
;
814 priv
->rng
.priv
= (unsigned long)dev
;
816 err
= hwrng_register(&priv
->rng
);
818 priv
->rng_registered
= true;
823 static void talitos_unregister_rng(struct device
*dev
)
825 struct talitos_private
*priv
= dev_get_drvdata(dev
);
827 if (!priv
->rng_registered
)
830 hwrng_unregister(&priv
->rng
);
831 priv
->rng_registered
= false;
837 #define TALITOS_CRA_PRIORITY 3000
839 * Defines a priority for doing AEAD with descriptors type
840 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
842 #define TALITOS_CRA_PRIORITY_AEAD_HSNA (TALITOS_CRA_PRIORITY - 1)
843 #ifdef CONFIG_CRYPTO_DEV_TALITOS2
844 #define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
846 #define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
848 #define TALITOS_MAX_IV_LENGTH 16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
853 __be32 desc_hdr_template
;
854 u8 key
[TALITOS_MAX_KEY_SIZE
];
855 u8 iv
[TALITOS_MAX_IV_LENGTH
];
858 unsigned int enckeylen
;
859 unsigned int authkeylen
;
862 #define HASH_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE
863 #define TALITOS_MDEU_MAX_CONTEXT_SIZE TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512
865 struct talitos_ahash_req_ctx
{
866 u32 hw_context
[TALITOS_MDEU_MAX_CONTEXT_SIZE
/ sizeof(u32
)];
867 unsigned int hw_context_size
;
868 u8 buf
[2][HASH_MAX_BLOCK_SIZE
];
873 unsigned int to_hash_later
;
875 struct scatterlist bufsl
[2];
876 struct scatterlist
*psrc
;
879 struct talitos_export_state
{
880 u32 hw_context
[TALITOS_MDEU_MAX_CONTEXT_SIZE
/ sizeof(u32
)];
881 u8 buf
[HASH_MAX_BLOCK_SIZE
];
885 unsigned int to_hash_later
;
889 static int aead_setkey(struct crypto_aead
*authenc
,
890 const u8
*key
, unsigned int keylen
)
892 struct talitos_ctx
*ctx
= crypto_aead_ctx(authenc
);
893 struct device
*dev
= ctx
->dev
;
894 struct crypto_authenc_keys keys
;
896 if (crypto_authenc_extractkeys(&keys
, key
, keylen
) != 0)
899 if (keys
.authkeylen
+ keys
.enckeylen
> TALITOS_MAX_KEY_SIZE
)
903 dma_unmap_single(dev
, ctx
->dma_key
, ctx
->keylen
, DMA_TO_DEVICE
);
905 memcpy(ctx
->key
, keys
.authkey
, keys
.authkeylen
);
906 memcpy(&ctx
->key
[keys
.authkeylen
], keys
.enckey
, keys
.enckeylen
);
908 ctx
->keylen
= keys
.authkeylen
+ keys
.enckeylen
;
909 ctx
->enckeylen
= keys
.enckeylen
;
910 ctx
->authkeylen
= keys
.authkeylen
;
911 ctx
->dma_key
= dma_map_single(dev
, ctx
->key
, ctx
->keylen
,
914 memzero_explicit(&keys
, sizeof(keys
));
918 memzero_explicit(&keys
, sizeof(keys
));
922 static int aead_des3_setkey(struct crypto_aead
*authenc
,
923 const u8
*key
, unsigned int keylen
)
925 struct talitos_ctx
*ctx
= crypto_aead_ctx(authenc
);
926 struct device
*dev
= ctx
->dev
;
927 struct crypto_authenc_keys keys
;
930 err
= crypto_authenc_extractkeys(&keys
, key
, keylen
);
935 if (keys
.authkeylen
+ keys
.enckeylen
> TALITOS_MAX_KEY_SIZE
)
938 err
= verify_aead_des3_key(authenc
, keys
.enckey
, keys
.enckeylen
);
943 dma_unmap_single(dev
, ctx
->dma_key
, ctx
->keylen
, DMA_TO_DEVICE
);
945 memcpy(ctx
->key
, keys
.authkey
, keys
.authkeylen
);
946 memcpy(&ctx
->key
[keys
.authkeylen
], keys
.enckey
, keys
.enckeylen
);
948 ctx
->keylen
= keys
.authkeylen
+ keys
.enckeylen
;
949 ctx
->enckeylen
= keys
.enckeylen
;
950 ctx
->authkeylen
= keys
.authkeylen
;
951 ctx
->dma_key
= dma_map_single(dev
, ctx
->key
, ctx
->keylen
,
955 memzero_explicit(&keys
, sizeof(keys
));
959 static void talitos_sg_unmap(struct device
*dev
,
960 struct talitos_edesc
*edesc
,
961 struct scatterlist
*src
,
962 struct scatterlist
*dst
,
963 unsigned int len
, unsigned int offset
)
965 struct talitos_private
*priv
= dev_get_drvdata(dev
);
966 bool is_sec1
= has_ftr_sec1(priv
);
967 unsigned int src_nents
= edesc
->src_nents
? : 1;
968 unsigned int dst_nents
= edesc
->dst_nents
? : 1;
970 if (is_sec1
&& dst
&& dst_nents
> 1) {
971 dma_sync_single_for_device(dev
, edesc
->dma_link_tbl
+ offset
,
972 len
, DMA_FROM_DEVICE
);
973 sg_pcopy_from_buffer(dst
, dst_nents
, edesc
->buf
+ offset
, len
,
977 if (src_nents
== 1 || !is_sec1
)
978 dma_unmap_sg(dev
, src
, src_nents
, DMA_TO_DEVICE
);
980 if (dst
&& (dst_nents
== 1 || !is_sec1
))
981 dma_unmap_sg(dev
, dst
, dst_nents
, DMA_FROM_DEVICE
);
982 } else if (src_nents
== 1 || !is_sec1
) {
983 dma_unmap_sg(dev
, src
, src_nents
, DMA_BIDIRECTIONAL
);
987 static void ipsec_esp_unmap(struct device
*dev
,
988 struct talitos_edesc
*edesc
,
989 struct aead_request
*areq
, bool encrypt
)
991 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
992 struct talitos_ctx
*ctx
= crypto_aead_ctx(aead
);
993 unsigned int ivsize
= crypto_aead_ivsize(aead
);
994 unsigned int authsize
= crypto_aead_authsize(aead
);
995 unsigned int cryptlen
= areq
->cryptlen
- (encrypt
? 0 : authsize
);
996 bool is_ipsec_esp
= edesc
->desc
.hdr
& DESC_HDR_TYPE_IPSEC_ESP
;
997 struct talitos_ptr
*civ_ptr
= &edesc
->desc
.ptr
[is_ipsec_esp
? 2 : 3];
1000 unmap_single_talitos_ptr(dev
, &edesc
->desc
.ptr
[6],
1002 unmap_single_talitos_ptr(dev
, civ_ptr
, DMA_TO_DEVICE
);
1004 talitos_sg_unmap(dev
, edesc
, areq
->src
, areq
->dst
,
1005 cryptlen
+ authsize
, areq
->assoclen
);
1008 dma_unmap_single(dev
, edesc
->dma_link_tbl
, edesc
->dma_len
,
1011 if (!is_ipsec_esp
) {
1012 unsigned int dst_nents
= edesc
->dst_nents
? : 1;
1014 sg_pcopy_to_buffer(areq
->dst
, dst_nents
, ctx
->iv
, ivsize
,
1015 areq
->assoclen
+ cryptlen
- ivsize
);
1020 * ipsec_esp descriptor callbacks
1022 static void ipsec_esp_encrypt_done(struct device
*dev
,
1023 struct talitos_desc
*desc
, void *context
,
1026 struct aead_request
*areq
= context
;
1027 struct crypto_aead
*authenc
= crypto_aead_reqtfm(areq
);
1028 unsigned int ivsize
= crypto_aead_ivsize(authenc
);
1029 struct talitos_edesc
*edesc
;
1031 edesc
= container_of(desc
, struct talitos_edesc
, desc
);
1033 ipsec_esp_unmap(dev
, edesc
, areq
, true);
1035 dma_unmap_single(dev
, edesc
->iv_dma
, ivsize
, DMA_TO_DEVICE
);
1039 aead_request_complete(areq
, err
);
1042 static void ipsec_esp_decrypt_swauth_done(struct device
*dev
,
1043 struct talitos_desc
*desc
,
1044 void *context
, int err
)
1046 struct aead_request
*req
= context
;
1047 struct crypto_aead
*authenc
= crypto_aead_reqtfm(req
);
1048 unsigned int authsize
= crypto_aead_authsize(authenc
);
1049 struct talitos_edesc
*edesc
;
1052 edesc
= container_of(desc
, struct talitos_edesc
, desc
);
1054 ipsec_esp_unmap(dev
, edesc
, req
, false);
1058 oicv
= edesc
->buf
+ edesc
->dma_len
;
1059 icv
= oicv
- authsize
;
1061 err
= crypto_memneq(oicv
, icv
, authsize
) ? -EBADMSG
: 0;
1066 aead_request_complete(req
, err
);
1069 static void ipsec_esp_decrypt_hwauth_done(struct device
*dev
,
1070 struct talitos_desc
*desc
,
1071 void *context
, int err
)
1073 struct aead_request
*req
= context
;
1074 struct talitos_edesc
*edesc
;
1076 edesc
= container_of(desc
, struct talitos_edesc
, desc
);
1078 ipsec_esp_unmap(dev
, edesc
, req
, false);
1080 /* check ICV auth status */
1081 if (!err
&& ((desc
->hdr_lo
& DESC_HDR_LO_ICCR1_MASK
) !=
1082 DESC_HDR_LO_ICCR1_PASS
))
1087 aead_request_complete(req
, err
);
1091 * convert scatterlist to SEC h/w link table format
1092 * stop at cryptlen bytes
1094 static int sg_to_link_tbl_offset(struct scatterlist
*sg
, int sg_count
,
1095 unsigned int offset
, int datalen
, int elen
,
1096 struct talitos_ptr
*link_tbl_ptr
)
1098 int n_sg
= elen
? sg_count
+ 1 : sg_count
;
1100 int cryptlen
= datalen
+ elen
;
1102 while (cryptlen
&& sg
&& n_sg
--) {
1103 unsigned int len
= sg_dma_len(sg
);
1105 if (offset
>= len
) {
1115 if (datalen
> 0 && len
> datalen
) {
1116 to_talitos_ptr(link_tbl_ptr
+ count
,
1117 sg_dma_address(sg
) + offset
, datalen
, 0);
1118 to_talitos_ptr_ext_set(link_tbl_ptr
+ count
, 0, 0);
1123 to_talitos_ptr(link_tbl_ptr
+ count
,
1124 sg_dma_address(sg
) + offset
, len
, 0);
1125 to_talitos_ptr_ext_set(link_tbl_ptr
+ count
, 0, 0);
1135 /* tag end of link table */
1137 to_talitos_ptr_ext_set(link_tbl_ptr
+ count
- 1,
1138 DESC_PTR_LNKTBL_RET
, 0);
1143 static int talitos_sg_map_ext(struct device
*dev
, struct scatterlist
*src
,
1144 unsigned int len
, struct talitos_edesc
*edesc
,
1145 struct talitos_ptr
*ptr
, int sg_count
,
1146 unsigned int offset
, int tbl_off
, int elen
,
1149 struct talitos_private
*priv
= dev_get_drvdata(dev
);
1150 bool is_sec1
= has_ftr_sec1(priv
);
1153 to_talitos_ptr(ptr
, 0, 0, is_sec1
);
1156 to_talitos_ptr_ext_set(ptr
, elen
, is_sec1
);
1157 if (sg_count
== 1 && !force
) {
1158 to_talitos_ptr(ptr
, sg_dma_address(src
) + offset
, len
, is_sec1
);
1162 to_talitos_ptr(ptr
, edesc
->dma_link_tbl
+ offset
, len
, is_sec1
);
1165 sg_count
= sg_to_link_tbl_offset(src
, sg_count
, offset
, len
, elen
,
1166 &edesc
->link_tbl
[tbl_off
]);
1167 if (sg_count
== 1 && !force
) {
1168 /* Only one segment now, so no link tbl needed*/
1169 copy_talitos_ptr(ptr
, &edesc
->link_tbl
[tbl_off
], is_sec1
);
1172 to_talitos_ptr(ptr
, edesc
->dma_link_tbl
+
1173 tbl_off
* sizeof(struct talitos_ptr
), len
, is_sec1
);
1174 to_talitos_ptr_ext_or(ptr
, DESC_PTR_LNKTBL_JUMP
, is_sec1
);
1179 static int talitos_sg_map(struct device
*dev
, struct scatterlist
*src
,
1180 unsigned int len
, struct talitos_edesc
*edesc
,
1181 struct talitos_ptr
*ptr
, int sg_count
,
1182 unsigned int offset
, int tbl_off
)
1184 return talitos_sg_map_ext(dev
, src
, len
, edesc
, ptr
, sg_count
, offset
,
/*
 * ipsec_esp - populate all seven descriptor pointers for an AEAD request
 * and submit the descriptor to the SEC channel. Pointer layout depends on
 * whether the header selects the IPSEC_ESP descriptor type (civ/ckey swap
 * between ptr[2] and ptr[3]). NOTE(review): garbled extraction — several
 * declarations (ret, sg_count, tbl_off, elen, the "encrypt" parameter) and
 * error paths are missing from this view.
 */
1189 * fill in and submit ipsec_esp descriptor
1191 static int ipsec_esp(struct talitos_edesc
*edesc
, struct aead_request
*areq
,
1193 void (*callback
)(struct device
*dev
,
1194 struct talitos_desc
*desc
,
1195 void *context
, int error
))
1197 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
1198 unsigned int authsize
= crypto_aead_authsize(aead
);
1199 struct talitos_ctx
*ctx
= crypto_aead_ctx(aead
);
1200 struct device
*dev
= ctx
->dev
;
1201 struct talitos_desc
*desc
= &edesc
->desc
;
/* On decrypt the trailing ICV is not part of the cipher length. */
1202 unsigned int cryptlen
= areq
->cryptlen
- (encrypt
? 0 : authsize
);
1203 unsigned int ivsize
= crypto_aead_ivsize(aead
);
1207 bool sync_needed
= false;
1208 struct talitos_private
*priv
= dev_get_drvdata(dev
);
1209 bool is_sec1
= has_ftr_sec1(priv
);
1210 bool is_ipsec_esp
= desc
->hdr
& DESC_HDR_TYPE_IPSEC_ESP
;
/* cipher-IV and cipher-key pointer slots swap with descriptor type. */
1211 struct talitos_ptr
*civ_ptr
= &desc
->ptr
[is_ipsec_esp
? 2 : 3];
1212 struct talitos_ptr
*ckey_ptr
= &desc
->ptr
[is_ipsec_esp
? 3 : 2];
/* Generated/stashed ICV lives at the very end of the link-table buffer. */
1213 dma_addr_t dma_icv
= edesc
->dma_link_tbl
+ edesc
->dma_len
- authsize
;
/* ptr[0]: auth key */
1216 to_talitos_ptr(&desc
->ptr
[0], ctx
->dma_key
, ctx
->authkeylen
, is_sec1
);
/* SEC1 cannot scatter/gather: linearize multi-segment source into buf. */
1218 sg_count
= edesc
->src_nents
?: 1;
1219 if (is_sec1
&& sg_count
> 1)
1220 sg_copy_to_buffer(areq
->src
, sg_count
, edesc
->buf
,
1221 areq
->assoclen
+ cryptlen
);
1223 sg_count
= dma_map_sg(dev
, areq
->src
, sg_count
,
1224 (areq
->src
== areq
->dst
) ?
1225 DMA_BIDIRECTIONAL
: DMA_TO_DEVICE
);
/* ptr[1]: associated data */
1228 ret
= talitos_sg_map(dev
, areq
->src
, areq
->assoclen
, edesc
,
1229 &desc
->ptr
[1], sg_count
, 0, tbl_off
);
/* cipher IV and cipher key */
1237 to_talitos_ptr(civ_ptr
, edesc
->iv_dma
, ivsize
, is_sec1
);
1240 to_talitos_ptr(ckey_ptr
, ctx
->dma_key
+ ctx
->authkeylen
,
1241 ctx
->enckeylen
, is_sec1
);
1245 * map and adjust cipher len to aead request cryptlen.
1246 * extent is bytes of HMAC postpended to ciphertext,
1247 * typically 12 for ipsec
1249 if (is_ipsec_esp
&& (desc
->hdr
& DESC_HDR_MODE1_MDEU_CICV
))
/* ptr[4]: cipher input */
1252 ret
= talitos_sg_map_ext(dev
, areq
->src
, cryptlen
, edesc
, &desc
->ptr
[4],
1253 sg_count
, areq
->assoclen
, tbl_off
, elen
,
/* Separate destination: map it for DMA as well. */
1262 if (areq
->src
!= areq
->dst
) {
1263 sg_count
= edesc
->dst_nents
? : 1;
1264 if (!is_sec1
|| sg_count
== 1)
1265 dma_map_sg(dev
, areq
->dst
, sg_count
, DMA_FROM_DEVICE
);
1268 if (is_ipsec_esp
&& encrypt
)
/* ptr[5]: cipher output; force a link table on ipsec-esp decrypt so the
 * ICV entry below can be appended. */
1272 ret
= talitos_sg_map_ext(dev
, areq
->dst
, cryptlen
, edesc
, &desc
->ptr
[5],
1273 sg_count
, areq
->assoclen
, tbl_off
, elen
,
1274 is_ipsec_esp
&& !encrypt
);
1277 if (!encrypt
&& is_ipsec_esp
) {
1278 struct talitos_ptr
*tbl_ptr
= &edesc
->link_tbl
[tbl_off
];
1280 /* Add an entry to the link table for ICV data */
1281 to_talitos_ptr_ext_set(tbl_ptr
- 1, 0, is_sec1
);
1282 to_talitos_ptr_ext_set(tbl_ptr
, DESC_PTR_LNKTBL_RET
, is_sec1
);
1284 /* icv data follows link tables */
1285 to_talitos_ptr(tbl_ptr
, dma_icv
, authsize
, is_sec1
);
1286 to_talitos_ptr_ext_or(&desc
->ptr
[5], authsize
, is_sec1
);
1288 } else if (!encrypt
) {
1289 to_talitos_ptr(&desc
->ptr
[6], dma_icv
, authsize
, is_sec1
);
1291 } else if (!is_ipsec_esp
) {
/* ptr[6]: ICV output after the ciphertext for non-ipsec-esp encrypt. */
1292 talitos_sg_map(dev
, areq
->dst
, authsize
, edesc
, &desc
->ptr
[6],
1293 sg_count
, areq
->assoclen
+ cryptlen
, tbl_off
);
/* iv out (ipsec-esp only — the guarding condition is elided here). */
1298 map_single_talitos_ptr(dev
, &desc
->ptr
[6], ivsize
, ctx
->iv
,
1302 dma_sync_single_for_device(dev
, edesc
->dma_link_tbl
,
/* Hand the completed descriptor to the channel; on synchronous failure
 * undo all the mappings made above. */
1306 ret
= talitos_submit(dev
, ctx
->ch
, desc
, callback
, areq
);
1307 if (ret
!= -EINPROGRESS
) {
1308 ipsec_esp_unmap(dev
, edesc
, areq
, encrypt
);
/*
 * talitos_edesc_alloc - allocate the extended descriptor: base struct plus
 * link tables, ICV scratch space and a trailing copy of the IV, then DMA-map
 * the link-table region. Returns ERR_PTR on length overflow, bad SG lists,
 * or allocation failure. NOTE(review): garbled extraction — some parameters
 * (iv, icv_stashing, cryptoflags, encrypt) and a few lines are elided.
 */
1315 * allocate and map the extended descriptor
1317 static struct talitos_edesc
*talitos_edesc_alloc(struct device
*dev
,
1318 struct scatterlist
*src
,
1319 struct scatterlist
*dst
,
1321 unsigned int assoclen
,
1322 unsigned int cryptlen
,
1323 unsigned int authsize
,
1324 unsigned int ivsize
,
1329 struct talitos_edesc
*edesc
;
1330 int src_nents
, dst_nents
, alloc_len
, dma_len
, src_len
, dst_len
;
1331 dma_addr_t iv_dma
= 0;
1332 gfp_t flags
= cryptoflags
& CRYPTO_TFM_REQ_MAY_SLEEP
? GFP_KERNEL
:
1334 struct talitos_private
*priv
= dev_get_drvdata(dev
);
1335 bool is_sec1
= has_ftr_sec1(priv
);
1336 int max_len
= is_sec1
? TALITOS1_MAX_DATA_LEN
: TALITOS2_MAX_DATA_LEN
;
/* Hardware has a hard per-descriptor data length limit. */
1338 if (cryptlen
+ authsize
> max_len
) {
1339 dev_err(dev
, "length exceeds h/w max limit\n");
1340 return ERR_PTR(-EINVAL
);
/* In-place (or hash-only) case: src covers everything including the ICV. */
1343 if (!dst
|| dst
== src
) {
1344 src_len
= assoclen
+ cryptlen
+ authsize
;
1345 src_nents
= sg_nents_for_len(src
, src_len
);
1346 if (src_nents
< 0) {
1347 dev_err(dev
, "Invalid number of src SG.\n");
1348 return ERR_PTR(-EINVAL
);
/* A single segment is recorded as 0 nents (no link table needed). */
1350 src_nents
= (src_nents
== 1) ? 0 : src_nents
;
1351 dst_nents
= dst
? src_nents
: 0;
1353 } else { /* dst && dst != src*/
1354 src_len
= assoclen
+ cryptlen
+ (encrypt
? 0 : authsize
);
1355 src_nents
= sg_nents_for_len(src
, src_len
);
1356 if (src_nents
< 0) {
1357 dev_err(dev
, "Invalid number of src SG.\n");
1358 return ERR_PTR(-EINVAL
);
1360 src_nents
= (src_nents
== 1) ? 0 : src_nents
;
1361 dst_len
= assoclen
+ cryptlen
+ (encrypt
? authsize
: 0);
1362 dst_nents
= sg_nents_for_len(dst
, dst_len
);
1363 if (dst_nents
< 0) {
1364 dev_err(dev
, "Invalid number of dst SG.\n");
1365 return ERR_PTR(-EINVAL
);
1367 dst_nents
= (dst_nents
== 1) ? 0 : dst_nents
;
1371 * allocate space for base edesc plus the link tables,
1372 * allowing for two separate entries for AD and generated ICV (+ 2),
1373 * and space for two sets of ICVs (stashed and generated)
1375 alloc_len
= sizeof(struct talitos_edesc
);
1376 if (src_nents
|| dst_nents
|| !encrypt
) {
/* SEC1 linearizes into a flat buffer; SEC2+ uses link-table entries. */
1378 dma_len
= (src_nents
? src_len
: 0) +
1379 (dst_nents
? dst_len
: 0) + authsize
;
1381 dma_len
= (src_nents
+ dst_nents
+ 2) *
1382 sizeof(struct talitos_ptr
) + authsize
;
1383 alloc_len
+= dma_len
;
1387 alloc_len
+= icv_stashing
? authsize
: 0;
1389 /* if its a ahash, add space for a second desc next to the first one */
1390 if (is_sec1
&& !dst
)
1391 alloc_len
+= sizeof(struct talitos_desc
);
/* IV copy is stored at the very end of the allocation. */
1392 alloc_len
+= ivsize
;
1394 edesc
= kmalloc(alloc_len
, GFP_DMA
| flags
);
1396 return ERR_PTR(-ENOMEM
);
1398 iv
= memcpy(((u8
*)edesc
) + alloc_len
- ivsize
, iv
, ivsize
);
1399 iv_dma
= dma_map_single(dev
, iv
, ivsize
, DMA_TO_DEVICE
);
1401 memset(&edesc
->desc
, 0, sizeof(edesc
->desc
));
1403 edesc
->src_nents
= src_nents
;
1404 edesc
->dst_nents
= dst_nents
;
1405 edesc
->iv_dma
= iv_dma
;
1406 edesc
->dma_len
= dma_len
;
1408 edesc
->dma_link_tbl
= dma_map_single(dev
, &edesc
->link_tbl
[0],
/*
 * aead_edesc_alloc - AEAD front-end for talitos_edesc_alloc(): derives the
 * auth size, IV size and effective cipher length from the aead request.
 */
1415 static struct talitos_edesc
*aead_edesc_alloc(struct aead_request
*areq
, u8
*iv
,
1416 int icv_stashing
, bool encrypt
)
1418 struct crypto_aead
*authenc
= crypto_aead_reqtfm(areq
);
1419 unsigned int authsize
= crypto_aead_authsize(authenc
);
1420 struct talitos_ctx
*ctx
= crypto_aead_ctx(authenc
);
1421 unsigned int ivsize
= crypto_aead_ivsize(authenc
);
/* Decrypt input carries the ICV; exclude it from the cipher length. */
1422 unsigned int cryptlen
= areq
->cryptlen
- (encrypt
? 0 : authsize
);
1424 return talitos_edesc_alloc(ctx
->dev
, areq
->src
, areq
->dst
,
1425 iv
, areq
->assoclen
, cryptlen
,
1426 authsize
, ivsize
, icv_stashing
,
1427 areq
->base
.flags
, encrypt
);
/*
 * aead_encrypt - .encrypt entry point: allocate an edesc, set the ENCRYPT
 * mode bit in the descriptor header and dispatch via ipsec_esp().
 */
1430 static int aead_encrypt(struct aead_request
*req
)
1432 struct crypto_aead
*authenc
= crypto_aead_reqtfm(req
);
1433 struct talitos_ctx
*ctx
= crypto_aead_ctx(authenc
);
1434 struct talitos_edesc
*edesc
;
1436 /* allocate extended descriptor */
1437 edesc
= aead_edesc_alloc(req
, req
->iv
, 0, true);
1439 return PTR_ERR(edesc
);
/* set encrypt */
1442 edesc
->desc
.hdr
= ctx
->desc_hdr_template
| DESC_HDR_MODE0_ENCRYPT
;
1444 return ipsec_esp(edesc
, req
, true, ipsec_esp_encrypt_done
);
/*
 * aead_decrypt - .decrypt entry point. Uses the engine's hardware ICV check
 * when the descriptor type and device features permit; otherwise stashes the
 * incoming ICV so the completion callback can compare it in software.
 */
1447 static int aead_decrypt(struct aead_request
*req
)
1449 struct crypto_aead
*authenc
= crypto_aead_reqtfm(req
);
1450 unsigned int authsize
= crypto_aead_authsize(authenc
);
1451 struct talitos_ctx
*ctx
= crypto_aead_ctx(authenc
);
1452 struct talitos_private
*priv
= dev_get_drvdata(ctx
->dev
);
1453 struct talitos_edesc
*edesc
;
1456 /* allocate extended descriptor */
1457 edesc
= aead_edesc_alloc(req
, req
->iv
, 1, false);
1459 return PTR_ERR(edesc
);
/* Hardware ICV check path: needs IPSEC_ESP type + HW_AUTH_CHECK feature,
 * and either fully-contiguous buffers or the extent-in-length feature. */
1461 if ((edesc
->desc
.hdr
& DESC_HDR_TYPE_IPSEC_ESP
) &&
1462 (priv
->features
& TALITOS_FTR_HW_AUTH_CHECK
) &&
1463 ((!edesc
->src_nents
&& !edesc
->dst_nents
) ||
1464 priv
->features
& TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT
)) {
1466 /* decrypt and check the ICV */
1467 edesc
->desc
.hdr
= ctx
->desc_hdr_template
|
1468 DESC_HDR_DIR_INBOUND
|
1469 DESC_HDR_MODE1_MDEU_CICV
;
1471 /* reset integrity check result bits */
1473 return ipsec_esp(edesc
, req
, false,
1474 ipsec_esp_decrypt_hwauth_done
);
1477 /* Have to check the ICV with software */
1478 edesc
->desc
.hdr
= ctx
->desc_hdr_template
| DESC_HDR_DIR_INBOUND
;
1480 /* stash incoming ICV for later cmp with ICV generated by the h/w */
1481 icvdata
= edesc
->buf
+ edesc
->dma_len
;
1483 sg_pcopy_to_buffer(req
->src
, edesc
->src_nents
? : 1, icvdata
, authsize
,
1484 req
->assoclen
+ req
->cryptlen
- authsize
);
1486 return ipsec_esp(edesc
, req
, false, ipsec_esp_decrypt_swauth_done
);
/*
 * skcipher_setkey - store the key in the tfm context and DMA-map it for the
 * engine, unmapping any previously mapped key first. (The guard around the
 * unmap and the final return are elided in this extraction.)
 */
1489 static int skcipher_setkey(struct crypto_skcipher
*cipher
,
1490 const u8
*key
, unsigned int keylen
)
1492 struct talitos_ctx
*ctx
= crypto_skcipher_ctx(cipher
);
1493 struct device
*dev
= ctx
->dev
;
/* Release the mapping of the previous key before overwriting it. */
1496 dma_unmap_single(dev
, ctx
->dma_key
, ctx
->keylen
, DMA_TO_DEVICE
);
1498 memcpy(&ctx
->key
, key
, keylen
);
1499 ctx
->keylen
= keylen
;
1501 ctx
->dma_key
= dma_map_single(dev
, ctx
->key
, keylen
, DMA_TO_DEVICE
);
1506 static int skcipher_des_setkey(struct crypto_skcipher
*cipher
,
1507 const u8
*key
, unsigned int keylen
)
1509 return verify_skcipher_des_key(cipher
, key
) ?:
1510 skcipher_setkey(cipher
, key
, keylen
);
1513 static int skcipher_des3_setkey(struct crypto_skcipher
*cipher
,
1514 const u8
*key
, unsigned int keylen
)
1516 return verify_skcipher_des3_key(cipher
, key
) ?:
1517 skcipher_setkey(cipher
, key
, keylen
);
/*
 * skcipher_aes_setkey - accept only the three legal AES key sizes before
 * installing the key. (The failure return for other sizes is elided in
 * this extraction.)
 */
1520 static int skcipher_aes_setkey(struct crypto_skcipher
*cipher
,
1521 const u8
*key
, unsigned int keylen
)
1523 if (keylen
== AES_KEYSIZE_128
|| keylen
== AES_KEYSIZE_192
||
1524 keylen
== AES_KEYSIZE_256
)
1525 return skcipher_setkey(cipher
, key
, keylen
);
/*
 * common_nonsnoop_unmap - undo the DMA mappings made for an skcipher
 * request: IV-out pointer (ptr[5]), src/dst scatterlists, IV-in pointer
 * (ptr[1]) and — when present — the link-table buffer.
 */
1530 static void common_nonsnoop_unmap(struct device
*dev
,
1531 struct talitos_edesc
*edesc
,
1532 struct skcipher_request
*areq
)
1534 unmap_single_talitos_ptr(dev
, &edesc
->desc
.ptr
[5], DMA_FROM_DEVICE
);
1536 talitos_sg_unmap(dev
, edesc
, areq
->src
, areq
->dst
, areq
->cryptlen
, 0);
1537 unmap_single_talitos_ptr(dev
, &edesc
->desc
.ptr
[1], DMA_TO_DEVICE
);
1540 dma_unmap_single(dev
, edesc
->dma_link_tbl
, edesc
->dma_len
,
/*
 * skcipher_done - completion callback: recover the edesc from the embedded
 * descriptor, unmap everything, copy the output IV back to the request and
 * complete it toward the caller.
 */
1544 static void skcipher_done(struct device
*dev
,
1545 struct talitos_desc
*desc
, void *context
,
1548 struct skcipher_request
*areq
= context
;
1549 struct crypto_skcipher
*cipher
= crypto_skcipher_reqtfm(areq
);
1550 struct talitos_ctx
*ctx
= crypto_skcipher_ctx(cipher
);
1551 unsigned int ivsize
= crypto_skcipher_ivsize(cipher
);
1552 struct talitos_edesc
*edesc
;
/* The descriptor is embedded in the edesc, so recover the container. */
1554 edesc
= container_of(desc
, struct talitos_edesc
, desc
);
1556 common_nonsnoop_unmap(dev
, edesc
, areq
);
/* Propagate the engine-produced IV for chaining modes. */
1557 memcpy(areq
->iv
, ctx
->iv
, ivsize
);
1561 areq
->base
.complete(&areq
->base
, err
);
/*
 * common_nonsnoop - fill the descriptor pointers for an skcipher request
 * (ptr[1]=IV in, ptr[2]=key, ptr[3]=cipher in, ptr[4]=cipher out,
 * ptr[5]=IV out) and submit it. NOTE(review): garbled extraction — local
 * declarations (ret, sg_count) and some cleanup lines are missing.
 */
1564 static int common_nonsnoop(struct talitos_edesc
*edesc
,
1565 struct skcipher_request
*areq
,
1566 void (*callback
) (struct device
*dev
,
1567 struct talitos_desc
*desc
,
1568 void *context
, int error
))
1570 struct crypto_skcipher
*cipher
= crypto_skcipher_reqtfm(areq
);
1571 struct talitos_ctx
*ctx
= crypto_skcipher_ctx(cipher
);
1572 struct device
*dev
= ctx
->dev
;
1573 struct talitos_desc
*desc
= &edesc
->desc
;
1574 unsigned int cryptlen
= areq
->cryptlen
;
1575 unsigned int ivsize
= crypto_skcipher_ivsize(cipher
);
1577 bool sync_needed
= false;
1578 struct talitos_private
*priv
= dev_get_drvdata(dev
);
1579 bool is_sec1
= has_ftr_sec1(priv
);
1581 /* first DWORD empty */
/* cipher iv */
1584 to_talitos_ptr(&desc
->ptr
[1], edesc
->iv_dma
, ivsize
, is_sec1
);
/* cipher key */
1587 to_talitos_ptr(&desc
->ptr
[2], ctx
->dma_key
, ctx
->keylen
, is_sec1
);
/* SEC1 cannot scatter/gather: linearize multi-segment source. */
1589 sg_count
= edesc
->src_nents
?: 1;
1590 if (is_sec1
&& sg_count
> 1)
1591 sg_copy_to_buffer(areq
->src
, sg_count
, edesc
->buf
,
1594 sg_count
= dma_map_sg(dev
, areq
->src
, sg_count
,
1595 (areq
->src
== areq
->dst
) ?
1596 DMA_BIDIRECTIONAL
: DMA_TO_DEVICE
);
/* cipher in */
1600 sg_count
= talitos_sg_map(dev
, areq
->src
, cryptlen
, edesc
,
1601 &desc
->ptr
[3], sg_count
, 0, 0);
/* cipher out — map a distinct destination when not in place. */
1606 if (areq
->src
!= areq
->dst
) {
1607 sg_count
= edesc
->dst_nents
? : 1;
1608 if (!is_sec1
|| sg_count
== 1)
1609 dma_map_sg(dev
, areq
->dst
, sg_count
, DMA_FROM_DEVICE
);
/* dst link table starts after the src entries (src_nents + 1). */
1612 ret
= talitos_sg_map(dev
, areq
->dst
, cryptlen
, edesc
, &desc
->ptr
[4],
1613 sg_count
, 0, (edesc
->src_nents
+ 1));
/* iv out */
1618 map_single_talitos_ptr(dev
, &desc
->ptr
[5], ivsize
, ctx
->iv
,
1621 /* last DWORD empty */
1624 dma_sync_single_for_device(dev
, edesc
->dma_link_tbl
,
1625 edesc
->dma_len
, DMA_BIDIRECTIONAL
);
1627 ret
= talitos_submit(dev
, ctx
->ch
, desc
, callback
, areq
);
1628 if (ret
!= -EINPROGRESS
) {
1629 common_nonsnoop_unmap(dev
, edesc
, areq
);
/*
 * skcipher_edesc_alloc - skcipher front-end for talitos_edesc_alloc():
 * no associated data, no authsize, no ICV stashing.
 */
1635 static struct talitos_edesc
*skcipher_edesc_alloc(struct skcipher_request
*
1638 struct crypto_skcipher
*cipher
= crypto_skcipher_reqtfm(areq
);
1639 struct talitos_ctx
*ctx
= crypto_skcipher_ctx(cipher
);
1640 unsigned int ivsize
= crypto_skcipher_ivsize(cipher
);
1642 return talitos_edesc_alloc(ctx
->dev
, areq
->src
, areq
->dst
,
1643 areq
->iv
, 0, areq
->cryptlen
, 0, ivsize
, 0,
1644 areq
->base
.flags
, encrypt
);
/*
 * skcipher_encrypt - .encrypt entry point: reject non-block-multiple
 * lengths (early-return bodies are elided in this extraction), allocate
 * the edesc, set ENCRYPT mode and submit via common_nonsnoop().
 */
1647 static int skcipher_encrypt(struct skcipher_request
*areq
)
1649 struct crypto_skcipher
*cipher
= crypto_skcipher_reqtfm(areq
);
1650 struct talitos_ctx
*ctx
= crypto_skcipher_ctx(cipher
);
1651 struct talitos_edesc
*edesc
;
1652 unsigned int blocksize
=
1653 crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher
));
/* Zero-length requests and partial blocks are handled up front. */
1655 if (!areq
->cryptlen
)
1658 if (areq
->cryptlen
% blocksize
)
1661 /* allocate extended descriptor */
1662 edesc
= skcipher_edesc_alloc(areq
, true);
1664 return PTR_ERR(edesc
);
/* set encrypt */
1667 edesc
->desc
.hdr
= ctx
->desc_hdr_template
| DESC_HDR_MODE0_ENCRYPT
;
1669 return common_nonsnoop(edesc
, areq
, skcipher_done
);
/*
 * skcipher_decrypt - .decrypt entry point: mirror of skcipher_encrypt()
 * but sets the INBOUND direction bit instead of ENCRYPT mode.
 */
1672 static int skcipher_decrypt(struct skcipher_request
*areq
)
1674 struct crypto_skcipher
*cipher
= crypto_skcipher_reqtfm(areq
);
1675 struct talitos_ctx
*ctx
= crypto_skcipher_ctx(cipher
);
1676 struct talitos_edesc
*edesc
;
1677 unsigned int blocksize
=
1678 crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher
));
/* Zero-length requests and partial blocks are handled up front. */
1680 if (!areq
->cryptlen
)
1683 if (areq
->cryptlen
% blocksize
)
1686 /* allocate extended descriptor */
1687 edesc
= skcipher_edesc_alloc(areq
, false);
1689 return PTR_ERR(edesc
);
1691 edesc
->desc
.hdr
= ctx
->desc_hdr_template
| DESC_HDR_DIR_INBOUND
;
1693 return common_nonsnoop(edesc
, areq
, skcipher_done
);
/*
 * common_nonsnoop_hash_unmap - tear down the DMA mappings of an ahash
 * request, including those of the optional chained second descriptor
 * (desc2, stored after the data buffer) used on SEC1.
 */
1696 static void common_nonsnoop_hash_unmap(struct device
*dev
,
1697 struct talitos_edesc
*edesc
,
1698 struct ahash_request
*areq
)
1700 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
1701 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(areq
);
1702 struct talitos_private
*priv
= dev_get_drvdata(dev
);
1703 bool is_sec1
= has_ftr_sec1(priv
);
1704 struct talitos_desc
*desc
= &edesc
->desc
;
/* Second descriptor, if any, lives right after the edesc data buffer. */
1705 struct talitos_desc
*desc2
= (struct talitos_desc
*)
1706 (edesc
->buf
+ edesc
->dma_len
);
1708 unmap_single_talitos_ptr(dev
, &edesc
->desc
.ptr
[5], DMA_FROM_DEVICE
);
1709 if (desc
->next_desc
&&
1710 desc
->ptr
[5].ptr
!= desc2
->ptr
[5].ptr
)
1711 unmap_single_talitos_ptr(dev
, &desc2
->ptr
[5], DMA_FROM_DEVICE
);
/* Hand the finished digest back to the caller (guard elided here). */
1713 memcpy(areq
->result
, req_ctx
->hw_context
,
1714 crypto_ahash_digestsize(tfm
));
1717 talitos_sg_unmap(dev
, edesc
, req_ctx
->psrc
, NULL
, 0, 0);
1719 /* When using hashctx-in, must unmap it. */
1720 if (from_talitos_ptr_len(&edesc
->desc
.ptr
[1], is_sec1
))
1721 unmap_single_talitos_ptr(dev
, &edesc
->desc
.ptr
[1],
1723 else if (desc
->next_desc
)
1724 unmap_single_talitos_ptr(dev
, &desc2
->ptr
[1],
/* SEC1 buffered-data pointer (ptr[3]) also needs unmapping. */
1727 if (is_sec1
&& req_ctx
->nbuf
)
1728 unmap_single_talitos_ptr(dev
, &desc
->ptr
[3],
1732 dma_unmap_single(dev
, edesc
->dma_link_tbl
, edesc
->dma_len
,
1735 if (edesc
->desc
.next_desc
)
1736 dma_unmap_single(dev
, be32_to_cpu(edesc
->desc
.next_desc
),
1737 TALITOS_DESC_SIZE
, DMA_BIDIRECTIONAL
);
/*
 * ahash_done - ahash completion callback: rotate the double buffer when a
 * partial block was saved for the next operation, unmap, and complete.
 */
1740 static void ahash_done(struct device
*dev
,
1741 struct talitos_desc
*desc
, void *context
,
1744 struct ahash_request
*areq
= context
;
1745 struct talitos_edesc
*edesc
=
1746 container_of(desc
, struct talitos_edesc
, desc
);
1747 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
1749 if (!req_ctx
->last
&& req_ctx
->to_hash_later
) {
1750 /* Position any partial block for next update/final/finup */
1751 req_ctx
->buf_idx
= (req_ctx
->buf_idx
+ 1) & 1;
1752 req_ctx
->nbuf
= req_ctx
->to_hash_later
;
1754 common_nonsnoop_hash_unmap(dev
, edesc
, areq
);
1758 areq
->base
.complete(&areq
->base
, err
);
/*
 * talitos_handle_buggy_hash - SEC1 workaround: the engine mishandles
 * zero-length hash input, so clear the hardware-pad bit and feed it a
 * pre-padded empty block (0x80 followed by zeros) instead.
 */
1762 * SEC1 doesn't like hashing of 0 sized message, so we do the padding
1763 * ourself and submit a padded block
1765 static void talitos_handle_buggy_hash(struct talitos_ctx
*ctx
,
1766 struct talitos_edesc
*edesc
,
1767 struct talitos_ptr
*ptr
)
1769 static u8 padded_hash
[64] = {
1770 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1771 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1772 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1773 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1776 pr_err_once("Bug in SEC1, padding ourself\n");
1777 edesc
->desc
.hdr
&= ~DESC_HDR_MODE0_MDEU_PAD
;
1778 map_single_talitos_ptr(ctx
->dev
, ptr
, sizeof(padded_hash
),
1779 (char *)padded_hash
, DMA_TO_DEVICE
);
/*
 * common_nonsnoop_hash - build and submit the hash descriptor:
 * ptr[1]=hash context in, ptr[2]=HMAC key, ptr[3]=data in, ptr[5]=digest
 * or context out. On SEC1, when previously buffered data coexists with new
 * data, a second chained descriptor (desc2) is built after the data buffer.
 * NOTE(review): garbled extraction — several declarations (ret, sg_count)
 * and conditional lines are missing from this view.
 */
1782 static int common_nonsnoop_hash(struct talitos_edesc
*edesc
,
1783 struct ahash_request
*areq
, unsigned int length
,
1784 void (*callback
) (struct device
*dev
,
1785 struct talitos_desc
*desc
,
1786 void *context
, int error
))
1788 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(areq
);
1789 struct talitos_ctx
*ctx
= crypto_ahash_ctx(tfm
);
1790 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
1791 struct device
*dev
= ctx
->dev
;
1792 struct talitos_desc
*desc
= &edesc
->desc
;
1794 bool sync_needed
= false;
1795 struct talitos_private
*priv
= dev_get_drvdata(dev
);
1796 bool is_sec1
= has_ftr_sec1(priv
);
1799 /* first DWORD empty */
1801 /* hash context in */
1802 if (!req_ctx
->first
|| req_ctx
->swinit
) {
1803 map_single_talitos_ptr_nosync(dev
, &desc
->ptr
[1],
1804 req_ctx
->hw_context_size
,
1805 req_ctx
->hw_context
,
1807 req_ctx
->swinit
= 0;
1809 /* Indicate next op is not the first. */
/* HMAC key */
1814 to_talitos_ptr(&desc
->ptr
[2], ctx
->dma_key
, ctx
->keylen
,
/* On SEC1 the previously buffered bytes go via ptr[3] separately, so
 * they are excluded from the scatterlist length here. */
1817 if (is_sec1
&& req_ctx
->nbuf
)
1818 length
-= req_ctx
->nbuf
;
1820 sg_count
= edesc
->src_nents
?: 1;
1821 if (is_sec1
&& sg_count
> 1)
1822 sg_copy_to_buffer(req_ctx
->psrc
, sg_count
, edesc
->buf
, length
);
1824 sg_count
= dma_map_sg(dev
, req_ctx
->psrc
, sg_count
,
1829 if (is_sec1
&& req_ctx
->nbuf
) {
1830 map_single_talitos_ptr(dev
, &desc
->ptr
[3], req_ctx
->nbuf
,
1831 req_ctx
->buf
[req_ctx
->buf_idx
],
1834 sg_count
= talitos_sg_map(dev
, req_ctx
->psrc
, length
, edesc
,
1835 &desc
->ptr
[3], sg_count
, 0, 0);
1840 /* fifth DWORD empty */
1842 /* hash/HMAC out -or- hash context out */
1844 map_single_talitos_ptr(dev
, &desc
->ptr
[5],
1845 crypto_ahash_digestsize(tfm
),
1846 req_ctx
->hw_context
, DMA_FROM_DEVICE
);
1848 map_single_talitos_ptr_nosync(dev
, &desc
->ptr
[5],
1849 req_ctx
->hw_context_size
,
1850 req_ctx
->hw_context
,
1853 /* last DWORD empty */
1855 if (is_sec1
&& from_talitos_ptr_len(&desc
->ptr
[3], true) == 0)
1856 talitos_handle_buggy_hash(ctx
, edesc
, &desc
->ptr
[3]);
/* SEC1: buffered bytes + fresh data need two chained descriptors. */
1858 if (is_sec1
&& req_ctx
->nbuf
&& length
) {
1859 struct talitos_desc
*desc2
= (struct talitos_desc
*)
1860 (edesc
->buf
+ edesc
->dma_len
);
1861 dma_addr_t next_desc
;
1863 memset(desc2
, 0, sizeof(*desc2
));
1864 desc2
->hdr
= desc
->hdr
;
1865 desc2
->hdr
&= ~DESC_HDR_MODE0_MDEU_INIT
;
1866 desc2
->hdr1
= desc2
->hdr
;
/* First descriptor becomes a non-final continuation step. */
1867 desc
->hdr
&= ~DESC_HDR_MODE0_MDEU_PAD
;
1868 desc
->hdr
|= DESC_HDR_MODE0_MDEU_CONT
;
1869 desc
->hdr
&= ~DESC_HDR_DONE_NOTIFY
;
1871 if (desc
->ptr
[1].ptr
)
1872 copy_talitos_ptr(&desc2
->ptr
[1], &desc
->ptr
[1],
1875 map_single_talitos_ptr_nosync(dev
, &desc2
->ptr
[1],
1876 req_ctx
->hw_context_size
,
1877 req_ctx
->hw_context
,
1879 copy_talitos_ptr(&desc2
->ptr
[2], &desc
->ptr
[2], is_sec1
);
1880 sg_count
= talitos_sg_map(dev
, req_ctx
->psrc
, length
, edesc
,
1881 &desc2
->ptr
[3], sg_count
, 0, 0);
1884 copy_talitos_ptr(&desc2
->ptr
[5], &desc
->ptr
[5], is_sec1
);
/* First descriptor now writes the intermediate context instead. */
1886 map_single_talitos_ptr_nosync(dev
, &desc
->ptr
[5],
1887 req_ctx
->hw_context_size
,
1888 req_ctx
->hw_context
,
1891 next_desc
= dma_map_single(dev
, &desc2
->hdr1
, TALITOS_DESC_SIZE
,
1893 desc
->next_desc
= cpu_to_be32(next_desc
);
1897 dma_sync_single_for_device(dev
, edesc
->dma_link_tbl
,
1898 edesc
->dma_len
, DMA_BIDIRECTIONAL
);
1900 ret
= talitos_submit(dev
, ctx
->ch
, desc
, callback
, areq
);
1901 if (ret
!= -EINPROGRESS
) {
1902 common_nonsnoop_hash_unmap(dev
, edesc
, areq
);
/*
 * ahash_edesc_alloc - ahash front-end for talitos_edesc_alloc(): hash-only,
 * so no dst, no IV, no authsize. On SEC1 (per the visible subtraction) the
 * already-buffered bytes are excluded from the new byte count.
 */
1908 static struct talitos_edesc
*ahash_edesc_alloc(struct ahash_request
*areq
,
1909 unsigned int nbytes
)
1911 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(areq
);
1912 struct talitos_ctx
*ctx
= crypto_ahash_ctx(tfm
);
1913 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
1914 struct talitos_private
*priv
= dev_get_drvdata(ctx
->dev
);
1915 bool is_sec1
= has_ftr_sec1(priv
);
1918 nbytes
-= req_ctx
->nbuf
;
1920 return talitos_edesc_alloc(ctx
->dev
, req_ctx
->psrc
, NULL
, NULL
, 0,
1921 nbytes
, 0, 0, 0, areq
->base
.flags
, false);
/*
 * ahash_init - .init entry point: reset the per-request context, pick the
 * hardware context size from the digest size, and prime the context
 * buffer's DMA coherency with a map/unmap round trip.
 */
1924 static int ahash_init(struct ahash_request
*areq
)
1926 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(areq
);
1927 struct talitos_ctx
*ctx
= crypto_ahash_ctx(tfm
);
1928 struct device
*dev
= ctx
->dev
;
1929 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
1933 /* Initialize the context */
1934 req_ctx
->buf_idx
= 0;
1936 req_ctx
->first
= 1; /* first indicates h/w must init its context */
1937 req_ctx
->swinit
= 0; /* assume h/w init of context */
1938 size
= (crypto_ahash_digestsize(tfm
) <= SHA256_DIGEST_SIZE
)
1939 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
1940 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512
;
1941 req_ctx
->hw_context_size
= size
;
1943 dma
= dma_map_single(dev
, req_ctx
->hw_context
, req_ctx
->hw_context_size
,
1945 dma_unmap_single(dev
, dma
, req_ctx
->hw_context_size
, DMA_TO_DEVICE
);
/*
 * ahash_init_sha224_swinit - software-initialized SHA-224: seed the
 * hardware context with the SHA-224 initial hash values and set swinit so
 * the engine runs SHA-256 over them without re-initializing.
 */
1951 * on h/w without explicit sha224 support, we initialize h/w context
1952 * manually with sha224 constants, and tell it to run sha256.
1954 static int ahash_init_sha224_swinit(struct ahash_request
*areq
)
1956 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
1958 req_ctx
->hw_context
[0] = SHA224_H0
;
1959 req_ctx
->hw_context
[1] = SHA224_H1
;
1960 req_ctx
->hw_context
[2] = SHA224_H2
;
1961 req_ctx
->hw_context
[3] = SHA224_H3
;
1962 req_ctx
->hw_context
[4] = SHA224_H4
;
1963 req_ctx
->hw_context
[5] = SHA224_H5
;
1964 req_ctx
->hw_context
[6] = SHA224_H6
;
1965 req_ctx
->hw_context
[7] = SHA224_H7
;
1967 /* init 64-bit count */
1968 req_ctx
->hw_context
[8] = 0;
1969 req_ctx
->hw_context
[9] = 0;
1972 req_ctx
->swinit
= 1;/* prevent h/w initting context with sha256 values*/
/*
 * ahash_process_req - core of update/final/finup/digest: buffer data that
 * does not yet fill a block, otherwise hash whole blocks now, stash the
 * remainder in the other half of the double buffer for later, then build
 * the descriptor header flags (PAD/CONT/INIT/HMAC) and submit.
 * NOTE(review): garbled extraction — locals (nents, nsg, offset), some
 * returns and a few condition lines are missing from this view.
 */
1977 static int ahash_process_req(struct ahash_request
*areq
, unsigned int nbytes
)
1979 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(areq
);
1980 struct talitos_ctx
*ctx
= crypto_ahash_ctx(tfm
);
1981 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
1982 struct talitos_edesc
*edesc
;
1983 unsigned int blocksize
=
1984 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm
));
1985 unsigned int nbytes_to_hash
;
1986 unsigned int to_hash_later
;
1989 struct device
*dev
= ctx
->dev
;
1990 struct talitos_private
*priv
= dev_get_drvdata(dev
);
1991 bool is_sec1
= has_ftr_sec1(priv
);
1992 u8
*ctx_buf
= req_ctx
->buf
[req_ctx
->buf_idx
];
1994 if (!req_ctx
->last
&& (nbytes
+ req_ctx
->nbuf
<= blocksize
)) {
1995 /* Buffer up to one whole block */
1996 nents
= sg_nents_for_len(areq
->src
, nbytes
);
1998 dev_err(ctx
->dev
, "Invalid number of src SG.\n");
2001 sg_copy_to_buffer(areq
->src
, nents
,
2002 ctx_buf
+ req_ctx
->nbuf
, nbytes
);
2003 req_ctx
->nbuf
+= nbytes
;
2007 /* At least (blocksize + 1) bytes are available to hash */
2008 nbytes_to_hash
= nbytes
+ req_ctx
->nbuf
;
2009 to_hash_later
= nbytes_to_hash
& (blocksize
- 1);
2013 else if (to_hash_later
)
2014 /* There is a partial block. Hash the full block(s) now */
2015 nbytes_to_hash
-= to_hash_later
;
2017 /* Keep one block buffered */
2018 nbytes_to_hash
-= blocksize
;
2019 to_hash_later
= blocksize
;
2022 /* Chain in any previously buffered data */
2023 if (!is_sec1
&& req_ctx
->nbuf
) {
2024 nsg
= (req_ctx
->nbuf
< nbytes_to_hash
) ? 2 : 1;
2025 sg_init_table(req_ctx
->bufsl
, nsg
);
2026 sg_set_buf(req_ctx
->bufsl
, ctx_buf
, req_ctx
->nbuf
);
2028 sg_chain(req_ctx
->bufsl
, 2, areq
->src
);
2029 req_ctx
->psrc
= req_ctx
->bufsl
;
2030 } else if (is_sec1
&& req_ctx
->nbuf
&& req_ctx
->nbuf
< blocksize
) {
/* SEC1 cannot chain scatterlists: top the buffer up to a block. */
2033 if (nbytes_to_hash
> blocksize
)
2034 offset
= blocksize
- req_ctx
->nbuf
;
2036 offset
= nbytes_to_hash
- req_ctx
->nbuf
;
2037 nents
= sg_nents_for_len(areq
->src
, offset
);
2039 dev_err(ctx
->dev
, "Invalid number of src SG.\n");
2042 sg_copy_to_buffer(areq
->src
, nents
,
2043 ctx_buf
+ req_ctx
->nbuf
, offset
);
2044 req_ctx
->nbuf
+= offset
;
2045 req_ctx
->psrc
= scatterwalk_ffwd(req_ctx
->bufsl
, areq
->src
,
2048 req_ctx
->psrc
= areq
->src
;
/* Stash the trailing partial block in the other buffer half. */
2050 if (to_hash_later
) {
2051 nents
= sg_nents_for_len(areq
->src
, nbytes
);
2053 dev_err(ctx
->dev
, "Invalid number of src SG.\n");
2056 sg_pcopy_to_buffer(areq
->src
, nents
,
2057 req_ctx
->buf
[(req_ctx
->buf_idx
+ 1) & 1],
2059 nbytes
- to_hash_later
);
2061 req_ctx
->to_hash_later
= to_hash_later
;
2063 /* Allocate extended descriptor */
2064 edesc
= ahash_edesc_alloc(areq
, nbytes_to_hash
);
2066 return PTR_ERR(edesc
);
2068 edesc
->desc
.hdr
= ctx
->desc_hdr_template
;
2070 /* On last one, request SEC to pad; otherwise continue */
2072 edesc
->desc
.hdr
|= DESC_HDR_MODE0_MDEU_PAD
;
2074 edesc
->desc
.hdr
|= DESC_HDR_MODE0_MDEU_CONT
;
2076 /* request SEC to INIT hash. */
2077 if (req_ctx
->first
&& !req_ctx
->swinit
)
2078 edesc
->desc
.hdr
|= DESC_HDR_MODE0_MDEU_INIT
;
2080 /* When the tfm context has a keylen, it's an HMAC.
2081 * A first or last (ie. not middle) descriptor must request HMAC.
2083 if (ctx
->keylen
&& (req_ctx
->first
|| req_ctx
->last
))
2084 edesc
->desc
.hdr
|= DESC_HDR_MODE0_MDEU_HMAC
;
2086 return common_nonsnoop_hash(edesc
, areq
, nbytes_to_hash
, ahash_done
);
/* ahash_update - .update entry point: not-final, hash areq->nbytes. */
2089 static int ahash_update(struct ahash_request
*areq
)
2091 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
2095 return ahash_process_req(areq
, areq
->nbytes
);
/* ahash_final - .final entry point: flush buffered data only (0 new bytes). */
2098 static int ahash_final(struct ahash_request
*areq
)
2100 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
2104 return ahash_process_req(areq
, 0);
/* ahash_finup - .finup entry point: final update with areq->nbytes. */
2107 static int ahash_finup(struct ahash_request
*areq
)
2109 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
2113 return ahash_process_req(areq
, areq
->nbytes
);
/*
 * ahash_digest - .digest entry point: one-shot init-then-finup over
 * areq->nbytes (the init call itself is elided in this extraction).
 */
2116 static int ahash_digest(struct ahash_request
*areq
)
2118 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
2119 struct crypto_ahash
*ahash
= crypto_ahash_reqtfm(areq
);
2124 return ahash_process_req(areq
, areq
->nbytes
);
/*
 * ahash_export - .export entry point: sync the hardware context back from
 * the device (map/unmap round trip), then copy context, buffered data and
 * the request-state flags into the caller-provided export structure.
 */
2127 static int ahash_export(struct ahash_request
*areq
, void *out
)
2129 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
2130 struct talitos_export_state
*export
= out
;
2131 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(areq
);
2132 struct talitos_ctx
*ctx
= crypto_ahash_ctx(tfm
);
2133 struct device
*dev
= ctx
->dev
;
2136 dma
= dma_map_single(dev
, req_ctx
->hw_context
, req_ctx
->hw_context_size
,
2138 dma_unmap_single(dev
, dma
, req_ctx
->hw_context_size
, DMA_FROM_DEVICE
);
2140 memcpy(export
->hw_context
, req_ctx
->hw_context
,
2141 req_ctx
->hw_context_size
);
2142 memcpy(export
->buf
, req_ctx
->buf
[req_ctx
->buf_idx
], req_ctx
->nbuf
);
2143 export
->swinit
= req_ctx
->swinit
;
2144 export
->first
= req_ctx
->first
;
2145 export
->last
= req_ctx
->last
;
2146 export
->to_hash_later
= req_ctx
->to_hash_later
;
2147 export
->nbuf
= req_ctx
->nbuf
;
/*
 * ahash_import - .import entry point: inverse of ahash_export(). Resets
 * the request context, restores context/buffer/state from the export
 * structure, and re-primes DMA coherency for the context buffer.
 */
2152 static int ahash_import(struct ahash_request
*areq
, const void *in
)
2154 struct talitos_ahash_req_ctx
*req_ctx
= ahash_request_ctx(areq
);
2155 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(areq
);
2156 struct talitos_ctx
*ctx
= crypto_ahash_ctx(tfm
);
2157 struct device
*dev
= ctx
->dev
;
2158 const struct talitos_export_state
*export
= in
;
2162 memset(req_ctx
, 0, sizeof(*req_ctx
));
2163 size
= (crypto_ahash_digestsize(tfm
) <= SHA256_DIGEST_SIZE
)
2164 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2165 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512
;
2166 req_ctx
->hw_context_size
= size
;
2167 memcpy(req_ctx
->hw_context
, export
->hw_context
, size
);
2168 memcpy(req_ctx
->buf
[0], export
->buf
, export
->nbuf
);
2169 req_ctx
->swinit
= export
->swinit
;
2170 req_ctx
->first
= export
->first
;
2171 req_ctx
->last
= export
->last
;
2172 req_ctx
->to_hash_later
= export
->to_hash_later
;
2173 req_ctx
->nbuf
= export
->nbuf
;
2175 dma
= dma_map_single(dev
, req_ctx
->hw_context
, req_ctx
->hw_context_size
,
2177 dma_unmap_single(dev
, dma
, req_ctx
->hw_context_size
, DMA_TO_DEVICE
);
/*
 * keyhash - synchronously digest an over-long HMAC key through this same
 * tfm, using crypto_wait_req() to block until the async digest completes.
 * (Output buffer parameter and some cleanup lines are elided here.)
 */
2182 static int keyhash(struct crypto_ahash
*tfm
, const u8
*key
, unsigned int keylen
,
2185 struct talitos_ctx
*ctx
= crypto_tfm_ctx(crypto_ahash_tfm(tfm
));
2187 struct scatterlist sg
[1];
2188 struct ahash_request
*req
;
2189 struct crypto_wait wait
;
2192 crypto_init_wait(&wait
);
2194 req
= ahash_request_alloc(tfm
, GFP_KERNEL
);
2198 /* Keep tfm keylen == 0 during hash of the long key */
2200 ahash_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
2201 crypto_req_done
, &wait
);
2203 sg_init_one(&sg
[0], key
, keylen
);
2205 ahash_request_set_crypt(req
, sg
, hash
, keylen
);
2206 ret
= crypto_wait_req(crypto_ahash_digest(req
), &wait
);
2208 ahash_request_free(req
);
/*
 * ahash_setkey - HMAC setkey: keys up to the block size are used as-is;
 * longer keys are first reduced to their digest via keyhash() (standard
 * HMAC long-key handling). The resulting key is then DMA-mapped, replacing
 * any previous mapping.
 */
2213 static int ahash_setkey(struct crypto_ahash
*tfm
, const u8
*key
,
2214 unsigned int keylen
)
2216 struct talitos_ctx
*ctx
= crypto_tfm_ctx(crypto_ahash_tfm(tfm
));
2217 struct device
*dev
= ctx
->dev
;
2218 unsigned int blocksize
=
2219 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm
));
2220 unsigned int digestsize
= crypto_ahash_digestsize(tfm
);
2221 unsigned int keysize
= keylen
;
2222 u8 hash
[SHA512_DIGEST_SIZE
];
2225 if (keylen
<= blocksize
)
2226 memcpy(ctx
->key
, key
, keysize
);
2228 /* Must get the hash of the long key */
2229 ret
= keyhash(tfm
, key
, keylen
, hash
);
2234 keysize
= digestsize
;
2235 memcpy(ctx
->key
, hash
, digestsize
);
/* Drop the mapping of any previously installed key. */
2239 dma_unmap_single(dev
, ctx
->dma_key
, ctx
->keylen
, DMA_TO_DEVICE
);
2241 ctx
->keylen
= keysize
;
2242 ctx
->dma_key
= dma_map_single(dev
, ctx
->key
, keysize
, DMA_TO_DEVICE
);
/*
 * talitos_alg_template - one registrable algorithm: the crypto-API alg
 * definition (skcipher/hash/aead — presumably members of a union whose
 * declaration is elided here) plus the SEC descriptor header template
 * selecting execution units and modes for that algorithm.
 */
2248 struct talitos_alg_template
{
2252 struct skcipher_alg skcipher
;
2253 struct ahash_alg hash
;
2254 struct aead_alg aead
;
2256 __be32 desc_hdr_template
;
2259 static struct talitos_alg_template driver_algs
[] = {
2260 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
2261 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2264 .cra_name
= "authenc(hmac(sha1),cbc(aes))",
2265 .cra_driver_name
= "authenc-hmac-sha1-"
2267 .cra_blocksize
= AES_BLOCK_SIZE
,
2268 .cra_flags
= CRYPTO_ALG_ASYNC
|
2269 CRYPTO_ALG_ALLOCATES_MEMORY
,
2271 .ivsize
= AES_BLOCK_SIZE
,
2272 .maxauthsize
= SHA1_DIGEST_SIZE
,
2274 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2275 DESC_HDR_SEL0_AESU
|
2276 DESC_HDR_MODE0_AESU_CBC
|
2277 DESC_HDR_SEL1_MDEUA
|
2278 DESC_HDR_MODE1_MDEU_INIT
|
2279 DESC_HDR_MODE1_MDEU_PAD
|
2280 DESC_HDR_MODE1_MDEU_SHA1_HMAC
,
2282 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2283 .priority
= TALITOS_CRA_PRIORITY_AEAD_HSNA
,
2286 .cra_name
= "authenc(hmac(sha1),cbc(aes))",
2287 .cra_driver_name
= "authenc-hmac-sha1-"
2288 "cbc-aes-talitos-hsna",
2289 .cra_blocksize
= AES_BLOCK_SIZE
,
2290 .cra_flags
= CRYPTO_ALG_ASYNC
|
2291 CRYPTO_ALG_ALLOCATES_MEMORY
,
2293 .ivsize
= AES_BLOCK_SIZE
,
2294 .maxauthsize
= SHA1_DIGEST_SIZE
,
2296 .desc_hdr_template
= DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU
|
2297 DESC_HDR_SEL0_AESU
|
2298 DESC_HDR_MODE0_AESU_CBC
|
2299 DESC_HDR_SEL1_MDEUA
|
2300 DESC_HDR_MODE1_MDEU_INIT
|
2301 DESC_HDR_MODE1_MDEU_PAD
|
2302 DESC_HDR_MODE1_MDEU_SHA1_HMAC
,
2304 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2307 .cra_name
= "authenc(hmac(sha1),"
2309 .cra_driver_name
= "authenc-hmac-sha1-"
2311 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2312 .cra_flags
= CRYPTO_ALG_ASYNC
|
2313 CRYPTO_ALG_ALLOCATES_MEMORY
,
2315 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2316 .maxauthsize
= SHA1_DIGEST_SIZE
,
2317 .setkey
= aead_des3_setkey
,
2319 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2321 DESC_HDR_MODE0_DEU_CBC
|
2322 DESC_HDR_MODE0_DEU_3DES
|
2323 DESC_HDR_SEL1_MDEUA
|
2324 DESC_HDR_MODE1_MDEU_INIT
|
2325 DESC_HDR_MODE1_MDEU_PAD
|
2326 DESC_HDR_MODE1_MDEU_SHA1_HMAC
,
2328 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2329 .priority
= TALITOS_CRA_PRIORITY_AEAD_HSNA
,
2332 .cra_name
= "authenc(hmac(sha1),"
2334 .cra_driver_name
= "authenc-hmac-sha1-"
2335 "cbc-3des-talitos-hsna",
2336 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2337 .cra_flags
= CRYPTO_ALG_ASYNC
|
2338 CRYPTO_ALG_ALLOCATES_MEMORY
,
2340 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2341 .maxauthsize
= SHA1_DIGEST_SIZE
,
2342 .setkey
= aead_des3_setkey
,
2344 .desc_hdr_template
= DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU
|
2346 DESC_HDR_MODE0_DEU_CBC
|
2347 DESC_HDR_MODE0_DEU_3DES
|
2348 DESC_HDR_SEL1_MDEUA
|
2349 DESC_HDR_MODE1_MDEU_INIT
|
2350 DESC_HDR_MODE1_MDEU_PAD
|
2351 DESC_HDR_MODE1_MDEU_SHA1_HMAC
,
2353 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2356 .cra_name
= "authenc(hmac(sha224),cbc(aes))",
2357 .cra_driver_name
= "authenc-hmac-sha224-"
2359 .cra_blocksize
= AES_BLOCK_SIZE
,
2360 .cra_flags
= CRYPTO_ALG_ASYNC
|
2361 CRYPTO_ALG_ALLOCATES_MEMORY
,
2363 .ivsize
= AES_BLOCK_SIZE
,
2364 .maxauthsize
= SHA224_DIGEST_SIZE
,
2366 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2367 DESC_HDR_SEL0_AESU
|
2368 DESC_HDR_MODE0_AESU_CBC
|
2369 DESC_HDR_SEL1_MDEUA
|
2370 DESC_HDR_MODE1_MDEU_INIT
|
2371 DESC_HDR_MODE1_MDEU_PAD
|
2372 DESC_HDR_MODE1_MDEU_SHA224_HMAC
,
2374 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2375 .priority
= TALITOS_CRA_PRIORITY_AEAD_HSNA
,
2378 .cra_name
= "authenc(hmac(sha224),cbc(aes))",
2379 .cra_driver_name
= "authenc-hmac-sha224-"
2380 "cbc-aes-talitos-hsna",
2381 .cra_blocksize
= AES_BLOCK_SIZE
,
2382 .cra_flags
= CRYPTO_ALG_ASYNC
|
2383 CRYPTO_ALG_ALLOCATES_MEMORY
,
2385 .ivsize
= AES_BLOCK_SIZE
,
2386 .maxauthsize
= SHA224_DIGEST_SIZE
,
2388 .desc_hdr_template
= DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU
|
2389 DESC_HDR_SEL0_AESU
|
2390 DESC_HDR_MODE0_AESU_CBC
|
2391 DESC_HDR_SEL1_MDEUA
|
2392 DESC_HDR_MODE1_MDEU_INIT
|
2393 DESC_HDR_MODE1_MDEU_PAD
|
2394 DESC_HDR_MODE1_MDEU_SHA224_HMAC
,
2396 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2399 .cra_name
= "authenc(hmac(sha224),"
2401 .cra_driver_name
= "authenc-hmac-sha224-"
2403 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2404 .cra_flags
= CRYPTO_ALG_ASYNC
|
2405 CRYPTO_ALG_ALLOCATES_MEMORY
,
2407 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2408 .maxauthsize
= SHA224_DIGEST_SIZE
,
2409 .setkey
= aead_des3_setkey
,
2411 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2413 DESC_HDR_MODE0_DEU_CBC
|
2414 DESC_HDR_MODE0_DEU_3DES
|
2415 DESC_HDR_SEL1_MDEUA
|
2416 DESC_HDR_MODE1_MDEU_INIT
|
2417 DESC_HDR_MODE1_MDEU_PAD
|
2418 DESC_HDR_MODE1_MDEU_SHA224_HMAC
,
2420 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2421 .priority
= TALITOS_CRA_PRIORITY_AEAD_HSNA
,
2424 .cra_name
= "authenc(hmac(sha224),"
2426 .cra_driver_name
= "authenc-hmac-sha224-"
2427 "cbc-3des-talitos-hsna",
2428 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2429 .cra_flags
= CRYPTO_ALG_ASYNC
|
2430 CRYPTO_ALG_ALLOCATES_MEMORY
,
2432 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2433 .maxauthsize
= SHA224_DIGEST_SIZE
,
2434 .setkey
= aead_des3_setkey
,
2436 .desc_hdr_template
= DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU
|
2438 DESC_HDR_MODE0_DEU_CBC
|
2439 DESC_HDR_MODE0_DEU_3DES
|
2440 DESC_HDR_SEL1_MDEUA
|
2441 DESC_HDR_MODE1_MDEU_INIT
|
2442 DESC_HDR_MODE1_MDEU_PAD
|
2443 DESC_HDR_MODE1_MDEU_SHA224_HMAC
,
2445 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2448 .cra_name
= "authenc(hmac(sha256),cbc(aes))",
2449 .cra_driver_name
= "authenc-hmac-sha256-"
2451 .cra_blocksize
= AES_BLOCK_SIZE
,
2452 .cra_flags
= CRYPTO_ALG_ASYNC
|
2453 CRYPTO_ALG_ALLOCATES_MEMORY
,
2455 .ivsize
= AES_BLOCK_SIZE
,
2456 .maxauthsize
= SHA256_DIGEST_SIZE
,
2458 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2459 DESC_HDR_SEL0_AESU
|
2460 DESC_HDR_MODE0_AESU_CBC
|
2461 DESC_HDR_SEL1_MDEUA
|
2462 DESC_HDR_MODE1_MDEU_INIT
|
2463 DESC_HDR_MODE1_MDEU_PAD
|
2464 DESC_HDR_MODE1_MDEU_SHA256_HMAC
,
2466 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2467 .priority
= TALITOS_CRA_PRIORITY_AEAD_HSNA
,
2470 .cra_name
= "authenc(hmac(sha256),cbc(aes))",
2471 .cra_driver_name
= "authenc-hmac-sha256-"
2472 "cbc-aes-talitos-hsna",
2473 .cra_blocksize
= AES_BLOCK_SIZE
,
2474 .cra_flags
= CRYPTO_ALG_ASYNC
|
2475 CRYPTO_ALG_ALLOCATES_MEMORY
,
2477 .ivsize
= AES_BLOCK_SIZE
,
2478 .maxauthsize
= SHA256_DIGEST_SIZE
,
2480 .desc_hdr_template
= DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU
|
2481 DESC_HDR_SEL0_AESU
|
2482 DESC_HDR_MODE0_AESU_CBC
|
2483 DESC_HDR_SEL1_MDEUA
|
2484 DESC_HDR_MODE1_MDEU_INIT
|
2485 DESC_HDR_MODE1_MDEU_PAD
|
2486 DESC_HDR_MODE1_MDEU_SHA256_HMAC
,
2488 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2491 .cra_name
= "authenc(hmac(sha256),"
2493 .cra_driver_name
= "authenc-hmac-sha256-"
2495 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2496 .cra_flags
= CRYPTO_ALG_ASYNC
|
2497 CRYPTO_ALG_ALLOCATES_MEMORY
,
2499 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2500 .maxauthsize
= SHA256_DIGEST_SIZE
,
2501 .setkey
= aead_des3_setkey
,
2503 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2505 DESC_HDR_MODE0_DEU_CBC
|
2506 DESC_HDR_MODE0_DEU_3DES
|
2507 DESC_HDR_SEL1_MDEUA
|
2508 DESC_HDR_MODE1_MDEU_INIT
|
2509 DESC_HDR_MODE1_MDEU_PAD
|
2510 DESC_HDR_MODE1_MDEU_SHA256_HMAC
,
2512 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2513 .priority
= TALITOS_CRA_PRIORITY_AEAD_HSNA
,
2516 .cra_name
= "authenc(hmac(sha256),"
2518 .cra_driver_name
= "authenc-hmac-sha256-"
2519 "cbc-3des-talitos-hsna",
2520 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2521 .cra_flags
= CRYPTO_ALG_ASYNC
|
2522 CRYPTO_ALG_ALLOCATES_MEMORY
,
2524 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2525 .maxauthsize
= SHA256_DIGEST_SIZE
,
2526 .setkey
= aead_des3_setkey
,
2528 .desc_hdr_template
= DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU
|
2530 DESC_HDR_MODE0_DEU_CBC
|
2531 DESC_HDR_MODE0_DEU_3DES
|
2532 DESC_HDR_SEL1_MDEUA
|
2533 DESC_HDR_MODE1_MDEU_INIT
|
2534 DESC_HDR_MODE1_MDEU_PAD
|
2535 DESC_HDR_MODE1_MDEU_SHA256_HMAC
,
2537 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2540 .cra_name
= "authenc(hmac(sha384),cbc(aes))",
2541 .cra_driver_name
= "authenc-hmac-sha384-"
2543 .cra_blocksize
= AES_BLOCK_SIZE
,
2544 .cra_flags
= CRYPTO_ALG_ASYNC
|
2545 CRYPTO_ALG_ALLOCATES_MEMORY
,
2547 .ivsize
= AES_BLOCK_SIZE
,
2548 .maxauthsize
= SHA384_DIGEST_SIZE
,
2550 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2551 DESC_HDR_SEL0_AESU
|
2552 DESC_HDR_MODE0_AESU_CBC
|
2553 DESC_HDR_SEL1_MDEUB
|
2554 DESC_HDR_MODE1_MDEU_INIT
|
2555 DESC_HDR_MODE1_MDEU_PAD
|
2556 DESC_HDR_MODE1_MDEUB_SHA384_HMAC
,
2558 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2561 .cra_name
= "authenc(hmac(sha384),"
2563 .cra_driver_name
= "authenc-hmac-sha384-"
2565 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2566 .cra_flags
= CRYPTO_ALG_ASYNC
|
2567 CRYPTO_ALG_ALLOCATES_MEMORY
,
2569 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2570 .maxauthsize
= SHA384_DIGEST_SIZE
,
2571 .setkey
= aead_des3_setkey
,
2573 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2575 DESC_HDR_MODE0_DEU_CBC
|
2576 DESC_HDR_MODE0_DEU_3DES
|
2577 DESC_HDR_SEL1_MDEUB
|
2578 DESC_HDR_MODE1_MDEU_INIT
|
2579 DESC_HDR_MODE1_MDEU_PAD
|
2580 DESC_HDR_MODE1_MDEUB_SHA384_HMAC
,
2582 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2585 .cra_name
= "authenc(hmac(sha512),cbc(aes))",
2586 .cra_driver_name
= "authenc-hmac-sha512-"
2588 .cra_blocksize
= AES_BLOCK_SIZE
,
2589 .cra_flags
= CRYPTO_ALG_ASYNC
|
2590 CRYPTO_ALG_ALLOCATES_MEMORY
,
2592 .ivsize
= AES_BLOCK_SIZE
,
2593 .maxauthsize
= SHA512_DIGEST_SIZE
,
2595 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2596 DESC_HDR_SEL0_AESU
|
2597 DESC_HDR_MODE0_AESU_CBC
|
2598 DESC_HDR_SEL1_MDEUB
|
2599 DESC_HDR_MODE1_MDEU_INIT
|
2600 DESC_HDR_MODE1_MDEU_PAD
|
2601 DESC_HDR_MODE1_MDEUB_SHA512_HMAC
,
2603 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2606 .cra_name
= "authenc(hmac(sha512),"
2608 .cra_driver_name
= "authenc-hmac-sha512-"
2610 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2611 .cra_flags
= CRYPTO_ALG_ASYNC
|
2612 CRYPTO_ALG_ALLOCATES_MEMORY
,
2614 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2615 .maxauthsize
= SHA512_DIGEST_SIZE
,
2616 .setkey
= aead_des3_setkey
,
2618 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2620 DESC_HDR_MODE0_DEU_CBC
|
2621 DESC_HDR_MODE0_DEU_3DES
|
2622 DESC_HDR_SEL1_MDEUB
|
2623 DESC_HDR_MODE1_MDEU_INIT
|
2624 DESC_HDR_MODE1_MDEU_PAD
|
2625 DESC_HDR_MODE1_MDEUB_SHA512_HMAC
,
2627 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2630 .cra_name
= "authenc(hmac(md5),cbc(aes))",
2631 .cra_driver_name
= "authenc-hmac-md5-"
2633 .cra_blocksize
= AES_BLOCK_SIZE
,
2634 .cra_flags
= CRYPTO_ALG_ASYNC
|
2635 CRYPTO_ALG_ALLOCATES_MEMORY
,
2637 .ivsize
= AES_BLOCK_SIZE
,
2638 .maxauthsize
= MD5_DIGEST_SIZE
,
2640 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2641 DESC_HDR_SEL0_AESU
|
2642 DESC_HDR_MODE0_AESU_CBC
|
2643 DESC_HDR_SEL1_MDEUA
|
2644 DESC_HDR_MODE1_MDEU_INIT
|
2645 DESC_HDR_MODE1_MDEU_PAD
|
2646 DESC_HDR_MODE1_MDEU_MD5_HMAC
,
2648 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2649 .priority
= TALITOS_CRA_PRIORITY_AEAD_HSNA
,
2652 .cra_name
= "authenc(hmac(md5),cbc(aes))",
2653 .cra_driver_name
= "authenc-hmac-md5-"
2654 "cbc-aes-talitos-hsna",
2655 .cra_blocksize
= AES_BLOCK_SIZE
,
2656 .cra_flags
= CRYPTO_ALG_ASYNC
|
2657 CRYPTO_ALG_ALLOCATES_MEMORY
,
2659 .ivsize
= AES_BLOCK_SIZE
,
2660 .maxauthsize
= MD5_DIGEST_SIZE
,
2662 .desc_hdr_template
= DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU
|
2663 DESC_HDR_SEL0_AESU
|
2664 DESC_HDR_MODE0_AESU_CBC
|
2665 DESC_HDR_SEL1_MDEUA
|
2666 DESC_HDR_MODE1_MDEU_INIT
|
2667 DESC_HDR_MODE1_MDEU_PAD
|
2668 DESC_HDR_MODE1_MDEU_MD5_HMAC
,
2670 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2673 .cra_name
= "authenc(hmac(md5),cbc(des3_ede))",
2674 .cra_driver_name
= "authenc-hmac-md5-"
2676 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2677 .cra_flags
= CRYPTO_ALG_ASYNC
|
2678 CRYPTO_ALG_ALLOCATES_MEMORY
,
2680 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2681 .maxauthsize
= MD5_DIGEST_SIZE
,
2682 .setkey
= aead_des3_setkey
,
2684 .desc_hdr_template
= DESC_HDR_TYPE_IPSEC_ESP
|
2686 DESC_HDR_MODE0_DEU_CBC
|
2687 DESC_HDR_MODE0_DEU_3DES
|
2688 DESC_HDR_SEL1_MDEUA
|
2689 DESC_HDR_MODE1_MDEU_INIT
|
2690 DESC_HDR_MODE1_MDEU_PAD
|
2691 DESC_HDR_MODE1_MDEU_MD5_HMAC
,
2693 { .type
= CRYPTO_ALG_TYPE_AEAD
,
2694 .priority
= TALITOS_CRA_PRIORITY_AEAD_HSNA
,
2697 .cra_name
= "authenc(hmac(md5),cbc(des3_ede))",
2698 .cra_driver_name
= "authenc-hmac-md5-"
2699 "cbc-3des-talitos-hsna",
2700 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2701 .cra_flags
= CRYPTO_ALG_ASYNC
|
2702 CRYPTO_ALG_ALLOCATES_MEMORY
,
2704 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2705 .maxauthsize
= MD5_DIGEST_SIZE
,
2706 .setkey
= aead_des3_setkey
,
2708 .desc_hdr_template
= DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU
|
2710 DESC_HDR_MODE0_DEU_CBC
|
2711 DESC_HDR_MODE0_DEU_3DES
|
2712 DESC_HDR_SEL1_MDEUA
|
2713 DESC_HDR_MODE1_MDEU_INIT
|
2714 DESC_HDR_MODE1_MDEU_PAD
|
2715 DESC_HDR_MODE1_MDEU_MD5_HMAC
,
2717 /* SKCIPHER algorithms. */
2718 { .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2720 .base
.cra_name
= "ecb(aes)",
2721 .base
.cra_driver_name
= "ecb-aes-talitos",
2722 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
2723 .base
.cra_flags
= CRYPTO_ALG_ASYNC
|
2724 CRYPTO_ALG_ALLOCATES_MEMORY
,
2725 .min_keysize
= AES_MIN_KEY_SIZE
,
2726 .max_keysize
= AES_MAX_KEY_SIZE
,
2727 .setkey
= skcipher_aes_setkey
,
2729 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2732 { .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2734 .base
.cra_name
= "cbc(aes)",
2735 .base
.cra_driver_name
= "cbc-aes-talitos",
2736 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
2737 .base
.cra_flags
= CRYPTO_ALG_ASYNC
|
2738 CRYPTO_ALG_ALLOCATES_MEMORY
,
2739 .min_keysize
= AES_MIN_KEY_SIZE
,
2740 .max_keysize
= AES_MAX_KEY_SIZE
,
2741 .ivsize
= AES_BLOCK_SIZE
,
2742 .setkey
= skcipher_aes_setkey
,
2744 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2745 DESC_HDR_SEL0_AESU
|
2746 DESC_HDR_MODE0_AESU_CBC
,
2748 { .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2750 .base
.cra_name
= "ctr(aes)",
2751 .base
.cra_driver_name
= "ctr-aes-talitos",
2752 .base
.cra_blocksize
= 1,
2753 .base
.cra_flags
= CRYPTO_ALG_ASYNC
|
2754 CRYPTO_ALG_ALLOCATES_MEMORY
,
2755 .min_keysize
= AES_MIN_KEY_SIZE
,
2756 .max_keysize
= AES_MAX_KEY_SIZE
,
2757 .ivsize
= AES_BLOCK_SIZE
,
2758 .setkey
= skcipher_aes_setkey
,
2760 .desc_hdr_template
= DESC_HDR_TYPE_AESU_CTR_NONSNOOP
|
2761 DESC_HDR_SEL0_AESU
|
2762 DESC_HDR_MODE0_AESU_CTR
,
2764 { .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2766 .base
.cra_name
= "ecb(des)",
2767 .base
.cra_driver_name
= "ecb-des-talitos",
2768 .base
.cra_blocksize
= DES_BLOCK_SIZE
,
2769 .base
.cra_flags
= CRYPTO_ALG_ASYNC
|
2770 CRYPTO_ALG_ALLOCATES_MEMORY
,
2771 .min_keysize
= DES_KEY_SIZE
,
2772 .max_keysize
= DES_KEY_SIZE
,
2773 .setkey
= skcipher_des_setkey
,
2775 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2778 { .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2780 .base
.cra_name
= "cbc(des)",
2781 .base
.cra_driver_name
= "cbc-des-talitos",
2782 .base
.cra_blocksize
= DES_BLOCK_SIZE
,
2783 .base
.cra_flags
= CRYPTO_ALG_ASYNC
|
2784 CRYPTO_ALG_ALLOCATES_MEMORY
,
2785 .min_keysize
= DES_KEY_SIZE
,
2786 .max_keysize
= DES_KEY_SIZE
,
2787 .ivsize
= DES_BLOCK_SIZE
,
2788 .setkey
= skcipher_des_setkey
,
2790 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2792 DESC_HDR_MODE0_DEU_CBC
,
2794 { .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2796 .base
.cra_name
= "ecb(des3_ede)",
2797 .base
.cra_driver_name
= "ecb-3des-talitos",
2798 .base
.cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2799 .base
.cra_flags
= CRYPTO_ALG_ASYNC
|
2800 CRYPTO_ALG_ALLOCATES_MEMORY
,
2801 .min_keysize
= DES3_EDE_KEY_SIZE
,
2802 .max_keysize
= DES3_EDE_KEY_SIZE
,
2803 .setkey
= skcipher_des3_setkey
,
2805 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2807 DESC_HDR_MODE0_DEU_3DES
,
2809 { .type
= CRYPTO_ALG_TYPE_SKCIPHER
,
2811 .base
.cra_name
= "cbc(des3_ede)",
2812 .base
.cra_driver_name
= "cbc-3des-talitos",
2813 .base
.cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2814 .base
.cra_flags
= CRYPTO_ALG_ASYNC
|
2815 CRYPTO_ALG_ALLOCATES_MEMORY
,
2816 .min_keysize
= DES3_EDE_KEY_SIZE
,
2817 .max_keysize
= DES3_EDE_KEY_SIZE
,
2818 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2819 .setkey
= skcipher_des3_setkey
,
2821 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2823 DESC_HDR_MODE0_DEU_CBC
|
2824 DESC_HDR_MODE0_DEU_3DES
,
2826 /* AHASH algorithms. */
2827 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2829 .halg
.digestsize
= MD5_DIGEST_SIZE
,
2830 .halg
.statesize
= sizeof(struct talitos_export_state
),
2833 .cra_driver_name
= "md5-talitos",
2834 .cra_blocksize
= MD5_HMAC_BLOCK_SIZE
,
2835 .cra_flags
= CRYPTO_ALG_ASYNC
|
2836 CRYPTO_ALG_ALLOCATES_MEMORY
,
2839 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2840 DESC_HDR_SEL0_MDEUA
|
2841 DESC_HDR_MODE0_MDEU_MD5
,
2843 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2845 .halg
.digestsize
= SHA1_DIGEST_SIZE
,
2846 .halg
.statesize
= sizeof(struct talitos_export_state
),
2849 .cra_driver_name
= "sha1-talitos",
2850 .cra_blocksize
= SHA1_BLOCK_SIZE
,
2851 .cra_flags
= CRYPTO_ALG_ASYNC
|
2852 CRYPTO_ALG_ALLOCATES_MEMORY
,
2855 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2856 DESC_HDR_SEL0_MDEUA
|
2857 DESC_HDR_MODE0_MDEU_SHA1
,
2859 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2861 .halg
.digestsize
= SHA224_DIGEST_SIZE
,
2862 .halg
.statesize
= sizeof(struct talitos_export_state
),
2864 .cra_name
= "sha224",
2865 .cra_driver_name
= "sha224-talitos",
2866 .cra_blocksize
= SHA224_BLOCK_SIZE
,
2867 .cra_flags
= CRYPTO_ALG_ASYNC
|
2868 CRYPTO_ALG_ALLOCATES_MEMORY
,
2871 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2872 DESC_HDR_SEL0_MDEUA
|
2873 DESC_HDR_MODE0_MDEU_SHA224
,
2875 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2877 .halg
.digestsize
= SHA256_DIGEST_SIZE
,
2878 .halg
.statesize
= sizeof(struct talitos_export_state
),
2880 .cra_name
= "sha256",
2881 .cra_driver_name
= "sha256-talitos",
2882 .cra_blocksize
= SHA256_BLOCK_SIZE
,
2883 .cra_flags
= CRYPTO_ALG_ASYNC
|
2884 CRYPTO_ALG_ALLOCATES_MEMORY
,
2887 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2888 DESC_HDR_SEL0_MDEUA
|
2889 DESC_HDR_MODE0_MDEU_SHA256
,
2891 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2893 .halg
.digestsize
= SHA384_DIGEST_SIZE
,
2894 .halg
.statesize
= sizeof(struct talitos_export_state
),
2896 .cra_name
= "sha384",
2897 .cra_driver_name
= "sha384-talitos",
2898 .cra_blocksize
= SHA384_BLOCK_SIZE
,
2899 .cra_flags
= CRYPTO_ALG_ASYNC
|
2900 CRYPTO_ALG_ALLOCATES_MEMORY
,
2903 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2904 DESC_HDR_SEL0_MDEUB
|
2905 DESC_HDR_MODE0_MDEUB_SHA384
,
2907 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2909 .halg
.digestsize
= SHA512_DIGEST_SIZE
,
2910 .halg
.statesize
= sizeof(struct talitos_export_state
),
2912 .cra_name
= "sha512",
2913 .cra_driver_name
= "sha512-talitos",
2914 .cra_blocksize
= SHA512_BLOCK_SIZE
,
2915 .cra_flags
= CRYPTO_ALG_ASYNC
|
2916 CRYPTO_ALG_ALLOCATES_MEMORY
,
2919 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2920 DESC_HDR_SEL0_MDEUB
|
2921 DESC_HDR_MODE0_MDEUB_SHA512
,
2923 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2925 .halg
.digestsize
= MD5_DIGEST_SIZE
,
2926 .halg
.statesize
= sizeof(struct talitos_export_state
),
2928 .cra_name
= "hmac(md5)",
2929 .cra_driver_name
= "hmac-md5-talitos",
2930 .cra_blocksize
= MD5_HMAC_BLOCK_SIZE
,
2931 .cra_flags
= CRYPTO_ALG_ASYNC
|
2932 CRYPTO_ALG_ALLOCATES_MEMORY
,
2935 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2936 DESC_HDR_SEL0_MDEUA
|
2937 DESC_HDR_MODE0_MDEU_MD5
,
2939 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2941 .halg
.digestsize
= SHA1_DIGEST_SIZE
,
2942 .halg
.statesize
= sizeof(struct talitos_export_state
),
2944 .cra_name
= "hmac(sha1)",
2945 .cra_driver_name
= "hmac-sha1-talitos",
2946 .cra_blocksize
= SHA1_BLOCK_SIZE
,
2947 .cra_flags
= CRYPTO_ALG_ASYNC
|
2948 CRYPTO_ALG_ALLOCATES_MEMORY
,
2951 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2952 DESC_HDR_SEL0_MDEUA
|
2953 DESC_HDR_MODE0_MDEU_SHA1
,
2955 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2957 .halg
.digestsize
= SHA224_DIGEST_SIZE
,
2958 .halg
.statesize
= sizeof(struct talitos_export_state
),
2960 .cra_name
= "hmac(sha224)",
2961 .cra_driver_name
= "hmac-sha224-talitos",
2962 .cra_blocksize
= SHA224_BLOCK_SIZE
,
2963 .cra_flags
= CRYPTO_ALG_ASYNC
|
2964 CRYPTO_ALG_ALLOCATES_MEMORY
,
2967 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2968 DESC_HDR_SEL0_MDEUA
|
2969 DESC_HDR_MODE0_MDEU_SHA224
,
2971 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2973 .halg
.digestsize
= SHA256_DIGEST_SIZE
,
2974 .halg
.statesize
= sizeof(struct talitos_export_state
),
2976 .cra_name
= "hmac(sha256)",
2977 .cra_driver_name
= "hmac-sha256-talitos",
2978 .cra_blocksize
= SHA256_BLOCK_SIZE
,
2979 .cra_flags
= CRYPTO_ALG_ASYNC
|
2980 CRYPTO_ALG_ALLOCATES_MEMORY
,
2983 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
2984 DESC_HDR_SEL0_MDEUA
|
2985 DESC_HDR_MODE0_MDEU_SHA256
,
2987 { .type
= CRYPTO_ALG_TYPE_AHASH
,
2989 .halg
.digestsize
= SHA384_DIGEST_SIZE
,
2990 .halg
.statesize
= sizeof(struct talitos_export_state
),
2992 .cra_name
= "hmac(sha384)",
2993 .cra_driver_name
= "hmac-sha384-talitos",
2994 .cra_blocksize
= SHA384_BLOCK_SIZE
,
2995 .cra_flags
= CRYPTO_ALG_ASYNC
|
2996 CRYPTO_ALG_ALLOCATES_MEMORY
,
2999 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
3000 DESC_HDR_SEL0_MDEUB
|
3001 DESC_HDR_MODE0_MDEUB_SHA384
,
3003 { .type
= CRYPTO_ALG_TYPE_AHASH
,
3005 .halg
.digestsize
= SHA512_DIGEST_SIZE
,
3006 .halg
.statesize
= sizeof(struct talitos_export_state
),
3008 .cra_name
= "hmac(sha512)",
3009 .cra_driver_name
= "hmac-sha512-talitos",
3010 .cra_blocksize
= SHA512_BLOCK_SIZE
,
3011 .cra_flags
= CRYPTO_ALG_ASYNC
|
3012 CRYPTO_ALG_ALLOCATES_MEMORY
,
3015 .desc_hdr_template
= DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
3016 DESC_HDR_SEL0_MDEUB
|
3017 DESC_HDR_MODE0_MDEUB_SHA512
,
3021 struct talitos_crypto_alg
{
3022 struct list_head entry
;
3024 struct talitos_alg_template algt
;
3027 static int talitos_init_common(struct talitos_ctx
*ctx
,
3028 struct talitos_crypto_alg
*talitos_alg
)
3030 struct talitos_private
*priv
;
3032 /* update context with ptr to dev */
3033 ctx
->dev
= talitos_alg
->dev
;
3035 /* assign SEC channel to tfm in round-robin fashion */
3036 priv
= dev_get_drvdata(ctx
->dev
);
3037 ctx
->ch
= atomic_inc_return(&priv
->last_chan
) &
3038 (priv
->num_channels
- 1);
3040 /* copy descriptor header template value */
3041 ctx
->desc_hdr_template
= talitos_alg
->algt
.desc_hdr_template
;
3043 /* select done notification */
3044 ctx
->desc_hdr_template
|= DESC_HDR_DONE_NOTIFY
;
3049 static int talitos_cra_init_aead(struct crypto_aead
*tfm
)
3051 struct aead_alg
*alg
= crypto_aead_alg(tfm
);
3052 struct talitos_crypto_alg
*talitos_alg
;
3053 struct talitos_ctx
*ctx
= crypto_aead_ctx(tfm
);
3055 talitos_alg
= container_of(alg
, struct talitos_crypto_alg
,
3058 return talitos_init_common(ctx
, talitos_alg
);
3061 static int talitos_cra_init_skcipher(struct crypto_skcipher
*tfm
)
3063 struct skcipher_alg
*alg
= crypto_skcipher_alg(tfm
);
3064 struct talitos_crypto_alg
*talitos_alg
;
3065 struct talitos_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
3067 talitos_alg
= container_of(alg
, struct talitos_crypto_alg
,
3070 return talitos_init_common(ctx
, talitos_alg
);
3073 static int talitos_cra_init_ahash(struct crypto_tfm
*tfm
)
3075 struct crypto_alg
*alg
= tfm
->__crt_alg
;
3076 struct talitos_crypto_alg
*talitos_alg
;
3077 struct talitos_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3079 talitos_alg
= container_of(__crypto_ahash_alg(alg
),
3080 struct talitos_crypto_alg
,
3084 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm
),
3085 sizeof(struct talitos_ahash_req_ctx
));
3087 return talitos_init_common(ctx
, talitos_alg
);
3090 static void talitos_cra_exit(struct crypto_tfm
*tfm
)
3092 struct talitos_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3093 struct device
*dev
= ctx
->dev
;
3096 dma_unmap_single(dev
, ctx
->dma_key
, ctx
->keylen
, DMA_TO_DEVICE
);
3100 * given the alg's descriptor header template, determine whether descriptor
3101 * type and primary/secondary execution units required match the hw
3102 * capabilities description provided in the device tree node.
3104 static int hw_supports(struct device
*dev
, __be32 desc_hdr_template
)
3106 struct talitos_private
*priv
= dev_get_drvdata(dev
);
3109 ret
= (1 << DESC_TYPE(desc_hdr_template
) & priv
->desc_types
) &&
3110 (1 << PRIMARY_EU(desc_hdr_template
) & priv
->exec_units
);
3112 if (SECONDARY_EU(desc_hdr_template
))
3113 ret
= ret
&& (1 << SECONDARY_EU(desc_hdr_template
)
3114 & priv
->exec_units
);
3119 static int talitos_remove(struct platform_device
*ofdev
)
3121 struct device
*dev
= &ofdev
->dev
;
3122 struct talitos_private
*priv
= dev_get_drvdata(dev
);
3123 struct talitos_crypto_alg
*t_alg
, *n
;
3126 list_for_each_entry_safe(t_alg
, n
, &priv
->alg_list
, entry
) {
3127 switch (t_alg
->algt
.type
) {
3128 case CRYPTO_ALG_TYPE_SKCIPHER
:
3129 crypto_unregister_skcipher(&t_alg
->algt
.alg
.skcipher
);
3131 case CRYPTO_ALG_TYPE_AEAD
:
3132 crypto_unregister_aead(&t_alg
->algt
.alg
.aead
);
3134 case CRYPTO_ALG_TYPE_AHASH
:
3135 crypto_unregister_ahash(&t_alg
->algt
.alg
.hash
);
3138 list_del(&t_alg
->entry
);
3141 if (hw_supports(dev
, DESC_HDR_SEL0_RNG
))
3142 talitos_unregister_rng(dev
);
3144 for (i
= 0; i
< 2; i
++)
3146 free_irq(priv
->irq
[i
], dev
);
3147 irq_dispose_mapping(priv
->irq
[i
]);
3150 tasklet_kill(&priv
->done_task
[0]);
3152 tasklet_kill(&priv
->done_task
[1]);
3157 static struct talitos_crypto_alg
*talitos_alg_alloc(struct device
*dev
,
3158 struct talitos_alg_template
3161 struct talitos_private
*priv
= dev_get_drvdata(dev
);
3162 struct talitos_crypto_alg
*t_alg
;
3163 struct crypto_alg
*alg
;
3165 t_alg
= devm_kzalloc(dev
, sizeof(struct talitos_crypto_alg
),
3168 return ERR_PTR(-ENOMEM
);
3170 t_alg
->algt
= *template;
3172 switch (t_alg
->algt
.type
) {
3173 case CRYPTO_ALG_TYPE_SKCIPHER
:
3174 alg
= &t_alg
->algt
.alg
.skcipher
.base
;
3175 alg
->cra_exit
= talitos_cra_exit
;
3176 t_alg
->algt
.alg
.skcipher
.init
= talitos_cra_init_skcipher
;
3177 t_alg
->algt
.alg
.skcipher
.setkey
=
3178 t_alg
->algt
.alg
.skcipher
.setkey
?: skcipher_setkey
;
3179 t_alg
->algt
.alg
.skcipher
.encrypt
= skcipher_encrypt
;
3180 t_alg
->algt
.alg
.skcipher
.decrypt
= skcipher_decrypt
;
3182 case CRYPTO_ALG_TYPE_AEAD
:
3183 alg
= &t_alg
->algt
.alg
.aead
.base
;
3184 alg
->cra_exit
= talitos_cra_exit
;
3185 t_alg
->algt
.alg
.aead
.init
= talitos_cra_init_aead
;
3186 t_alg
->algt
.alg
.aead
.setkey
= t_alg
->algt
.alg
.aead
.setkey
?:
3188 t_alg
->algt
.alg
.aead
.encrypt
= aead_encrypt
;
3189 t_alg
->algt
.alg
.aead
.decrypt
= aead_decrypt
;
3190 if (!(priv
->features
& TALITOS_FTR_SHA224_HWINIT
) &&
3191 !strncmp(alg
->cra_name
, "authenc(hmac(sha224)", 20)) {
3192 devm_kfree(dev
, t_alg
);
3193 return ERR_PTR(-ENOTSUPP
);
3196 case CRYPTO_ALG_TYPE_AHASH
:
3197 alg
= &t_alg
->algt
.alg
.hash
.halg
.base
;
3198 alg
->cra_init
= talitos_cra_init_ahash
;
3199 alg
->cra_exit
= talitos_cra_exit
;
3200 t_alg
->algt
.alg
.hash
.init
= ahash_init
;
3201 t_alg
->algt
.alg
.hash
.update
= ahash_update
;
3202 t_alg
->algt
.alg
.hash
.final
= ahash_final
;
3203 t_alg
->algt
.alg
.hash
.finup
= ahash_finup
;
3204 t_alg
->algt
.alg
.hash
.digest
= ahash_digest
;
3205 if (!strncmp(alg
->cra_name
, "hmac", 4))
3206 t_alg
->algt
.alg
.hash
.setkey
= ahash_setkey
;
3207 t_alg
->algt
.alg
.hash
.import
= ahash_import
;
3208 t_alg
->algt
.alg
.hash
.export
= ahash_export
;
3210 if (!(priv
->features
& TALITOS_FTR_HMAC_OK
) &&
3211 !strncmp(alg
->cra_name
, "hmac", 4)) {
3212 devm_kfree(dev
, t_alg
);
3213 return ERR_PTR(-ENOTSUPP
);
3215 if (!(priv
->features
& TALITOS_FTR_SHA224_HWINIT
) &&
3216 (!strcmp(alg
->cra_name
, "sha224") ||
3217 !strcmp(alg
->cra_name
, "hmac(sha224)"))) {
3218 t_alg
->algt
.alg
.hash
.init
= ahash_init_sha224_swinit
;
3219 t_alg
->algt
.desc_hdr_template
=
3220 DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU
|
3221 DESC_HDR_SEL0_MDEUA
|
3222 DESC_HDR_MODE0_MDEU_SHA256
;
3226 dev_err(dev
, "unknown algorithm type %d\n", t_alg
->algt
.type
);
3227 devm_kfree(dev
, t_alg
);
3228 return ERR_PTR(-EINVAL
);
3231 alg
->cra_module
= THIS_MODULE
;
3232 if (t_alg
->algt
.priority
)
3233 alg
->cra_priority
= t_alg
->algt
.priority
;
3235 alg
->cra_priority
= TALITOS_CRA_PRIORITY
;
3236 if (has_ftr_sec1(priv
))
3237 alg
->cra_alignmask
= 3;
3239 alg
->cra_alignmask
= 0;
3240 alg
->cra_ctxsize
= sizeof(struct talitos_ctx
);
3241 alg
->cra_flags
|= CRYPTO_ALG_KERN_DRIVER_ONLY
;
3248 static int talitos_probe_irq(struct platform_device
*ofdev
)
3250 struct device
*dev
= &ofdev
->dev
;
3251 struct device_node
*np
= ofdev
->dev
.of_node
;
3252 struct talitos_private
*priv
= dev_get_drvdata(dev
);
3254 bool is_sec1
= has_ftr_sec1(priv
);
3256 priv
->irq
[0] = irq_of_parse_and_map(np
, 0);
3257 if (!priv
->irq
[0]) {
3258 dev_err(dev
, "failed to map irq\n");
3262 err
= request_irq(priv
->irq
[0], talitos1_interrupt_4ch
, 0,
3263 dev_driver_string(dev
), dev
);
3267 priv
->irq
[1] = irq_of_parse_and_map(np
, 1);
3269 /* get the primary irq line */
3270 if (!priv
->irq
[1]) {
3271 err
= request_irq(priv
->irq
[0], talitos2_interrupt_4ch
, 0,
3272 dev_driver_string(dev
), dev
);
3276 err
= request_irq(priv
->irq
[0], talitos2_interrupt_ch0_2
, 0,
3277 dev_driver_string(dev
), dev
);
3281 /* get the secondary irq line */
3282 err
= request_irq(priv
->irq
[1], talitos2_interrupt_ch1_3
, 0,
3283 dev_driver_string(dev
), dev
);
3285 dev_err(dev
, "failed to request secondary irq\n");
3286 irq_dispose_mapping(priv
->irq
[1]);
3294 dev_err(dev
, "failed to request primary irq\n");
3295 irq_dispose_mapping(priv
->irq
[0]);
3302 static int talitos_probe(struct platform_device
*ofdev
)
3304 struct device
*dev
= &ofdev
->dev
;
3305 struct device_node
*np
= ofdev
->dev
.of_node
;
3306 struct talitos_private
*priv
;
3309 struct resource
*res
;
3311 priv
= devm_kzalloc(dev
, sizeof(struct talitos_private
), GFP_KERNEL
);
3315 INIT_LIST_HEAD(&priv
->alg_list
);
3317 dev_set_drvdata(dev
, priv
);
3319 priv
->ofdev
= ofdev
;
3321 spin_lock_init(&priv
->reg_lock
);
3323 res
= platform_get_resource(ofdev
, IORESOURCE_MEM
, 0);
3326 priv
->reg
= devm_ioremap(dev
, res
->start
, resource_size(res
));
3328 dev_err(dev
, "failed to of_iomap\n");
3333 /* get SEC version capabilities from device tree */
3334 of_property_read_u32(np
, "fsl,num-channels", &priv
->num_channels
);
3335 of_property_read_u32(np
, "fsl,channel-fifo-len", &priv
->chfifo_len
);
3336 of_property_read_u32(np
, "fsl,exec-units-mask", &priv
->exec_units
);
3337 of_property_read_u32(np
, "fsl,descriptor-types-mask",
3340 if (!is_power_of_2(priv
->num_channels
) || !priv
->chfifo_len
||
3341 !priv
->exec_units
|| !priv
->desc_types
) {
3342 dev_err(dev
, "invalid property data in device tree node\n");
3347 if (of_device_is_compatible(np
, "fsl,sec3.0"))
3348 priv
->features
|= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT
;
3350 if (of_device_is_compatible(np
, "fsl,sec2.1"))
3351 priv
->features
|= TALITOS_FTR_HW_AUTH_CHECK
|
3352 TALITOS_FTR_SHA224_HWINIT
|
3353 TALITOS_FTR_HMAC_OK
;
3355 if (of_device_is_compatible(np
, "fsl,sec1.0"))
3356 priv
->features
|= TALITOS_FTR_SEC1
;
3358 if (of_device_is_compatible(np
, "fsl,sec1.2")) {
3359 priv
->reg_deu
= priv
->reg
+ TALITOS12_DEU
;
3360 priv
->reg_aesu
= priv
->reg
+ TALITOS12_AESU
;
3361 priv
->reg_mdeu
= priv
->reg
+ TALITOS12_MDEU
;
3362 stride
= TALITOS1_CH_STRIDE
;
3363 } else if (of_device_is_compatible(np
, "fsl,sec1.0")) {
3364 priv
->reg_deu
= priv
->reg
+ TALITOS10_DEU
;
3365 priv
->reg_aesu
= priv
->reg
+ TALITOS10_AESU
;
3366 priv
->reg_mdeu
= priv
->reg
+ TALITOS10_MDEU
;
3367 priv
->reg_afeu
= priv
->reg
+ TALITOS10_AFEU
;
3368 priv
->reg_rngu
= priv
->reg
+ TALITOS10_RNGU
;
3369 priv
->reg_pkeu
= priv
->reg
+ TALITOS10_PKEU
;
3370 stride
= TALITOS1_CH_STRIDE
;
3372 priv
->reg_deu
= priv
->reg
+ TALITOS2_DEU
;
3373 priv
->reg_aesu
= priv
->reg
+ TALITOS2_AESU
;
3374 priv
->reg_mdeu
= priv
->reg
+ TALITOS2_MDEU
;
3375 priv
->reg_afeu
= priv
->reg
+ TALITOS2_AFEU
;
3376 priv
->reg_rngu
= priv
->reg
+ TALITOS2_RNGU
;
3377 priv
->reg_pkeu
= priv
->reg
+ TALITOS2_PKEU
;
3378 priv
->reg_keu
= priv
->reg
+ TALITOS2_KEU
;
3379 priv
->reg_crcu
= priv
->reg
+ TALITOS2_CRCU
;
3380 stride
= TALITOS2_CH_STRIDE
;
3383 err
= talitos_probe_irq(ofdev
);
3387 if (has_ftr_sec1(priv
)) {
3388 if (priv
->num_channels
== 1)
3389 tasklet_init(&priv
->done_task
[0], talitos1_done_ch0
,
3390 (unsigned long)dev
);
3392 tasklet_init(&priv
->done_task
[0], talitos1_done_4ch
,
3393 (unsigned long)dev
);
3396 tasklet_init(&priv
->done_task
[0], talitos2_done_ch0_2
,
3397 (unsigned long)dev
);
3398 tasklet_init(&priv
->done_task
[1], talitos2_done_ch1_3
,
3399 (unsigned long)dev
);
3400 } else if (priv
->num_channels
== 1) {
3401 tasklet_init(&priv
->done_task
[0], talitos2_done_ch0
,
3402 (unsigned long)dev
);
3404 tasklet_init(&priv
->done_task
[0], talitos2_done_4ch
,
3405 (unsigned long)dev
);
3409 priv
->chan
= devm_kcalloc(dev
,
3411 sizeof(struct talitos_channel
),
3414 dev_err(dev
, "failed to allocate channel management space\n");
3419 priv
->fifo_len
= roundup_pow_of_two(priv
->chfifo_len
);
3421 for (i
= 0; i
< priv
->num_channels
; i
++) {
3422 priv
->chan
[i
].reg
= priv
->reg
+ stride
* (i
+ 1);
3423 if (!priv
->irq
[1] || !(i
& 1))
3424 priv
->chan
[i
].reg
+= TALITOS_CH_BASE_OFFSET
;
3426 spin_lock_init(&priv
->chan
[i
].head_lock
);
3427 spin_lock_init(&priv
->chan
[i
].tail_lock
);
3429 priv
->chan
[i
].fifo
= devm_kcalloc(dev
,
3431 sizeof(struct talitos_request
),
3433 if (!priv
->chan
[i
].fifo
) {
3434 dev_err(dev
, "failed to allocate request fifo %d\n", i
);
3439 atomic_set(&priv
->chan
[i
].submit_count
,
3440 -(priv
->chfifo_len
- 1));
3443 dma_set_mask(dev
, DMA_BIT_MASK(36));
3445 /* reset and initialize the h/w */
3446 err
= init_device(dev
);
3448 dev_err(dev
, "failed to initialize device\n");
3452 /* register the RNG, if available */
3453 if (hw_supports(dev
, DESC_HDR_SEL0_RNG
)) {
3454 err
= talitos_register_rng(dev
);
3456 dev_err(dev
, "failed to register hwrng: %d\n", err
);
3459 dev_info(dev
, "hwrng\n");
3462 /* register crypto algorithms the device supports */
3463 for (i
= 0; i
< ARRAY_SIZE(driver_algs
); i
++) {
3464 if (hw_supports(dev
, driver_algs
[i
].desc_hdr_template
)) {
3465 struct talitos_crypto_alg
*t_alg
;
3466 struct crypto_alg
*alg
= NULL
;
3468 t_alg
= talitos_alg_alloc(dev
, &driver_algs
[i
]);
3469 if (IS_ERR(t_alg
)) {
3470 err
= PTR_ERR(t_alg
);
3471 if (err
== -ENOTSUPP
)
3476 switch (t_alg
->algt
.type
) {
3477 case CRYPTO_ALG_TYPE_SKCIPHER
:
3478 err
= crypto_register_skcipher(
3479 &t_alg
->algt
.alg
.skcipher
);
3480 alg
= &t_alg
->algt
.alg
.skcipher
.base
;
3483 case CRYPTO_ALG_TYPE_AEAD
:
3484 err
= crypto_register_aead(
3485 &t_alg
->algt
.alg
.aead
);
3486 alg
= &t_alg
->algt
.alg
.aead
.base
;
3489 case CRYPTO_ALG_TYPE_AHASH
:
3490 err
= crypto_register_ahash(
3491 &t_alg
->algt
.alg
.hash
);
3492 alg
= &t_alg
->algt
.alg
.hash
.halg
.base
;
3496 dev_err(dev
, "%s alg registration failed\n",
3497 alg
->cra_driver_name
);
3498 devm_kfree(dev
, t_alg
);
3500 list_add_tail(&t_alg
->entry
, &priv
->alg_list
);
3503 if (!list_empty(&priv
->alg_list
))
3504 dev_info(dev
, "%s algorithms registered in /proc/crypto\n",
3505 (char *)of_get_property(np
, "compatible", NULL
));
3510 talitos_remove(ofdev
);
3515 static const struct of_device_id talitos_match
[] = {
3516 #ifdef CONFIG_CRYPTO_DEV_TALITOS1
3518 .compatible
= "fsl,sec1.0",
3521 #ifdef CONFIG_CRYPTO_DEV_TALITOS2
3523 .compatible
= "fsl,sec2.0",
3528 MODULE_DEVICE_TABLE(of
, talitos_match
);
3530 static struct platform_driver talitos_driver
= {
3533 .of_match_table
= talitos_match
,
3535 .probe
= talitos_probe
,
3536 .remove
= talitos_remove
,
3539 module_platform_driver(talitos_driver
);
3541 MODULE_LICENSE("GPL");
3542 MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3543 MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");