/*
 * Freescale FSL CAAM support for crypto API over QI backend.
 *
 * Copyright 2013-2016 Freescale Semiconductor, Inc.
 * Copyright 2016-2017 NXP
 */

#include "compat.h"
#include "ctrl.h"
#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "sg_sw_qm.h"
#include "key_gen.h"
#include "qi.h"
#include "jr.h"
#include "caamalg_desc.h"

#define CAAM_CRA_PRIORITY		2000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define DESC_MAX_USED_BYTES		(DESC_QI_AEAD_GIVENC_LEN + \
					 CAAM_MAX_KEY_SIZE)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
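/*
 * Note: the shared descriptor buffers declared with DESC_MAX_USED_LEN below
 * are sized for the worst case (an AEAD givencrypt descriptor carrying the
 * largest split key), so a descriptor can be rebuilt in place on rekey
 * without any reallocation.
 */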
struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	struct device *jrdev;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t key_dma;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
	struct device *qidev;
	spinlock_t lock;	/* Protects multiple init of driver context */
	struct caam_drv_ctx *drv_ctx[NUM_OP];
};
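/*
 * The three shared descriptors above are rebuilt whenever the key or
 * authsize changes; any driver contexts already instantiated in drv_ctx[]
 * are then refreshed with caam_drv_ctx_update() by the setkey handlers.
 */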
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 typeof(*alg), aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 ctx1_iv_off = 0;
	u32 *nonce = NULL;
	unsigned int data_len[2];
	u32 inl_mask;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;
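	/*
	 * desc_inline_query() below decides, per shared descriptor, whether
	 * the split authentication key and the encryption key fit inline in
	 * the descriptor or must be referenced by DMA address: bit 0 of
	 * inl_mask covers adata, bit 1 covers cdata.
	 */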
	/* aead_encrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
			       ivsize, ctx->authsize, is_rfc3686, nonce,
			       ctx1_iv_off, true);
	/* aead_decrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_decap(ctx->sh_desc_dec, &ctx->cdata, &ctx->adata,
			       ivsize, ctx->authsize, alg->caam.geniv,
			       is_rfc3686, nonce, ctx1_iv_off, true);

	if (!alg->caam.geniv)
		goto skip_givenc;
	/* aead_givencrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_givencap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
				  ivsize, ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, true);

skip_givenc:
	return 0;
}
static int aead_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}
static int aead_setkey(struct crypto_aead *aead, const u8 *key,
		       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	dev_err(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_key(jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;
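	/*
	 * ctx->key now holds the DMA-mapped split authentication key; the
	 * encryption key is copied right after it (at adata.keylen_pad), and
	 * for rfc3686 the nonce sits at the tail of the encryption key, which
	 * is where aead_set_sh_desc() computed its nonce pointer.
	 */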
	/* append encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, DMA_TO_DEVICE);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

	ctx->cdata.keylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret)
		goto badkey;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode && strstr(alg_name, "rfc3686"));
	int ret = 0;

	memcpy(ctx->key, key, keylen);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* ablkcipher encrypt, decrypt, givencrypt shared descriptors */
	cnstr_shdsc_ablkcipher_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				     is_rfc3686, ctx1_iv_off);
	cnstr_shdsc_ablkcipher_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				     is_rfc3686, ctx1_iv_off);
	cnstr_shdsc_ablkcipher_givencap(ctx->sh_desc_givenc, &ctx->cdata,
					ivsize, is_rfc3686, ctx1_iv_off);

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[GIVENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[GIVENCRYPT],
					  ctx->sh_desc_givenc);
		if (ret) {
			dev_err(jrdev, "driver givenc context update failed\n");
			goto badkey;
		}
	}

	return ret;
badkey:
	crypto_ablkcipher_set_flags(ablkcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* xts ablkcipher encrypt, decrypt shared descriptors */
	cnstr_shdsc_xts_ablkcipher_encap(ctx->sh_desc_enc, &ctx->cdata);
	cnstr_shdsc_xts_ablkcipher_decap(ctx->sh_desc_dec, &ctx->cdata);

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	return ret;
badkey:
	crypto_ablkcipher_set_flags(ablkcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @qm_sg_bytes: length of dma mapped h/w link table
 * @qm_sg_dma: bus physical mapped address of h/w link table
 * @assoclen_dma: bus physical mapped address of req->assoclen
 * @drv_req: driver-specific request structure
 * @sgt: the h/w link table
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int qm_sg_bytes;
	dma_addr_t qm_sg_dma;
	dma_addr_t assoclen_dma;
	struct caam_drv_req drv_req;
	struct qm_sg_entry sgt[0];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @qm_sg_bytes: length of dma mapped h/w link table
 * @qm_sg_dma: bus physical mapped address of h/w link table
 * @drv_req: driver-specific request structure
 * @sgt: the h/w link table
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int qm_sg_bytes;
	dma_addr_t qm_sg_dma;
	struct caam_drv_req drv_req;
	struct qm_sg_entry sgt[0];
};
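/*
 * Driver contexts are created lazily, at most one per operation type per
 * tfm. get_drv_ctx() below uses the double-checked locking pattern: an
 * unlocked read first, then a re-read under ctx->lock before calling the
 * expensive caam_drv_ctx_init().
 */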
static struct caam_drv_ctx *get_drv_ctx(struct caam_ctx *ctx,
					enum optype type)
{
	/*
	 * This function is called on the fast path with values of 'type'
	 * known at compile time. Invalid arguments are not expected and
	 * thus no checks are made.
	 */
	struct caam_drv_ctx *drv_ctx = ctx->drv_ctx[type];
	u32 *desc;

	if (unlikely(!drv_ctx)) {
		spin_lock(&ctx->lock);

		/* Read again to check if some other core init drv_ctx */
		drv_ctx = ctx->drv_ctx[type];
		if (!drv_ctx) {
			int cpu;

			if (type == ENCRYPT)
				desc = ctx->sh_desc_enc;
			else if (type == DECRYPT)
				desc = ctx->sh_desc_dec;
			else /* (type == GIVENCRYPT) */
				desc = ctx->sh_desc_givenc;

			cpu = smp_processor_id();
			drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
			if (likely(!IS_ERR_OR_NULL(drv_ctx)))
				drv_ctx->op_type = type;

			ctx->drv_ctx[type] = drv_ctx;
		}

		spin_unlock(&ctx->lock);
	}

	return drv_ctx;
}
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents, dma_addr_t iv_dma, int ivsize,
		       enum optype op_type, dma_addr_t qm_sg_dma,
		       int qm_sg_bytes)
{
	if (dst != src) {
		dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize,
				 op_type == GIVENCRYPT ? DMA_FROM_DEVICE :
							 DMA_TO_DEVICE);
	if (qm_sg_bytes)
		dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);
}
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	int ivsize = crypto_aead_ivsize(aead);

	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, edesc->drv_req.drv_ctx->op_type,
		   edesc->qm_sg_dma, edesc->qm_sg_bytes);
	dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
}
static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, edesc->drv_req.drv_ctx->op_type,
		   edesc->qm_sg_dma, edesc->qm_sg_bytes);
}
static void aead_done(struct caam_drv_req *drv_req, u32 status)
{
	struct device *qidev;
	struct aead_edesc *edesc;
	struct aead_request *aead_req = drv_req->app_ctx;
	struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
	struct caam_ctx *caam_ctx = crypto_aead_ctx(aead);
	int ecode = 0;

	qidev = caam_ctx->qidev;

	if (unlikely(status)) {
		caam_jr_strstatus(qidev, status);
		ecode = -EIO;
	}

	edesc = container_of(drv_req, typeof(*edesc), drv_req);
	aead_unmap(qidev, edesc, aead_req);

	aead_request_complete(aead_req, ecode);
	qi_cache_free(edesc);
}
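/*
 * Extended descriptors are carved from the qi_cache slab: a single
 * qi_cache_alloc() holds the edesc bookkeeping, the embedded caam_drv_req
 * and the variable-length h/w link table (sgt[]) that follows it.
 */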
/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 typeof(*alg), aead);
	struct device *qidev = ctx->qidev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		      GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	dma_addr_t qm_sg_dma, iv_dma = 0;
	int ivsize = 0;
	unsigned int authsize = ctx->authsize;
	int qm_sg_index = 0, qm_sg_ents = 0, qm_sg_bytes;
	int in_len, out_len;
	struct qm_sg_entry *sg_table, *fd_sgt;
	struct caam_drv_ctx *drv_ctx;
	enum optype op_type = encrypt ? ENCRYPT : DECRYPT;

	drv_ctx = get_drv_ctx(ctx, op_type);
	if (unlikely(IS_ERR_OR_NULL(drv_ctx)))
		return (struct aead_edesc *)drv_ctx;

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc)) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	if (likely(req->src == req->dst)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			qi_cache_free(edesc);
			return ERR_PTR(src_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			qi_cache_free(edesc);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			qi_cache_free(edesc);
			return ERR_PTR(dst_nents);
		}

		if (src_nents) {
			mapped_src_nents = dma_map_sg(qidev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(qidev, "unable to map source\n");
				qi_cache_free(edesc);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(qidev, "unable to map destination\n");
			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	if ((alg->caam.rfc3686 && encrypt) || !alg->caam.geniv) {
		ivsize = crypto_aead_ivsize(aead);
		iv_dma = dma_map_single(qidev, req->iv, ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(qidev, iv_dma)) {
			dev_err(qidev, "unable to map IV\n");
			caam_unmap(qidev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, op_type, 0, 0);
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Create S/G table: req->assoclen, [IV,] req->src [, req->dst].
	 * Input is not contiguous.
	 */
	qm_sg_ents = 1 + !!ivsize + mapped_src_nents +
		     (mapped_dst_nents > 1 ? mapped_dst_nents : 0);
	sg_table = &edesc->sgt[0];
	qm_sg_bytes = qm_sg_ents * sizeof(*sg_table);

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = aead_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	edesc->assoclen_dma = dma_map_single(qidev, &req->assoclen, 4,
					     DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->assoclen_dma)) {
		dev_err(qidev, "unable to map assoclen\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, op_type, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);
	qm_sg_index++;
	if (ivsize) {
		dma_to_qm_sg_one(sg_table + qm_sg_index, iv_dma, ivsize, 0);
		qm_sg_index++;
	}
	sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + qm_sg_index, 0);
	qm_sg_index += mapped_src_nents;

	if (mapped_dst_nents > 1)
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 qm_sg_index, 0);

	qm_sg_dma = dma_map_single(qidev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, op_type, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->qm_sg_dma = qm_sg_dma;
	edesc->qm_sg_bytes = qm_sg_bytes;

	out_len = req->assoclen + req->cryptlen +
		  (encrypt ? ctx->authsize : (-ctx->authsize));
	in_len = 4 + ivsize + req->assoclen + req->cryptlen;
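	/*
	 * The compound frame carries two S/G entries: fd_sgt[1] describes the
	 * input (assoclen word + optional IV + source data, all through the
	 * qm S/G table mapped above), fd_sgt[0] describes the output.
	 */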
	fd_sgt = &edesc->drv_req.fd_sgt[0];
	dma_to_qm_sg_one_last_ext(&fd_sgt[1], qm_sg_dma, in_len, 0);

	if (req->dst == req->src) {
		if (mapped_src_nents == 1)
			dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->src),
					 out_len, 0);
		else
			dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma +
					     (1 + !!ivsize) * sizeof(*sg_table),
					     out_len, 0);
	} else if (mapped_dst_nents == 1) {
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst), out_len,
				 0);
	} else {
		dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma + sizeof(*sg_table) *
				     qm_sg_index, out_len, 0);
	}

	return edesc;
}
static inline int aead_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int ret;

	if (unlikely(caam_congested))
		return -EAGAIN;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, encrypt);
	if (IS_ERR_OR_NULL(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(ctx->qidev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}

static int aead_encrypt(struct aead_request *req)
{
	return aead_crypt(req, true);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_crypt(req, false);
}
static void ablkcipher_done(struct caam_drv_req *drv_req, u32 status)
{
	struct ablkcipher_edesc *edesc;
	struct ablkcipher_request *req = drv_req->app_ctx;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *caam_ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *qidev = caam_ctx->qidev;
#ifdef DEBUG
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(qidev, "%s %d: status 0x%x\n", __func__, __LINE__, status);
#endif

	edesc = container_of(drv_req, typeof(*edesc), drv_req);

	if (status)
		caam_jr_strstatus(qidev, status);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(qidev, edesc, req);
	qi_cache_free(edesc);

	ablkcipher_request_complete(req, status);
}
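/*
 * If the (already DMA-mapped) IV happens to sit immediately before a
 * single-segment source buffer, the input is handed to the hardware as one
 * contiguous buffer and the qm S/G entries for it are skipped — the
 * in_contig case below.
 */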
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, bool encrypt)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *qidev = ctx->qidev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma;
	bool in_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, qm_sg_ents;
	struct qm_sg_entry *sg_table, *fd_sgt;
	struct caam_drv_ctx *drv_ctx;
	enum optype op_type = encrypt ? ENCRYPT : DECRYPT;

	drv_ctx = get_drv_ctx(ctx, op_type);
	if (unlikely(IS_ERR_OR_NULL(drv_ctx)))
		return (struct ablkcipher_edesc *)drv_ctx;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (unlikely(req->src != req->dst)) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(qidev, "unable to map destination\n");
			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	}

	iv_dma = dma_map_single(qidev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, iv_dma)) {
		dev_err(qidev, "unable to map IV\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	if (mapped_src_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->src)) {
		in_contig = true;
		qm_sg_ents = 0;
	} else {
		in_contig = false;
		qm_sg_ents = 1 + mapped_src_nents;
	}
	dst_sg_idx = qm_sg_ents;

	/* allocate space for base edesc and link tables */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc)) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, op_type, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	qm_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sg_table = &edesc->sgt[0];
	edesc->qm_sg_bytes = qm_sg_ents * sizeof(*sg_table);
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = ablkcipher_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	if (!in_contig) {
		dma_to_qm_sg_one(sg_table, iv_dma, ivsize, 0);
		sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + 1, 0);
	}

	if (mapped_dst_nents > 1)
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 dst_sg_idx, 0);

	edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, op_type, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	fd_sgt = &edesc->drv_req.fd_sgt[0];

	if (!in_contig)
		dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma,
					  ivsize + req->nbytes, 0);
	else
		dma_to_qm_sg_one_last(&fd_sgt[1], iv_dma, ivsize + req->nbytes,
				      0);

	if (req->src == req->dst) {
		if (!in_contig)
			dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma +
					     sizeof(*sg_table), req->nbytes, 0);
		else
			dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->src),
					 req->nbytes, 0);
	} else if (mapped_dst_nents > 1) {
		dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx *
				     sizeof(*sg_table), req->nbytes, 0);
	} else {
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst),
				 req->nbytes, 0);
	}

	return edesc;
}
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
	struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *qidev = ctx->qidev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma;
	bool out_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	struct qm_sg_entry *sg_table, *fd_sgt;
	int dst_sg_idx, qm_sg_ents;
	struct caam_drv_ctx *drv_ctx;

	drv_ctx = get_drv_ctx(ctx, GIVENCRYPT);
	if (unlikely(IS_ERR_OR_NULL(drv_ctx)))
		return (struct ablkcipher_edesc *)drv_ctx;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (unlikely(req->src != req->dst)) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(qidev, "unable to map destination\n");
			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = src_nents;
		mapped_dst_nents = src_nents;
	}

	iv_dma = dma_map_single(qidev, creq->giv, ivsize, DMA_FROM_DEVICE);
	if (dma_mapping_error(qidev, iv_dma)) {
		dev_err(qidev, "unable to map IV\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	qm_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
	dst_sg_idx = qm_sg_ents;
	if (mapped_dst_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->dst)) {
		out_contig = true;
	} else {
		out_contig = false;
		qm_sg_ents += 1 + mapped_dst_nents;
	}

	/* allocate space for base edesc and link tables */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (!edesc) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, GIVENCRYPT, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	sg_table = &edesc->sgt[0];
	edesc->qm_sg_bytes = qm_sg_ents * sizeof(*sg_table);
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = ablkcipher_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	if (mapped_src_nents > 1)
		sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table, 0);

	if (!out_contig) {
		dma_to_qm_sg_one(sg_table + dst_sg_idx, iv_dma, ivsize, 0);
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 dst_sg_idx + 1, 0);
	}

	edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, GIVENCRYPT, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	fd_sgt = &edesc->drv_req.fd_sgt[0];

	if (mapped_src_nents > 1)
		dma_to_qm_sg_one_ext(&fd_sgt[1], edesc->qm_sg_dma, req->nbytes,
				     0);
	else
		dma_to_qm_sg_one(&fd_sgt[1], sg_dma_address(req->src),
				 req->nbytes, 0);

	if (!out_contig)
		dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx *
				     sizeof(*sg_table), ivsize + req->nbytes,
				     0);
	else
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst),
				 ivsize + req->nbytes, 0);

	return edesc;
}
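/*
 * The crypt entry points below only build the extended descriptor and
 * enqueue it; completion (unmap + request_complete) runs in
 * ablkcipher_done(). -EAGAIN is returned early when the QI backend signals
 * congestion. Note that for givencrypt the IV buffer (creq->giv) is mapped
 * DMA_FROM_DEVICE above, since the hardware generates and writes it back.
 */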
static inline int ablkcipher_crypt(struct ablkcipher_request *req, bool encrypt)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	int ret;

	if (unlikely(caam_congested))
		return -EAGAIN;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(ctx->qidev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}

static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	return ablkcipher_crypt(req, true);
}

static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	return ablkcipher_crypt(req, false);
}

static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	int ret;

	if (unlikely(caam_congested))
		return -EAGAIN;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(ctx->qidev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}
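/*
 * Algorithm templates: driver_algs[] describes the (giv)ablkcipher
 * algorithms and is instantiated at module init through caam_alg_alloc();
 * driver_aeads[] holds ready-made aead_alg entries that only need
 * caam_aead_alg_init() before registration.
 */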
#define template_ablkcipher	template_u.ablkcipher
struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	u32 type;
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;
	u32 class2_alg_type;
};

static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam-qi",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam-qi",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam-qi",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam-qi",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam-qi",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam-qi",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
static struct caam_aead_alg driver_aeads[] = {
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-aes-"
						   "caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-aes-"
						   "caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-aes-"
						   "caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-des-"
						   "caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-des-"
						   "caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-des-"
						   "caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-des-"
						   "caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
};
struct caam_crypto_alg {
	struct list_head entry;
	struct crypto_alg crypto_alg;
	struct caam_alg_entry caam;
};
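/*
 * Common per-tfm initialization: a job ring device is allocated so that key
 * material (split key generation in aead_setkey) is processed in order per
 * tfm, and the whole key buffer is DMA-mapped once up front; setkey then
 * only needs dma_sync_single_for_device().
 */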
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
{
	struct caam_drv_private *priv;

	/*
	 * distribute tfms across job rings to ensure in-order
	 * crypto request processing per tfm
	 */
	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	ctx->key_dma = dma_map_single(ctx->jrdev, ctx->key, sizeof(ctx->key),
				      DMA_TO_DEVICE);
	if (dma_mapping_error(ctx->jrdev, ctx->key_dma)) {
		dev_err(ctx->jrdev, "unable to map key\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	priv = dev_get_drvdata(ctx->jrdev->parent);
	ctx->qidev = priv->qidev;

	spin_lock_init(&ctx->lock);
	ctx->drv_ctx[ENCRYPT] = NULL;
	ctx->drv_ctx[DECRYPT] = NULL;
	ctx->drv_ctx[GIVENCRYPT] = NULL;

	return 0;
}
static int caam_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct caam_crypto_alg *caam_alg = container_of(alg, typeof(*caam_alg),
							crypto_alg);
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg = container_of(alg, typeof(*caam_alg),
						      aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}
static void caam_exit_common(struct caam_ctx *ctx)
{
	caam_drv_ctx_rel(ctx->drv_ctx[ENCRYPT]);
	caam_drv_ctx_rel(ctx->drv_ctx[DECRYPT]);
	caam_drv_ctx_rel(ctx->drv_ctx[GIVENCRYPT]);

	dma_unmap_single(ctx->jrdev, ctx->key_dma, sizeof(ctx->key),
			 DMA_TO_DEVICE);

	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
static struct list_head alg_list;
static void __exit caam_qi_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	if (!alg_list.next)
		return;

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;

	return t_alg;
}
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
static int __init caam_qi_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	of_node_put(dev_node);
	if (!pdev)
		return -ENODEV;

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv || !priv->qi_present)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;
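	/*
	 * Each template below is checked against the CHA instantiation
	 * registers read above, so only algorithms actually backed by DES,
	 * AES and MD hardware blocks (of sufficient digest size) end up
	 * registered with the crypto API.
	 */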
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			dev_warn(priv->qidev, "%s alg allocation failed\n",
				 alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			dev_warn(priv->qidev, "%s alg registration failed\n",
				 t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if (((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) &&
		    (alg_aai == OP_ALG_AAI_GCM))
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		dev_info(priv->qidev, "algorithms registered in /proc/crypto\n");

	return err;
}

module_init(caam_qi_algapi_init);
module_exit(caam_qi_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Support for crypto API using CAAM-QI backend");
MODULE_AUTHOR("Freescale Semiconductor");