// SPDX-License-Identifier: GPL-2.0+
/*
 * Freescale FSL CAAM support for crypto API over QI backend.
 *
 * Copyright 2013-2016 Freescale Semiconductor, Inc.
 * Copyright 2016-2018 NXP
 */

#include "desc_constr.h"
#include "caamalg_desc.h"

#define CAAM_CRA_PRIORITY	2000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE	(AES_MAX_KEY_SIZE + \
				 SHA512_DIGEST_SIZE * 2)

#define DESC_MAX_USED_BYTES	(DESC_QI_AEAD_GIVENC_LEN + \
				 CAAM_MAX_KEY_SIZE)
#define DESC_MAX_USED_LEN	(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
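/*
 * Note: the shared descriptor buffers in struct caam_ctx below are sized for
 * the largest QI shared descriptor (AEAD givencrypt) plus worst-case key
 * material, expressed in CAAM_CMD_SZ (4-byte) command words.
 */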
struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	struct device *jrdev;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
	struct device *qidev;
	spinlock_t lock;	/* Protects multiple init of driver context */
	struct caam_drv_ctx *drv_ctx[NUM_OP];
};
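/*
 * Per-session descriptor construction: authenc-style AEADs build up to three
 * shared descriptors (encrypt, decrypt and, for IV-generating algorithms,
 * givencrypt). desc_inline_query() decides per descriptor whether the
 * class 1/class 2 keys fit inline or must be referenced by DMA address; its
 * verdict is consumed below through the inl_mask bits.
 */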
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 typeof(*alg), aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 ctx1_iv_off = 0;
	u32 *nonce = NULL;
	unsigned int data_len[2];
	u32 inl_mask;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	/* aead_encrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
			       ivsize, ctx->authsize, is_rfc3686, nonce,
			       ctx1_iv_off, true, ctrlpriv->era);

	/* aead_decrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_decap(ctx->sh_desc_dec, &ctx->cdata, &ctx->adata,
			       ivsize, ctx->authsize, alg->caam.geniv,
			       is_rfc3686, nonce, ctx1_iv_off, true,
			       ctrlpriv->era);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/* aead_givencrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_givencap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
				  ivsize, ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, true, ctrlpriv->era);

skip_givenc:
	return 0;
}
static int aead_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}
static int aead_setkey(struct crypto_aead *aead, const u8 *key,
		       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	dev_err(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	ret = gen_split_key(jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret)
		goto badkey;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	memzero_explicit(&keys, sizeof(keys));
	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
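/*
 * GCM and its RFC4106/RFC4543 variants carry only a class 1 (cipher) key, so
 * no split key is involved; the only sizing decision is whether the key can
 * live inline in the shared descriptor or must be referenced by DMA address
 * so that Job Descriptor plus Shared Descriptor still fit the 64-word
 * descriptor buffer.
 */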
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_gcm_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
			      ctx->authsize, true);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_gcm_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
			      ctx->authsize, true);

	return 0;
}
static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	ret = gcm_set_sh_desc(aead);
	if (ret)
		return ret;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			return ret;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			return ret;
		}
	}

	return 0;
}
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	ctx->cdata.key_virt = ctx->key;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4106_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4106_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	return 0;
}
static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);

	ret = rfc4106_set_sh_desc(aead);
	if (ret)
		return ret;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			return ret;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			return ret;
		}
	}

	return 0;
}
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	ctx->cdata.key_virt = ctx->key;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4543_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4543_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	return 0;
}
static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);

	ret = rfc4543_set_sh_desc(aead);
	if (ret)
		return ret;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			return ret;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			return ret;
		}
	}

	return 0;
}
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	int ret = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher encrypt, decrypt shared descriptors */
	cnstr_shdsc_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				   is_rfc3686, ctx1_iv_off);
	cnstr_shdsc_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				   is_rfc3686, ctx1_iv_off);

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	return ret;
badkey:
	crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
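/*
 * XTS consumes a double-length key: two AES keys of equal size, one for data
 * encryption and one for tweak computation, which is why only total lengths
 * of 2 * AES_MIN_KEY_SIZE or 2 * AES_MAX_KEY_SIZE are accepted below.
 */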
static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		dev_err(jrdev, "key size mismatch\n");
		goto badkey;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts skcipher encrypt, decrypt shared descriptors */
	cnstr_shdsc_xts_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata);
	cnstr_shdsc_xts_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata);

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	return ret;
badkey:
	crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @qm_sg_bytes: length of dma mapped h/w link table
 * @qm_sg_dma: bus physical mapped address of h/w link table
 * @assoclen: associated data length, in CAAM endianness
 * @assoclen_dma: bus physical mapped address of req->assoclen
 * @drv_req: driver-specific request structure
 * @sgt: the h/w link table, followed by IV
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int qm_sg_bytes;
	dma_addr_t qm_sg_dma;
	unsigned int assoclen;
	dma_addr_t assoclen_dma;
	struct caam_drv_req drv_req;
	struct qm_sg_entry sgt[0];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @qm_sg_bytes: length of dma mapped h/w link table
 * @qm_sg_dma: bus physical mapped address of h/w link table
 * @drv_req: driver-specific request structure
 * @sgt: the h/w link table, followed by IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int qm_sg_bytes;
	dma_addr_t qm_sg_dma;
	struct caam_drv_req drv_req;
	struct qm_sg_entry sgt[0];
};
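/*
 * Both extended descriptors end in a zero-length sgt[] array: the h/w link
 * table and, immediately after it, the IV copy are carved out of the same
 * qi_cache allocation as the edesc itself, so a single DMA-able block backs
 * the whole per-request state.
 */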
static struct caam_drv_ctx *get_drv_ctx(struct caam_ctx *ctx,
					enum optype type)
{
	/*
	 * This function is called on the fast path with values of 'type'
	 * known at compile time. Invalid arguments are not expected and
	 * thus no checks are made.
	 */
	struct caam_drv_ctx *drv_ctx = ctx->drv_ctx[type];
	u32 *desc;

	if (unlikely(!drv_ctx)) {
		spin_lock(&ctx->lock);

		/* Read again to check if some other core init drv_ctx */
		drv_ctx = ctx->drv_ctx[type];
		if (!drv_ctx) {
			int cpu;

			if (type == ENCRYPT)
				desc = ctx->sh_desc_enc;
			else /* (type == DECRYPT) */
				desc = ctx->sh_desc_dec;

			cpu = smp_processor_id();
			drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
			if (!IS_ERR_OR_NULL(drv_ctx))
				drv_ctx->op_type = type;

			ctx->drv_ctx[type] = drv_ctx;
		}

		spin_unlock(&ctx->lock);
	}

	return drv_ctx;
}
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents, dma_addr_t iv_dma, int ivsize,
		       dma_addr_t qm_sg_dma, int qm_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (qm_sg_bytes)
		dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);
}
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	int ivsize = crypto_aead_ivsize(aead);

	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, edesc->qm_sg_dma, edesc->qm_sg_bytes);
	dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
}
static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, edesc->qm_sg_dma, edesc->qm_sg_bytes);
}
static void aead_done(struct caam_drv_req *drv_req, u32 status)
{
	struct device *qidev;
	struct aead_edesc *edesc;
	struct aead_request *aead_req = drv_req->app_ctx;
	struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
	struct caam_ctx *caam_ctx = crypto_aead_ctx(aead);
	int ecode = 0;

	qidev = caam_ctx->qidev;

	if (unlikely(status)) {
		u32 ssrc = status & JRSTA_SSRC_MASK;
		u8 err_id = status & JRSTA_CCBERR_ERRID_MASK;

		caam_jr_strstatus(qidev, status);
		/*
		 * verify hw auth check passed else return -EBADMSG
		 */
		if (ssrc == JRSTA_SSRC_CCB_ERROR &&
		    err_id == JRSTA_CCBERR_ERRID_ICVCHK)
			ecode = -EBADMSG;
		else
			ecode = -EIO;
	}

	edesc = container_of(drv_req, typeof(*edesc), drv_req);
	aead_unmap(qidev, edesc, aead_req);

	aead_request_complete(aead_req, ecode);
	qi_cache_free(edesc);
}
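/*
 * Each request reaches the hardware as a compound frame descriptor:
 * fd_sgt[1] points at the input side (assoclen word, optional IV and source
 * data, gathered through the qm_sg link table) and fd_sgt[0] at the output
 * side. aead_edesc_alloc() below builds that link table and maps everything
 * for DMA.
 */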
/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 typeof(*alg), aead);
	struct device *qidev = ctx->qidev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		      GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	dma_addr_t qm_sg_dma, iv_dma = 0;
	int ivsize = 0;
	unsigned int authsize = ctx->authsize;
	int qm_sg_index = 0, qm_sg_ents = 0, qm_sg_bytes;
	int in_len, out_len;
	struct qm_sg_entry *sg_table, *fd_sgt;
	struct caam_drv_ctx *drv_ctx;

	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
	if (IS_ERR_OR_NULL(drv_ctx))
		return (struct aead_edesc *)drv_ctx;

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc)) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	if (likely(req->src == req->dst)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			qi_cache_free(edesc);
			return ERR_PTR(src_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			qi_cache_free(edesc);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			qi_cache_free(edesc);
			return ERR_PTR(dst_nents);
		}

		if (src_nents) {
			mapped_src_nents = dma_map_sg(qidev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(qidev, "unable to map source\n");
				qi_cache_free(edesc);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(qidev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(qidev, "unable to map destination\n");
				dma_unmap_sg(qidev, req->src, src_nents,
					     DMA_TO_DEVICE);
				qi_cache_free(edesc);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	if ((alg->caam.rfc3686 && encrypt) || !alg->caam.geniv)
		ivsize = crypto_aead_ivsize(aead);

	/*
	 * Create S/G table: req->assoclen, [IV,] req->src [, req->dst].
	 * Input is not contiguous.
	 */
	qm_sg_ents = 1 + !!ivsize + mapped_src_nents +
		     (mapped_dst_nents > 1 ? mapped_dst_nents : 0);
	sg_table = &edesc->sgt[0];
	qm_sg_bytes = qm_sg_ents * sizeof(*sg_table);
	if (unlikely(offsetof(struct aead_edesc, sgt) + qm_sg_bytes + ivsize >
		     CAAM_QI_MEMCACHE_SIZE)) {
		dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
			qm_sg_ents, ivsize);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	if (ivsize) {
		u8 *iv = (u8 *)(sg_table + qm_sg_ents);

		/* Make sure IV is located in a DMAable area */
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(qidev, iv_dma)) {
			dev_err(qidev, "unable to map IV\n");
			caam_unmap(qidev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = aead_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	edesc->assoclen = cpu_to_caam32(req->assoclen);
	edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,
					     DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->assoclen_dma)) {
		dev_err(qidev, "unable to map assoclen\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);
	qm_sg_index++;
	if (ivsize) {
		dma_to_qm_sg_one(sg_table + qm_sg_index, iv_dma, ivsize, 0);
		qm_sg_index++;
	}
	sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + qm_sg_index, 0);
	qm_sg_index += mapped_src_nents;

	if (mapped_dst_nents > 1)
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 qm_sg_index, 0);

	qm_sg_dma = dma_map_single(qidev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->qm_sg_dma = qm_sg_dma;
	edesc->qm_sg_bytes = qm_sg_bytes;

	out_len = req->assoclen + req->cryptlen +
		  (encrypt ? ctx->authsize : (-ctx->authsize));
	in_len = 4 + ivsize + req->assoclen + req->cryptlen;

	fd_sgt = &edesc->drv_req.fd_sgt[0];
	dma_to_qm_sg_one_last_ext(&fd_sgt[1], qm_sg_dma, in_len, 0);

	if (req->dst == req->src) {
		if (mapped_src_nents == 1)
			dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->src),
					 out_len, 0);
		else
			dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma +
					     (1 + !!ivsize) * sizeof(*sg_table),
					     out_len, 0);
	} else if (mapped_dst_nents == 1) {
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst), out_len,
				 0);
	} else {
		dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma + sizeof(*sg_table) *
				     qm_sg_index, out_len, 0);
	}

	return edesc;
}
static inline int aead_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int ret;

	if (unlikely(caam_congested))
		return -EAGAIN;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, encrypt);
	if (IS_ERR_OR_NULL(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(ctx->qidev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}
static int aead_encrypt(struct aead_request *req)
{
	return aead_crypt(req, true);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_crypt(req, false);
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return aead_crypt(req, true);
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return aead_crypt(req, false);
}
static void skcipher_done(struct caam_drv_req *drv_req, u32 status)
{
	struct skcipher_edesc *edesc;
	struct skcipher_request *req = drv_req->app_ctx;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *caam_ctx = crypto_skcipher_ctx(skcipher);
	struct device *qidev = caam_ctx->qidev;
	int ivsize = crypto_skcipher_ivsize(skcipher);

#ifdef DEBUG
	dev_err(qidev, "%s %d: status 0x%x\n", __func__, __LINE__, status);
#endif

	edesc = container_of(drv_req, typeof(*edesc), drv_req);

	if (status)
		caam_jr_strstatus(qidev, status);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
#endif

	skcipher_unmap(qidev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	if (edesc->drv_req.drv_ctx->op_type == ENCRYPT)
		scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
					 ivsize, ivsize, 0);

	qi_cache_free(edesc);
	skcipher_request_complete(req, status);
}
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *qidev = ctx->qidev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		      GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, qm_sg_ents, qm_sg_bytes;
	struct qm_sg_entry *sg_table, *fd_sgt;
	struct caam_drv_ctx *drv_ctx;

	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
	if (IS_ERR_OR_NULL(drv_ctx))
		return (struct skcipher_edesc *)drv_ctx;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (unlikely(req->src != req->dst)) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(qidev, "unable to map destination\n");
			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	}

	qm_sg_ents = 1 + mapped_src_nents;
	dst_sg_idx = qm_sg_ents;

	qm_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	qm_sg_bytes = qm_sg_ents * sizeof(struct qm_sg_entry);
	if (unlikely(offsetof(struct skcipher_edesc, sgt) + qm_sg_bytes +
		     ivsize > CAAM_QI_MEMCACHE_SIZE)) {
		dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
			qm_sg_ents, ivsize);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/* allocate space for base edesc, link tables and IV */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc)) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/* Make sure IV is located in a DMAable area */
	sg_table = &edesc->sgt[0];
	iv = (u8 *)(sg_table + qm_sg_ents);
	memcpy(iv, req->iv, ivsize);

	iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, iv_dma)) {
		dev_err(qidev, "unable to map IV\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->qm_sg_bytes = qm_sg_bytes;
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = skcipher_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	dma_to_qm_sg_one(sg_table, iv_dma, ivsize, 0);
	sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + 1, 0);

	if (mapped_dst_nents > 1)
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 dst_sg_idx, 0);

	edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	fd_sgt = &edesc->drv_req.fd_sgt[0];

	dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma,
				  ivsize + req->cryptlen, 0);

	if (req->src == req->dst) {
		dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma +
				     sizeof(*sg_table), req->cryptlen, 0);
	} else if (mapped_dst_nents > 1) {
		dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx *
				     sizeof(*sg_table), req->cryptlen, 0);
	} else {
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst),
				 req->cryptlen, 0);
	}

	return edesc;
}
static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ret;

	if (unlikely(caam_congested))
		return -EAGAIN;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block.
	 */
	if (!encrypt)
		scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
					 ivsize, ivsize, 0);

	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		skcipher_unmap(ctx->qidev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}

static int skcipher_encrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, true);
}

static int skcipher_decrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, false);
}
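/*
 * The algorithm templates below carry only the algorithm-specific fields;
 * common .base fields (module, priority, ctxsize, flags) and the init/exit
 * hooks are filled in at registration time by caam_skcipher_alg_init() and
 * caam_aead_alg_init().
 */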
static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
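/*
 * AEAD templates. The GCM-family entries use .cra_blocksize = 1 since GCM
 * behaves as a stream mode at the crypto API level, while the authenc()/
 * echainiv() entries inherit the block size of their inner CBC cipher. The
 * echainiv() variants additionally set .caam.geniv so the decrypt shared
 * descriptor accounts for the IV carried in front of the payload.
 */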
static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = 12,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
};
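/*
 * Key buffer DMA direction: when the Derived Key Protocol is used (era >= 6
 * authenc transforms, i.e. uses_dkp below) the CAAM writes the split key
 * back into ctx->key, so the mapping must be DMA_BIDIRECTIONAL; otherwise
 * the device only ever reads the key and DMA_TO_DEVICE suffices.
 */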
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	struct caam_drv_private *priv;

	/*
	 * distribute tfms across job rings to ensure in-order
	 * crypto request processing per tfm
	 */
	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	ctx->key_dma = dma_map_single(ctx->jrdev, ctx->key, sizeof(ctx->key),
				      ctx->dir);
	if (dma_mapping_error(ctx->jrdev, ctx->key_dma)) {
		dev_err(ctx->jrdev, "unable to map key\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	ctx->qidev = priv->qidev;

	spin_lock_init(&ctx->lock);
	ctx->drv_ctx[ENCRYPT] = NULL;
	ctx->drv_ctx[DECRYPT] = NULL;

	return 0;
}
static int caam_cra_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher);

	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
				false);
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg = container_of(alg, typeof(*caam_alg),
						      aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam,
				alg->setkey == aead_setkey);
}

static void caam_exit_common(struct caam_ctx *ctx)
{
	caam_drv_ctx_rel(ctx->drv_ctx[ENCRYPT]);
	caam_drv_ctx_rel(ctx->drv_ctx[DECRYPT]);

	dma_unmap_single(ctx->jrdev, ctx->key_dma, sizeof(ctx->key), ctx->dir);

	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	caam_exit_common(crypto_skcipher_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}

static void __exit caam_qi_algapi_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;

		if (t_alg->registered)
			crypto_unregister_skcipher(&t_alg->skcipher);
	}
}

static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
{
	struct skcipher_alg *alg = &t_alg->skcipher;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_cra_init;
	alg->exit = caam_cra_exit;
}

static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
static int __init caam_qi_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	of_node_put(dev_node);
	if (!pdev)
		return -ENODEV;

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv || !priv->qi_present) {
		err = -ENODEV;
		goto out_put_dev;
	}

	if (caam_dpaa2) {
		dev_info(ctrldev, "caam/qi frontend driver not suitable for DPAA 2.x, aborting...\n");
		err = -ENODEV;
		goto out_put_dev;
	}

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	if (priv->era < 10) {
		u32 cha_vid, cha_inst;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
	} else {
		u32 aesa, mdha;

		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
	}

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			dev_warn(priv->qidev, "%s alg registration failed\n",
				 t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP && alg_aai == OP_ALG_AAI_GCM)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		dev_info(priv->qidev, "algorithms registered in /proc/crypto\n");

out_put_dev:
	put_device(ctrldev);
	return err;
}

module_init(caam_qi_algapi_init);
module_exit(caam_qi_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Support for crypto API using CAAM-QI backend");
MODULE_AUTHOR("Freescale Semiconductor");