// SPDX-License-Identifier: GPL-2.0+
/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016-2019, 2023 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |    |-------->|  (operation)|
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */

#include "desc_constr.h"
#include "sg_sw_sec4.h"
#include "caamalg_desc.h"
#include <linux/unaligned.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/engine.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>
#include <linux/dma-mapping.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/string.h>

#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

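/*
 * With the typical configuration (CAAM_CMD_SZ == 4 bytes, a 64-word
 * descriptor buffer, i.e. CAAM_DESC_BYTES_MAX == 256, and
 * DESC_JOB_IO_LEN_MIN == 32), the arithmetic above leaves
 * 256 - 32 = 224 bytes, i.e. DESC_MAX_USED_LEN == 56 commands, available
 * for each shared descriptor.
 */
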
struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
	bool nodkp;
};

struct caam_aead_alg {
	struct aead_engine_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_engine_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
	bool xts_key_fallback;
	struct crypto_skcipher *fallback;
};

struct caam_skcipher_req_ctx {
	struct skcipher_edesc *edesc;
	struct skcipher_request fallback_req;
};

struct caam_aead_req_ctx {
	struct aead_edesc *edesc;
};

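/*
 * Both request contexts keep a pointer to the extended descriptor so that
 * the crypto-engine ->do_one_request() callbacks (aead_do_one_req() and
 * skcipher_do_one_req() below) can resubmit the already-prepared job when
 * a backlogged request is dispatched.
 */
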
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg,
						 aead.base);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/*
	 * In case |user key| > |derived key|, using DKP<imm,imm>
	 * would result in invalid opcodes (last bytes of user key) in
	 * the resulting descriptor. Use DKP<ptr,imm> instead => both
	 * virtual and dma key addresses are needed.
	 */
	ctx->adata.key_virt = ctx->key;
	ctx->adata.key_dma = ctx->key_dma;

	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}

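/*
 * Note on desc_inline_query() above: the mask it fills has one bit per
 * data item, in the order passed in data_len[] - here bit 0 covers the
 * (padded) split authentication key and bit 1 the cipher key - and a set
 * bit means that item fits inline in the shared descriptor instead of
 * being referenced through a DMA pointer.
 */
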
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
	int err;

	err = crypto_gcm_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);

	if (authsize != 16)
		return -EINVAL;

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

static int chachapoly_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, true, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, false, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int chachapoly_setauthsize(struct crypto_aead *aead,
				  unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);

	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;

	ctx->authsize = authsize;
	return chachapoly_set_sh_desc(aead);
}

static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;

	if (keylen != CHACHA_KEY_SIZE + saltlen)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.keylen = keylen - saltlen;

	return chachapoly_set_sh_desc(aead);
}

static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);

	print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
			     ctx->adata.keylen_pad + keys.enckeylen, 1);

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

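/*
 * After a successful aead_setkey(), ctx->key (and its ctx->key_dma mirror)
 * holds the concatenation consumed by the shared descriptors:
 *
 *	+------------------------------+----------------------+
 *	| auth split key               | encryption key       |
 *	| (padded to adata.keylen_pad) | (cdata.keylen bytes) |
 *	+------------------------------+----------------------+
 */
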
static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (err)
		return err;

	err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
	      aead_setkey(aead, key, keylen);

	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen - 4);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen - 4);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}

static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen, const u32 ctx1_iv_off)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher.base);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	const bool is_rfc3686 = alg->caam.rfc3686;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, 0);
}

static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
				   const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
	keylen -= CTR_RFC3686_NONCE_SIZE;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	ctx1_iv_off = 16;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}

static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
				const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des3_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}

static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int err;

	err = xts_verify_key(skcipher, key, keylen);
	if (err) {
		dev_dbg(jrdev, "key size mismatch\n");
		return err;
	}

	if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
		ctx->xts_key_fallback = true;

	if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
		err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
		if (err)
			return err;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

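/*
 * The xts(aes) software fallback set up above is used in two cases: on
 * era <= 8 hardware whenever the high 64 bits of the sector IV are
 * non-zero (see xts_skcipher_ivsize() below), and for key sizes other
 * than 2 x 128 / 2 x 256 bits, which the accelerator does not support.
 */
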
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
			    void *context)
{
	struct aead_request *req = context;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
	struct aead_edesc *edesc;
	int ecode = 0;
	bool has_bklog;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = rctx->edesc;
	has_bklog = edesc->bklog;

	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		aead_request_complete(req, ecode);
	else
		crypto_finalize_aead_request(jrp->engine, req, ecode);
}

static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc)
{
	return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
			 dma_get_cache_alignment());
}

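/*
 * Memory layout of the skcipher extended descriptor allocation (see
 * skcipher_edesc_alloc() below): the IV is carved out at the end, after
 * the h/w job descriptor and the link tables, aligned for DMA:
 *
 *	[ skcipher_edesc | hw_desc[] | sec4_sg[] | pad | IV ]
 *
 * which is why skcipher_edesc_iv() aligns past sec4_sg + sec4_sg_bytes.
 */
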
static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
				void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ecode = 0;
	bool has_bklog;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = rctx->edesc;
	has_bklog = edesc->bklog;
	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block (CBC mode) or last counter (CTR mode).
	 * This is used e.g. by the CTS mode.
	 */
	if (ivsize && !ecode) {
		memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize);

		print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
				     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
				     ivsize, 1);
	}

	caam_dump_sg("dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		skcipher_request_complete(req, ecode);
	else
		crypto_finalize_skcipher_request(jrp->engine, req, ecode);
}

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
						    0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->mapped_dst_nents) {
			dst_dma = 0;
			out_options = 0;
		} else if (edesc->mapped_dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}

static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

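/*
 * For the non-generic (RFC4106-style) case above, the 4-byte salt that
 * rfc4106_setkey() left at the end of ctx->key is appended first, so the
 * salt plus the 8-byte per-request IV together form the full 12-byte GCM
 * IV the hardware consumes.
 */
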
static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12 bytes nonce in a single operation
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}

static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg,
						 aead.base);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in skcipher job descriptor
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
	struct device *jrdev = ctx->jrdev;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 in_options = 0, out_options = 0;
	dma_addr_t src_dma, dst_dma, ptr;
	int len, sec4_sg_index = 0;

	print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	dev_dbg(jrdev, "asked=%d, cryptlen%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);

	caam_dump_sg("src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (ivsize || edesc->mapped_src_nents > 1) {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
		in_options = LDST_SGF;
	} else {
		src_dma = sg_dma_address(req->src);
	}

	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
		out_options = in_options;
	} else if (!ivsize && edesc->mapped_dst_nents == 1) {
		dst_dma = sg_dma_address(req->dst);
	} else {
		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
			  sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}

	append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	int src_len, dst_len = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_len = req->assoclen + req->cryptlen;
		dst_len = src_len + (encrypt ? authsize : (-authsize));

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, dst_len);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				dst_len);
			return ERR_PTR(dst_nents);
		}
	} else {
		src_len = req->assoclen + req->cryptlen +
			  (encrypt ? authsize : 0);

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover also the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	/*
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries.
	 */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	if (mapped_dst_nents > 1)
		sec4_sg_len += pad_sg_nents(mapped_dst_nents);
	else
		sec4_sg_len = pad_sg_nents(sec4_sg_len);

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;

	rctx->edesc = edesc;

	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, src_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, dst_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}

static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
{
	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	struct aead_edesc *edesc = rctx->edesc;
	u32 *desc = edesc->hw_desc;
	int ret;

	/*
	 * Only backlog requests are sent to crypto-engine since the others
	 * can be handled by CAAM, if free, especially since JR has up to 1024
	 * entries (more than the 10 entries from crypto-engine).
	 */
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
							     req);
	else
		ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);

	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
		aead_unmap(jrdev, edesc, req);
		kfree(rctx->edesc);
	}

	return ret;
}

*req
, bool encrypt
)
1474 struct aead_edesc
*edesc
;
1475 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
1476 struct caam_ctx
*ctx
= crypto_aead_ctx_dma(aead
);
1477 struct device
*jrdev
= ctx
->jrdev
;
1481 edesc
= aead_edesc_alloc(req
, CHACHAPOLY_DESC_JOB_IO_LEN
, &all_contig
,
1484 return PTR_ERR(edesc
);
1486 desc
= edesc
->hw_desc
;
1488 init_chachapoly_job(req
, edesc
, all_contig
, encrypt
);
1489 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__
)": ",
1490 DUMP_PREFIX_ADDRESS
, 16, 4, desc
, desc_bytes(desc
),
1493 return aead_enqueue_req(jrdev
, req
);
1496 static int chachapoly_encrypt(struct aead_request
*req
)
1498 return chachapoly_crypt(req
, true);
1501 static int chachapoly_decrypt(struct aead_request
*req
)
1503 return chachapoly_crypt(req
, false);
static inline int aead_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, encrypt);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	return aead_enqueue_req(jrdev, req);
}

static int aead_encrypt(struct aead_request *req)
{
	return aead_crypt(req, true);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_crypt(req, false);
}

static int aead_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = aead_request_cast(areq);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(crypto_aead_reqtfm(req));
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;

	ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);

	if (ret == -ENOSPC && engine->retry_support)
		return ret;

	if (ret != -EINPROGRESS) {
		aead_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;
	}

	return ret;
}

static inline int gcm_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
				 encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, encrypt);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	return aead_enqueue_req(jrdev, req);
}

static int gcm_encrypt(struct aead_request *req)
{
	return gcm_crypt(req, true);
}

static int gcm_decrypt(struct aead_request *req)
{
	return gcm_crypt(req, false);
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
}

/*
 * allocate and map the skcipher extended descriptor for skcipher
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
	unsigned int aligned_size;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	if (!ivsize && mapped_src_nents == 1)
		sec4_sg_ents = 0; // no need for an input hw s/g table
	else
		sec4_sg_ents = mapped_src_nents + !!ivsize;
	dst_sg_idx = sec4_sg_ents;

	/*
	 * Input, output HW S/G tables: [IV, src][dst, IV]
	 * IV entries point to the same buffer
	 * If src == dst, S/G entries are reused (S/G tables overlap)
	 *
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries. Logic:
	 * if (output S/G)
	 *	pad output S/G, if needed
	 * else if (input S/G) ...
	 *	pad input S/G, if needed
	 */
	if (ivsize || mapped_dst_nents > 1) {
		if (req->src == req->dst)
			sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
		else
			sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
						     !!ivsize);
	} else {
		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
	}

	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	aligned_size = sizeof(*edesc) + desc_bytes + sec4_sg_bytes;
	aligned_size = ALIGN(aligned_size, dma_get_cache_alignment());
	aligned_size += ~(ARCH_KMALLOC_MINALIGN - 1) &
			(dma_get_cache_alignment() - 1);
	aligned_size += ALIGN(ivsize, dma_get_cache_alignment());
	edesc = kzalloc(aligned_size, flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);
	rctx->edesc = edesc;

	/* Make sure IV is located in a DMAable area */
	if (ivsize) {
		iv = skcipher_edesc_iv(edesc);
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
		if (dma_mapping_error(jrdev, iv_dma)) {
			dev_err(jrdev, "unable to map IV\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	}
	if (dst_sg_idx)
		sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
			      !!ivsize, 0);

	if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
		sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
			      dst_sg_idx, 0);

	if (ivsize)
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
				   mapped_dst_nents, iv_dma, ivsize, 0);

	if (ivsize || mapped_dst_nents > 1)
		sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
				    mapped_dst_nents - 1 + !!ivsize);

	if (sec4_sg_bytes) {
		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
						    sec4_sg_bytes,
						    DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
			dev_err(jrdev, "unable to map S/G table\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, iv_dma, ivsize, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->iv_dma = iv_dma;

	print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
			     sec4_sg_bytes, 1);

	return edesc;
}

static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(crypto_skcipher_reqtfm(req));
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;

	ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);

	if (ret == -ENOSPC && engine->retry_support)
		return ret;

	if (ret != -EINPROGRESS) {
		skcipher_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;
	}

	return ret;
}

static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

	return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
}

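/*
 * xts_skcipher_ivsize() reports whether the upper half of the XTS IV
 * (the high 64 bits of the sector number) is non-zero - a case era <= 8
 * hardware cannot process, which is what forces the software fallback in
 * skcipher_crypt() below.
 */
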
static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int ret = 0;

	/*
	 * XTS is expected to return an error even for input length = 0
	 * Note that the case input length < block size will be caught during
	 * HW offloading and return an error.
	 */
	if (!req->cryptlen && !ctx->fallback)
		return 0;

	if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
			      ctx->xts_key_fallback)) {
		struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);

		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&rctx->fallback_req,
					      req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);

		return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
				 crypto_skcipher_decrypt(&rctx->fallback_req);
	}

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_skcipher_job(req, edesc, encrypt);

	print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	desc = edesc->hw_desc;
	/*
	 * Only backlog requests are sent to crypto-engine since the others
	 * can be handled by CAAM, if free, especially since JR has up to 1024
	 * entries (more than the 10 entries from crypto-engine).
	 */
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
								 req);
	else
		ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);

	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int skcipher_encrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, true);
}

static int skcipher_decrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, false);
}

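/*
 * A minimal usage sketch (standard kernel crypto API, not code from this
 * driver): once registered, the algorithms below are reached through the
 * usual skcipher interface, e.g.:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, 16);	 // lands in aes_skcipher_setkey()
 *	skcipher_request_set_crypt(req, src, dst, len, iv);
 *	crypto_skcipher_encrypt(req);		 // lands in skcipher_encrypt()
 */
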
static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher.base = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.skcipher.op = {
			.do_one_request = skcipher_do_one_req,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher.base = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.skcipher.op = {
			.do_one_request = skcipher_do_one_req,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher.base = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.skcipher.op = {
			.do_one_request = skcipher_do_one_req,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher.base = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = ctr_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.skcipher.op = {
			.do_one_request = skcipher_do_one_req,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher.base = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc3686_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.skcipher.op = {
			.do_one_request = skcipher_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher.base = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam",
				.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.skcipher.op = {
			.do_one_request = skcipher_do_one_req,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
	{
		.skcipher.base = {
			.base = {
				.cra_name = "ecb(des)",
				.cra_driver_name = "ecb-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
		},
		.skcipher.op = {
			.do_one_request = skcipher_do_one_req,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher.base = {
			.base = {
				.cra_name = "ecb(aes)",
				.cra_driver_name = "ecb-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
		},
		.skcipher.op = {
			.do_one_request = skcipher_do_one_req,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher.base = {
			.base = {
				.cra_name = "ecb(des3_ede)",
				.cra_driver_name = "ecb-des3-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
		},
		.skcipher.op = {
			.do_one_request = skcipher_do_one_req,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
	},
};

2068 static struct caam_aead_alg driver_aeads
[] = {
2072 .cra_name
= "rfc4106(gcm(aes))",
2073 .cra_driver_name
= "rfc4106-gcm-aes-caam",
2076 .setkey
= rfc4106_setkey
,
2077 .setauthsize
= rfc4106_setauthsize
,
2078 .encrypt
= ipsec_gcm_encrypt
,
2079 .decrypt
= ipsec_gcm_decrypt
,
2080 .ivsize
= GCM_RFC4106_IV_SIZE
,
2081 .maxauthsize
= AES_BLOCK_SIZE
,
2084 .do_one_request
= aead_do_one_req
,
2087 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
2094 .cra_name
= "rfc4543(gcm(aes))",
2095 .cra_driver_name
= "rfc4543-gcm-aes-caam",
2098 .setkey
= rfc4543_setkey
,
2099 .setauthsize
= rfc4543_setauthsize
,
2100 .encrypt
= ipsec_gcm_encrypt
,
2101 .decrypt
= ipsec_gcm_decrypt
,
2102 .ivsize
= GCM_RFC4543_IV_SIZE
,
2103 .maxauthsize
= AES_BLOCK_SIZE
,
2106 .do_one_request
= aead_do_one_req
,
2109 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
2113 /* Galois Counter Mode */
2117 .cra_name
= "gcm(aes)",
2118 .cra_driver_name
= "gcm-aes-caam",
2121 .setkey
= gcm_setkey
,
2122 .setauthsize
= gcm_setauthsize
,
2123 .encrypt
= gcm_encrypt
,
2124 .decrypt
= gcm_decrypt
,
2125 .ivsize
= GCM_AES_IV_SIZE
,
2126 .maxauthsize
= AES_BLOCK_SIZE
,
2129 .do_one_request
= aead_do_one_req
,
2132 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
	/* single-pass ipsec_esp descriptor */
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(md5),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha1),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha224),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha256),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha384),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha512),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(md5),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "rfc7539(chacha20,poly1305)",
				.cra_driver_name = "rfc7539-chacha20-poly1305-caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = CHACHAPOLY_IV_SIZE,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 | OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 | OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
	{
		.aead.base = {
			.base = {
				.cra_name = "rfc7539esp(chacha20,poly1305)",
				.cra_driver_name = "rfc7539esp-chacha20-poly1305-caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = 8,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.aead.op = {
			.do_one_request = aead_do_one_req,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 | OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 | OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
};
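/*
 * Minimal usage sketch (illustrative only, not part of this driver):
 * once registered, the templates above are reached through the generic
 * crypto API by cra_name, and the core prefers them over software
 * implementations because of CAAM_CRA_PRIORITY:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_aead_setkey(tfm, key, keylen);
 *		crypto_aead_setauthsize(tfm, 16);
 *		// ...build and submit requests via the aead_request_*() API...
 *		crypto_free_aead(tfm);
 *	}
 */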
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;
	const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
						   sh_desc_enc);

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma) -
					sh_desc_enc_offset,
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec) -
					  sh_desc_enc_offset;
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
				  sh_desc_enc_offset;

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
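/*
 * Layout note: sh_desc_enc, sh_desc_dec and key sit back to back at the
 * start of struct caam_ctx, so the single dma_map_single_attrs() call
 * above covers all three; sh_desc_dec_dma and key_dma are then derived
 * from the one mapping by offset arithmetic instead of being mapped
 * separately.
 */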
static int caam_cra_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher.base);
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
	u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
	int ret = 0;

	if (alg_aai == OP_ALG_AAI_XTS) {
		const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
		struct crypto_skcipher *fallback;

		fallback = crypto_alloc_skcipher(tfm_name, 0,
						 CRYPTO_ALG_NEED_FALLBACK);
		if (IS_ERR(fallback)) {
			pr_err("Failed to allocate %s fallback: %ld\n",
			       tfm_name, PTR_ERR(fallback));
			return PTR_ERR(fallback);
		}

		ctx->fallback = fallback;
		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
					    crypto_skcipher_reqsize(fallback));
	} else {
		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
	}

	ret = caam_init_common(ctx, &caam_alg->caam, false);
	if (ret && ctx->fallback)
		crypto_free_skcipher(ctx->fallback);

	return ret;
}
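/*
 * The XTS fallback tfm allocated above covers the cases the CAAM unit
 * cannot handle natively (tracked via ctx->xts_key_fallback); the
 * request size is enlarged so that a complete fallback request can be
 * built inside the caam_skcipher_req_ctx of every XTS request.
 */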
static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		container_of(alg, struct caam_aead_alg, aead.base);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));

	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
}

static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma) -
			       offsetof(struct caam_ctx, sh_desc_enc),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);

	if (ctx->fallback)
		crypto_free_skcipher(ctx->fallback);
	caam_exit_common(ctx);
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx_dma(tfm));
}
void caam_algapi_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_engine_unregister_aead(&t_alg->aead);
	}

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;

		if (t_alg->registered)
			crypto_engine_unregister_skcipher(&t_alg->skcipher);
	}
}
static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
{
	struct skcipher_alg *alg = &t_alg->skcipher.base;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
	alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_KERN_DRIVER_ONLY);

	alg->init = caam_cra_init;
	alg->exit = caam_cra_exit;
}

static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead.base;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
			      CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
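/*
 * cra_ctxsize is padded with crypto_dma_padding() so that the context
 * DMA-mapped in caam_init_common() can be retrieved DMA-aligned via
 * crypto_skcipher_ctx_dma()/crypto_aead_ctx_dma().
 */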
int caam_algapi_init(struct device *ctrldev)
{
	struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false, gcm_support;

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	if (priv->era < 10) {
		struct caam_perfmon __iomem *perfmon = &priv->jr[0]->perfmon;
		u32 cha_vid, cha_inst, aes_rn;

		cha_vid = rd_reg32(&perfmon->cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&perfmon->cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
		ccha_inst = 0;
		ptha_inst = 0;

		aes_rn = rd_reg32(&perfmon->cha_rev_ls) & CHA_ID_LS_AES_MASK;
		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
	} else {
		struct version_regs __iomem *vreg = &priv->jr[0]->vreg;
		u32 aesa, mdha;

		aesa = rd_reg32(&vreg->aesa);
		mdha = rd_reg32(&vreg->mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&vreg->desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
		ccha_inst = rd_reg32(&vreg->ccha) & CHA_VER_NUM_MASK;
		ptha_inst = rd_reg32(&vreg->ptha) & CHA_VER_NUM_MASK;

		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
	}

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		    OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_engine_register_skcipher(&t_alg->skcipher);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/* Skip GCM algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.base.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_engine_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");