/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              |  (cipherKey)|
 *       .              |    |-------->|  (operation)|
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"
/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
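
/*
 * Descriptor budget, worked through once (informative note): the CAAM
 * descriptor buffer holds 64 words of CAAM_CMD_SZ (4) bytes each, i.e.
 * CAAM_DESC_BYTES_MAX = 256 bytes. A key may be inlined into a shared
 * descriptor only while the descriptor body, the job descriptor I/O
 * commands reserved above and all inlined key material still fit in
 * those 256 bytes; the rem_bytes computations and desc_inline_query()
 * calls below implement exactly this accounting.
 */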
#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif
static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};
/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};
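
/*
 * Note: the shared descriptors and the key buffer live inside the
 * context and are DMA-mapped once at context setup (outside this
 * section); the setkey/set_sh_desc paths below only rewrite them and
 * hand ownership back to the device via dma_sync_single_for_device().
 */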
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}
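
/*
 * How the inl_mask from desc_inline_query() is consumed above (note,
 * matching its use in this file): bit 0 answers "does data_len[0], the
 * padded split auth key, still fit inline?" and bit 1 the same for
 * data_len[1], the cipher key. A set bit selects an immediate key
 * (key_virt), a clear bit a key referenced by bus address (key_dma).
 */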
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* append encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
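
/*
 * Resulting ctx->key layout for authenc algorithms (sketch):
 *
 *   [ auth key, split or raw, padded to adata.keylen_pad | enc key ]
 *
 * On era >= 6 parts the raw auth key is kept and the Derived Key
 * Protocol (DKP) performs the split when the shared descriptor runs;
 * either way the cipher key starts at offset ctx->adata.keylen_pad.
 */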
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}
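
/*
 * RFC4106 key material layout (sketch): the caller passes
 * [ AES key | 4-byte salt ]; the salt stays in ctx->key right after the
 * AES key and becomes the fixed part of the nonce, which is why only
 * keylen - 4 bytes count as cipher key above.
 */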
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}
static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;
	cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
					ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
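
/*
 * XTS note: the API-level key is the concatenation of two equal-length
 * AES keys (key1 for the data units, key2 for the tweak), hence the
 * 2 * AES_{MIN,MAX}_KEY_SIZE check above.
 */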
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}
static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->nbytes, 1);

	ablkcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->info) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	scatterwalk_map_and_copy(req->info, req->dst, req->nbytes - ivsize,
				 ivsize, 0);

	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->nbytes, 1);

	ablkcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->info) to the last
	 * ciphertext block.
	 */
	scatterwalk_map_and_copy(req->info, req->src, req->nbytes - ivsize,
				 ivsize, 0);

	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
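
/*
 * On decryption the last ciphertext block still lives in the source
 * buffer (the destination now holds plaintext), so the IV for the next
 * chained request is copied from req->src above, not from req->dst as
 * in the encrypt completion path.
 */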
/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}
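
/*
 * Output sequence length, spelled out: on encrypt the device emits
 * assoclen + cryptlen bytes of payload plus the authsize-byte ICV, so
 * the out pointer covers assoclen + cryptlen + authsize. On decrypt the
 * ICV is consumed from the input and checked, and only
 * assoclen + cryptlen - authsize bytes come back out.
 */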
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE |
		   last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	pr_err("asked=%d, nbytes=%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
#endif
	caam_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->nbytes, 1);

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (edesc->src_nents == 1 && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__) ": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->nbytes, 1);

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (edesc->src_nents == 1) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}
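
/*
 * Note the reversed IV flow compared to init_ablkcipher_job(): for
 * givencrypt the IV is generated by the device, so it is part of the
 * *output* sequence (written ahead of the ciphertext) rather than
 * prepended to the input.
 */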
/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
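
/*
 * Memory layout of one kzalloc'd extended descriptor (sketch):
 *
 *   [ struct aead_edesc | h/w job descriptor (desc_bytes) | sec4 S/G ]
 *
 * which is why edesc->sec4_sg is derived by pointer arithmetic above and
 * the S/G tail can be DMA-mapped with a single dma_map_single() call.
 */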
static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}
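
/*
 * The 8-byte floor corresponds to the smallest RFC4106 ESP AAD,
 * SPI (4 bytes) + 32-bit sequence number (4 bytes); extended sequence
 * numbers yield 12. Shorter assoclen values cannot be valid here.
 */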
static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}
static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     req->assoclen + req->cryptlen, 1);

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool in_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	if (mapped_src_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->src)) {
		in_contig = true;
		sec4_sg_ents = 0;
	} else {
		in_contig = false;
		sec4_sg_ents = 1 + mapped_src_nents;
	}
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (!in_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + 1, 0);
	}

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = in_contig;
	return edesc;
}
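
/*
 * The contiguity test above (iv_dma + ivsize == sg_dma_address) is an
 * optimization: when the DMA-mapped IV happens to sit immediately
 * before a single-segment source, the device can read IV + payload as
 * one flat sequence and the sec4 S/G table for the input is skipped.
 */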
static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_ablkcipher_job(ctx->sh_desc_enc,
			    ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_ablkcipher_job(ctx->sh_desc_dec,
			    ctx->sh_desc_dec_dma, edesc, req, iv_contig);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool out_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = src_nents;
		mapped_dst_nents = src_nents;
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
	dst_sg_idx = sec4_sg_ents;
	if (mapped_dst_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->dst)) {
		out_contig = true;
	} else {
		out_contig = false;
		sec4_sg_ents += 1 + mapped_dst_nents;
	}

	/* allocate space for base edesc and hw desc commands, link tables */
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (mapped_src_nents > 1)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
				   0);

	if (!out_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx,
				   iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx + 1, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = out_contig;
	return edesc;
}
static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig = false;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
					   CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
				edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher
struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	u32 type;
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;
	u32 class2_alg_type;
};
[] = {
1872 /* ablkcipher descriptor */
1875 .driver_name
= "cbc-aes-caam",
1876 .blocksize
= AES_BLOCK_SIZE
,
1877 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1878 .template_ablkcipher
= {
1879 .setkey
= ablkcipher_setkey
,
1880 .encrypt
= ablkcipher_encrypt
,
1881 .decrypt
= ablkcipher_decrypt
,
1882 .givencrypt
= ablkcipher_givencrypt
,
1883 .geniv
= "<built-in>",
1884 .min_keysize
= AES_MIN_KEY_SIZE
,
1885 .max_keysize
= AES_MAX_KEY_SIZE
,
1886 .ivsize
= AES_BLOCK_SIZE
,
1888 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CBC
,
1891 .name
= "cbc(des3_ede)",
1892 .driver_name
= "cbc-3des-caam",
1893 .blocksize
= DES3_EDE_BLOCK_SIZE
,
1894 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1895 .template_ablkcipher
= {
1896 .setkey
= ablkcipher_setkey
,
1897 .encrypt
= ablkcipher_encrypt
,
1898 .decrypt
= ablkcipher_decrypt
,
1899 .givencrypt
= ablkcipher_givencrypt
,
1900 .geniv
= "<built-in>",
1901 .min_keysize
= DES3_EDE_KEY_SIZE
,
1902 .max_keysize
= DES3_EDE_KEY_SIZE
,
1903 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1905 .class1_alg_type
= OP_ALG_ALGSEL_3DES
| OP_ALG_AAI_CBC
,
1909 .driver_name
= "cbc-des-caam",
1910 .blocksize
= DES_BLOCK_SIZE
,
1911 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1912 .template_ablkcipher
= {
1913 .setkey
= ablkcipher_setkey
,
1914 .encrypt
= ablkcipher_encrypt
,
1915 .decrypt
= ablkcipher_decrypt
,
1916 .givencrypt
= ablkcipher_givencrypt
,
1917 .geniv
= "<built-in>",
1918 .min_keysize
= DES_KEY_SIZE
,
1919 .max_keysize
= DES_KEY_SIZE
,
1920 .ivsize
= DES_BLOCK_SIZE
,
1922 .class1_alg_type
= OP_ALG_ALGSEL_DES
| OP_ALG_AAI_CBC
,
1926 .driver_name
= "ctr-aes-caam",
1928 .type
= CRYPTO_ALG_TYPE_ABLKCIPHER
,
1929 .template_ablkcipher
= {
1930 .setkey
= ablkcipher_setkey
,
1931 .encrypt
= ablkcipher_encrypt
,
1932 .decrypt
= ablkcipher_decrypt
,
1934 .min_keysize
= AES_MIN_KEY_SIZE
,
1935 .max_keysize
= AES_MAX_KEY_SIZE
,
1936 .ivsize
= AES_BLOCK_SIZE
,
1938 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CTR_MOD128
,
1941 .name
= "rfc3686(ctr(aes))",
1942 .driver_name
= "rfc3686-ctr-aes-caam",
1944 .type
= CRYPTO_ALG_TYPE_GIVCIPHER
,
1945 .template_ablkcipher
= {
1946 .setkey
= ablkcipher_setkey
,
1947 .encrypt
= ablkcipher_encrypt
,
1948 .decrypt
= ablkcipher_decrypt
,
1949 .givencrypt
= ablkcipher_givencrypt
,
1950 .geniv
= "<built-in>",
1951 .min_keysize
= AES_MIN_KEY_SIZE
+
1952 CTR_RFC3686_NONCE_SIZE
,
1953 .max_keysize
= AES_MAX_KEY_SIZE
+
1954 CTR_RFC3686_NONCE_SIZE
,
1955 .ivsize
= CTR_RFC3686_IV_SIZE
,
1957 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_CTR_MOD128
,
1961 .driver_name
= "xts-aes-caam",
1962 .blocksize
= AES_BLOCK_SIZE
,
1963 .type
= CRYPTO_ALG_TYPE_ABLKCIPHER
,
1964 .template_ablkcipher
= {
1965 .setkey
= xts_ablkcipher_setkey
,
1966 .encrypt
= ablkcipher_encrypt
,
1967 .decrypt
= ablkcipher_decrypt
,
1969 .min_keysize
= 2 * AES_MIN_KEY_SIZE
,
1970 .max_keysize
= 2 * AES_MAX_KEY_SIZE
,
1971 .ivsize
= AES_BLOCK_SIZE
,
1973 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_XTS
,
1977 static struct caam_aead_alg driver_aeads
[] = {
1981 .cra_name
= "rfc4106(gcm(aes))",
1982 .cra_driver_name
= "rfc4106-gcm-aes-caam",
1985 .setkey
= rfc4106_setkey
,
1986 .setauthsize
= rfc4106_setauthsize
,
1987 .encrypt
= ipsec_gcm_encrypt
,
1988 .decrypt
= ipsec_gcm_decrypt
,
1989 .ivsize
= GCM_RFC4106_IV_SIZE
,
1990 .maxauthsize
= AES_BLOCK_SIZE
,
1993 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
1999 .cra_name
= "rfc4543(gcm(aes))",
2000 .cra_driver_name
= "rfc4543-gcm-aes-caam",
2003 .setkey
= rfc4543_setkey
,
2004 .setauthsize
= rfc4543_setauthsize
,
2005 .encrypt
= ipsec_gcm_encrypt
,
2006 .decrypt
= ipsec_gcm_decrypt
,
2007 .ivsize
= GCM_RFC4543_IV_SIZE
,
2008 .maxauthsize
= AES_BLOCK_SIZE
,
2011 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
2014 /* Galois Counter Mode */
2018 .cra_name
= "gcm(aes)",
2019 .cra_driver_name
= "gcm-aes-caam",
2022 .setkey
= gcm_setkey
,
2023 .setauthsize
= gcm_setauthsize
,
2024 .encrypt
= gcm_encrypt
,
2025 .decrypt
= gcm_decrypt
,
2026 .ivsize
= GCM_AES_IV_SIZE
,
2027 .maxauthsize
= AES_BLOCK_SIZE
,
2030 .class1_alg_type
= OP_ALG_ALGSEL_AES
| OP_ALG_AAI_GCM
,
2033 /* single-pass ipsec_esp descriptor */
2037 .cra_name
= "authenc(hmac(md5),"
2038 "ecb(cipher_null))",
2039 .cra_driver_name
= "authenc-hmac-md5-"
2040 "ecb-cipher_null-caam",
2041 .cra_blocksize
= NULL_BLOCK_SIZE
,
2043 .setkey
= aead_setkey
,
2044 .setauthsize
= aead_setauthsize
,
2045 .encrypt
= aead_encrypt
,
2046 .decrypt
= aead_decrypt
,
2047 .ivsize
= NULL_IV_SIZE
,
2048 .maxauthsize
= MD5_DIGEST_SIZE
,
2051 .class2_alg_type
= OP_ALG_ALGSEL_MD5
|
2052 OP_ALG_AAI_HMAC_PRECOMP
,
2058 .cra_name
= "authenc(hmac(sha1),"
2059 "ecb(cipher_null))",
2060 .cra_driver_name
= "authenc-hmac-sha1-"
2061 "ecb-cipher_null-caam",
2062 .cra_blocksize
= NULL_BLOCK_SIZE
,
2064 .setkey
= aead_setkey
,
2065 .setauthsize
= aead_setauthsize
,
2066 .encrypt
= aead_encrypt
,
2067 .decrypt
= aead_decrypt
,
2068 .ivsize
= NULL_IV_SIZE
,
2069 .maxauthsize
= SHA1_DIGEST_SIZE
,
2072 .class2_alg_type
= OP_ALG_ALGSEL_SHA1
|
2073 OP_ALG_AAI_HMAC_PRECOMP
,
2079 .cra_name
= "authenc(hmac(sha224),"
2080 "ecb(cipher_null))",
2081 .cra_driver_name
= "authenc-hmac-sha224-"
2082 "ecb-cipher_null-caam",
2083 .cra_blocksize
= NULL_BLOCK_SIZE
,
2085 .setkey
= aead_setkey
,
2086 .setauthsize
= aead_setauthsize
,
2087 .encrypt
= aead_encrypt
,
2088 .decrypt
= aead_decrypt
,
2089 .ivsize
= NULL_IV_SIZE
,
2090 .maxauthsize
= SHA224_DIGEST_SIZE
,
2093 .class2_alg_type
= OP_ALG_ALGSEL_SHA224
|
2094 OP_ALG_AAI_HMAC_PRECOMP
,
2100 .cra_name
= "authenc(hmac(sha256),"
2101 "ecb(cipher_null))",
2102 .cra_driver_name
= "authenc-hmac-sha256-"
2103 "ecb-cipher_null-caam",
2104 .cra_blocksize
= NULL_BLOCK_SIZE
,
2106 .setkey
= aead_setkey
,
2107 .setauthsize
= aead_setauthsize
,
2108 .encrypt
= aead_encrypt
,
2109 .decrypt
= aead_decrypt
,
2110 .ivsize
= NULL_IV_SIZE
,
2111 .maxauthsize
= SHA256_DIGEST_SIZE
,
2114 .class2_alg_type
= OP_ALG_ALGSEL_SHA256
|
2115 OP_ALG_AAI_HMAC_PRECOMP
,
2121 .cra_name
= "authenc(hmac(sha384),"
2122 "ecb(cipher_null))",
2123 .cra_driver_name
= "authenc-hmac-sha384-"
2124 "ecb-cipher_null-caam",
2125 .cra_blocksize
= NULL_BLOCK_SIZE
,
2127 .setkey
= aead_setkey
,
2128 .setauthsize
= aead_setauthsize
,
2129 .encrypt
= aead_encrypt
,
2130 .decrypt
= aead_decrypt
,
2131 .ivsize
= NULL_IV_SIZE
,
2132 .maxauthsize
= SHA384_DIGEST_SIZE
,
2135 .class2_alg_type
= OP_ALG_ALGSEL_SHA384
|
2136 OP_ALG_AAI_HMAC_PRECOMP
,
2142 .cra_name
= "authenc(hmac(sha512),"
2143 "ecb(cipher_null))",
2144 .cra_driver_name
= "authenc-hmac-sha512-"
2145 "ecb-cipher_null-caam",
2146 .cra_blocksize
= NULL_BLOCK_SIZE
,
2148 .setkey
= aead_setkey
,
2149 .setauthsize
= aead_setauthsize
,
2150 .encrypt
= aead_encrypt
,
2151 .decrypt
= aead_decrypt
,
2152 .ivsize
= NULL_IV_SIZE
,
2153 .maxauthsize
= SHA512_DIGEST_SIZE
,
2156 .class2_alg_type
= OP_ALG_ALGSEL_SHA512
|
2157 OP_ALG_AAI_HMAC_PRECOMP
,
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
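	/*
	 * The echainiv(...) wrappers above register the same authenc
	 * algorithms behind an IV-generator template; their .caam.geniv
	 * flag tells the shared-descriptor construction code that the IV
	 * is generated by the descriptor rather than supplied by the
	 * caller with the request.
	 */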
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
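	/*
	 * The cbc(des3_ede) and cbc(des) families above mirror the
	 * cbc(aes) set entry for entry; only the class1 algorithm
	 * selector and the block size (which doubles as the IV size for
	 * these CBC modes) change per cipher.
	 */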
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
};
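/*
 * A minimal usage sketch (not part of this driver): once registered,
 * these transforms are reachable through the generic AEAD API by
 * cra_name, with this implementation normally winning selection via
 * CAAM_CRA_PRIORITY. Error handling is abbreviated and the key buffer
 * is a placeholder; authenc() expects the composite key blob parsed by
 * crypto_authenc_extractkeys():
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, SHA1_DIGEST_SIZE);
 *	... build an aead_request and call crypto_aead_encrypt() ...
 *	crypto_free_aead(tfm);
 */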
struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;
	struct caam_alg_entry caam;
};
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
						      sh_desc_givenc);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
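/*
 * Layout note (added for clarity): the single dma_map_single_attrs()
 * call above covers everything from sh_desc_enc up to, but not
 * including, sh_desc_enc_dma in struct caam_ctx -- all three shared
 * descriptors plus the key material -- in one contiguous mapping. The
 * per-descriptor DMA addresses are then plain offsets into it, e.g.
 *
 *	ctx->sh_desc_dec_dma = dma_addr +
 *			       offsetof(struct caam_ctx, sh_desc_dec);
 *
 * DMA_BIDIRECTIONAL is needed on era >= 6 transforms that use DKP,
 * since the derived split key is written back into the mapped region.
 */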
static int caam_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct caam_crypto_alg *caam_alg =
		container_of(alg, struct caam_crypto_alg, crypto_alg);
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam, false);
}
static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam,
				alg->setkey == aead_setkey);
}
static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
static void __exit caam_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	if (!alg_list.next)
		return;

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;

	return t_alg;
}
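/*
 * For illustration only, a hypothetical template entry of the shape
 * caam_alg_alloc() consumes (the real table lives in driver_algs and
 * its template_ablkcipher contents are elided here):
 *
 *	{
 *		.name = "cbc(aes)",
 *		.driver_name = "cbc-aes-caam",
 *		.blocksize = AES_BLOCK_SIZE,
 *		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
 *		.template_ablkcipher = { ... },
 *		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
 *	},
 */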
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
			    OP_ALG_AAI_XTS)
				continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n", alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
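/*
 * Summary of the gating above: a template is registered only if (1) the
 * required CHAs are instantiated (des_inst/aes_inst/md_inst), (2) the
 * CHA revision supports the mode (low-power AES CHAs lack XTS and GCM),
 * and (3) the MD CHA can produce the requested digest size (LP256 parts
 * cap md_limit at SHA256_DIGEST_SIZE). Registration failures are logged
 * and skipped rather than aborting the whole module init.
 */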
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");