/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
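/*
 * A minimal sketch of how such a job descriptor is assembled with the
 * desc_constr.h helpers (this mirrors what init_aead_job() below actually
 * does; sh_desc_dma, src_dma, dst_dma and the lengths/options stand for the
 * request-specific values):
 *
 *	init_job_desc_shared(desc, sh_desc_dma, desc_len(sh_desc),
 *			     HDR_SHARE_DEFER | HDR_REVERSE);
 *	append_seq_in_ptr(desc, src_dma, in_len, in_options);
 *	append_seq_out_ptr(desc, dst_dma, out_len, out_options);
 */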
#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"
/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
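/*
 * Worked example, assuming the usual desc.h values (CAAM_CMD_SZ = 4 and
 * CAAM_DESC_BYTES_MAX = 64 * CAAM_CMD_SZ = 256): with a DESC_JOB_IO_LEN of
 * e.g. 32 bytes, DESC_MAX_USED_BYTES = 256 - 32 = 224, i.e. up to
 * DESC_MAX_USED_LEN = 56 commands are available to each shared descriptor.
 */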
#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif
static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}
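	/*
	 * Key buffer layout at this point (established by aead_setkey()):
	 * the padded split authentication key, followed by the encryption
	 * key material; for rfc3686 the last CTR_RFC3686_NONCE_SIZE bytes
	 * of that material are the nonce, which is what the pointer math
	 * above extracts.
	 */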
	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);
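	/*
	 * desc_inline_query() sets one bit per data_len[] entry in inl_mask:
	 * bit 0 for the (split) authentication key, bit 1 for the cipher
	 * key. A set bit means that key still fits inline in the shared
	 * descriptor; a clear bit means it must be referenced by DMA
	 * address instead.
	 */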
	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
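	/*
	 * DKP here refers to the CAAM's Derived Key Protocol (available on
	 * Era 6 and later parts, hence the era check below), which lets the
	 * shared descriptor itself derive the HMAC split key from the raw
	 * authentication key, so no separate gen_split_key() job is needed.
	 */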
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret) {
		goto badkey;
	}

	/* append encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}
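/*
 * For both rfc4106 and rfc4543 the caller-supplied key material is thus
 * laid out as {AES key, 4-byte salt}; ctx->key keeps the salt right after
 * the AES key, which is how init_gcm_job() later finds it (it appends the
 * 4 bytes at ctx->key + ctx->cdata.keylen ahead of the per-request IV).
 */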
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}
static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;
	cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
					ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
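/*
 * XTS always takes two keys of equal size (a data-encryption key and a
 * tweak key), which is why the setkey above accepts only
 * 2 * AES_MIN_KEY_SIZE (XTS-AES-128) and 2 * AES_MAX_KEY_SIZE
 * (XTS-AES-256).
 */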
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
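/*
 * Both extended descriptors are allocated as a single block laid out as
 * {struct *_edesc | h/w job descriptor (desc_bytes) | sec4 S/G link table},
 * which is why the allocation sites below size the kzalloc() as
 * sizeof(*edesc) + desc_bytes + sec4_sg_bytes and derive sec4_sg by
 * pointer arithmetic past hw_desc.
 */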
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}
static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->nbytes, 1);

	ablkcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->info) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	scatterwalk_map_and_copy(req->info, req->dst, req->nbytes - ivsize,
				 ivsize, 0);

	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
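/*
 * Note the encrypt/decrypt asymmetry in the IV copy-back above and below:
 * after encryption the last ciphertext block lives in req->dst, while after
 * decryption it must be captured from req->src (the ciphertext input),
 * since req->dst then holds plaintext.
 */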
static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->nbytes, 1);

	ablkcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->info) to the last
	 * ciphertext block.
	 */
	scatterwalk_map_and_copy(req->info, req->src, req->nbytes - ivsize,
				 ivsize, 0);

	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}
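/*
 * The output sequence length differs by authsize in the two directions
 * because encryption appends the ICV (authentication tag) after the
 * payload, while decryption consumes and strips it.
 */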
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command
	 * supports having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	pr_err("asked=%d, nbytes=%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
#endif
	caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->nbytes, 1);

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (edesc->src_nents == 1 && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
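/*
 * Note that the input sequence above covers req->nbytes + ivsize: the IV is
 * prepended to the payload (either contiguously via iv_dma or as the first
 * S/G entry built in ablkcipher_edesc_alloc()), so the shared descriptor
 * can load it before processing the data.
 */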
/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->nbytes, 1);

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (edesc->src_nents == 1) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}
/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
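/*
 * The assoclen checks in the ipsec_gcm_* wrappers below reflect that
 * RFC4106/RFC4543 IPsec ESP requests carry at least the 8-byte
 * SPI + sequence number header in the associated data; anything shorter
 * cannot be a valid ESP header.
 */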
static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}
static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}
static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     req->assoclen + req->cryptlen, 1);

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool in_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}
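	/*
	 * If the IV mapping happens to sit immediately before a single
	 * contiguous source segment in DMA space, the engine can consume
	 * {IV, payload} as one flat buffer and no S/G table entries are
	 * needed for the input.
	 */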
	if (mapped_src_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->src)) {
		in_contig = true;
		sec4_sg_ents = 0;
	} else {
		in_contig = false;
		sec4_sg_ents = 1 + mapped_src_nents;
	}
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (!in_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + 1, 0);
	}

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = in_contig;
	return edesc;
}
static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_enc,
			    ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_dec,
			    ctx->sh_desc_dec_dma, edesc, req, iv_contig);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool out_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = src_nents;
		mapped_dst_nents = src_nents;
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
	dst_sg_idx = sec4_sg_ents;
	if (mapped_dst_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->dst)) {
		out_contig = true;
	} else {
		out_contig = false;
		sec4_sg_ents += 1 + mapped_dst_nents;
	}

	/* allocate space for base edesc and hw desc commands, link tables */
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (mapped_src_nents > 1)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
				   0);

	if (!out_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx,
				   iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx + 1, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = out_contig;
	return edesc;
}
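/*
 * In the givencrypt path the IV is an output: the engine writes the
 * generated IV through iv_dma as the first ivsize bytes of the output
 * sequence (hence the contiguity check against req->dst above and the
 * req->nbytes + ivsize output length in init_ablkcipher_giv_job()).
 */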
static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig = false;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
					   CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
				edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher
struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	u32 type;
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;
	u32 class2_alg_type;
};
static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
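
/*
 * Usage sketch (not part of the driver): the templates above are reached
 * through the kernel's generic symmetric-cipher API, which wraps the
 * ablkcipher implementations registered here. A minimal, hypothetical
 * in-kernel caller for "cbc(aes)" might look as follows; the my_* names
 * are placeholders and error handling is trimmed:
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, my_key, AES_MAX_KEY_SIZE);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      my_complete, my_ctx);
 *	skcipher_request_set_crypt(req, my_src_sg, my_dst_sg, my_len, my_iv);
 *	err = crypto_skcipher_encrypt(req);	(-EINPROGRESS when queued)
 *
 * The crypto core should prefer this implementation over software ones
 * because of its high cra_priority (CAAM_CRA_PRIORITY).
 */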
static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = GCM_RFC4106_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = GCM_RFC4543_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = gcm_encrypt,
			.decrypt = gcm_decrypt,
			.ivsize = GCM_AES_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
};
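
/*
 * Usage sketch (not part of the driver): the AEADs above are consumed
 * through the generic AEAD API. A minimal, hypothetical "gcm(aes)" caller
 * might look as follows; my_* names are placeholders, error handling is
 * trimmed, and both scatterlists must carry the associated data in front
 * of the payload:
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_aead_setkey(tfm, my_key, AES_MIN_KEY_SIZE);
 *	err = crypto_aead_setauthsize(tfm, AES_BLOCK_SIZE);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  my_complete, my_ctx);
 *	aead_request_set_ad(req, my_assoclen);
 *	aead_request_set_crypt(req, my_sg, my_sg, my_cryptlen, my_iv);
 *	err = crypto_aead_encrypt(req);
 *
 * The IV here is GCM_AES_IV_SIZE (12) bytes; the rfc4106/rfc4543 variants
 * instead take an 8-byte IV, with the 4-byte salt appended to the key.
 */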
struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;
	struct caam_alg_entry caam;
};
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
						      sh_desc_givenc);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
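
/*
 * Layout note: the single mapping created in caam_init_common() spans
 * struct caam_ctx from sh_desc_enc up to (but excluding) sh_desc_enc_dma,
 * i.e. all three shared descriptors plus the key buffer. The per-member
 * DMA handles are plain offsets into that one region, so each context
 * costs one map/unmap pair instead of four. The DMA_BIDIRECTIONAL case
 * covers Era 6+ parts, where descriptors using the Derived Key Protocol
 * can write the derived split key back into the context, so the device
 * must be able to update the mapped area.
 */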
static int caam_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct caam_crypto_alg *caam_alg =
		container_of(alg, struct caam_crypto_alg, crypto_alg);
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam, false);
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam,
				alg->setkey == aead_setkey);
}

static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
static void __exit caam_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	if (!alg_list.next)
		return;

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;

	return t_alg;
}
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;
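
	/*
	 * Reading the capability registers, in short: each field of
	 * cha_num_ls is an instantiation count for one CHA type (zero means
	 * that block is absent from this part), while cha_id_ls carries the
	 * corresponding version IDs. A low-power (LP256) message-digest
	 * block tops out at SHA-256, which is why md_limit is lowered here
	 * and SHA-384/512 authencs get filtered out by the maxauthsize
	 * check in the AEAD registration loop below.
	 */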
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
			    OP_ALG_AAI_XTS)
				continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n",
				alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");