// SPDX-License-Identifier: GPL-2.0+
/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016-2019 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
49 #include "compat.h"
51 #include "regs.h"
52 #include "intern.h"
53 #include "desc_constr.h"
54 #include "jr.h"
55 #include "error.h"
56 #include "sg_sw_sec4.h"
57 #include "key_gen.h"
58 #include "caamalg_desc.h"
59 #include <crypto/engine.h>
60 #include <crypto/xts.h>
61 #include <asm/unaligned.h>

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
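
/*
 * Note: CAAM_DESC_BYTES_MAX is the 64-word descriptor buffer referenced
 * repeatedly below: 64 words * CAAM_CMD_SZ (4 bytes) = 256 bytes, shared
 * at execution time between the job descriptor and the shared descriptor.
 */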

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
	bool nodkp;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	struct crypto_engine_ctx enginectx;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
	bool xts_key_fallback;
	struct crypto_skcipher *fallback;
};

struct caam_skcipher_req_ctx {
	struct skcipher_edesc *edesc;
	struct skcipher_request fallback_req;
};

struct caam_aead_req_ctx {
	struct aead_edesc *edesc;
};

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/*
	 * In case |user key| > |derived key|, using DKP<imm,imm>
	 * would result in invalid opcodes (last bytes of user key) in
	 * the resulting descriptor. Use DKP<ptr,imm> instead => both
	 * virtual and dma key addresses are needed.
	 */
	ctx->adata.key_virt = ctx->key;
	ctx->adata.key_dma = ctx->key_dma;

	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;
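
	/*
	 * desc_inline_query() sets bit i of inl_mask when data_len[i] still
	 * fits in the remaining descriptor space: bit 0 covers the (split)
	 * authentication key, bit 1 the cipher key. The same query/mask
	 * pattern repeats for the decrypt and givencrypt descriptors below.
	 */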
	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
	int err;

	err = crypto_gcm_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	if (authsize != 16)
		return -EINVAL;

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

static int chachapoly_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, true, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, false, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int chachapoly_setauthsize(struct crypto_aead *aead,
				  unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);

	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;

	ctx->authsize = authsize;
	return chachapoly_set_sh_desc(aead);
}

static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;

	if (keylen != CHACHA_KEY_SIZE + saltlen)
		return -EINVAL;

	ctx->cdata.key_virt = key;
	ctx->cdata.keylen = keylen - saltlen;

	return chachapoly_set_sh_desc(aead);
}
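
/*
 * aead_setkey() lays out the authenc key material in ctx->key as
 * [ split authentication key (padded) | encryption key ]:
 *
 *	ctx->key:  <-- adata.keylen_pad --><-- cdata.keylen -->
 *
 * On Era 6+ parts the split key is derived by the DKP instruction inside
 * the shared descriptor, so only the raw auth key is copied here; older
 * parts derive it up front via gen_split_key().
 */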
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);

	print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
			     ctx->adata.keylen_pad + keys.enckeylen, 1);

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		return err;

	err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
	      aead_setkey(aead, key, keylen);

	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen - 4);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen - 4);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}
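
/*
 * skcipher_setkey() inlines the raw key in both shared descriptors;
 * ctx1_iv_off selects where the IV is loaded in the CONTEXT1 register
 * (16 for plain CTR, 16 + CTR_RFC3686_NONCE_SIZE for RFC3686, 0 for the
 * block modes), matching the offsets set up by the callers below.
 */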
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen, const u32 ctx1_iv_off)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	const bool is_rfc3686 = alg->caam.rfc3686;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, 0);
}

static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
				   const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
	keylen -= CTR_RFC3686_NONCE_SIZE;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	ctx1_iv_off = 16;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}

static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
				const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des3_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}
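
/*
 * XTS keys other than 2 x 128-bit / 2 x 256-bit, and (on Era <= 8 parts)
 * IVs with a non-zero upper half, are routed to the software fallback
 * tfm; see xts_skcipher_ivsize() and skcipher_crypt().
 */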
static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int err;

	err = xts_verify_key(skcipher, key, keylen);
	if (err) {
		dev_dbg(jrdev, "key size mismatch\n");
		return err;
	}

	if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
		ctx->xts_key_fallback = true;

	if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
		err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
		if (err)
			return err;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
			    void *context)
{
	struct aead_request *req = context;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
	struct aead_edesc *edesc;
	int ecode = 0;
	bool has_bklog;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = rctx->edesc;
	has_bklog = edesc->bklog;

	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		aead_request_complete(req, ecode);
	else
		crypto_finalize_aead_request(jrp->engine, req, ecode);
}

static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
				void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ecode = 0;
	bool has_bklog;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = rctx->edesc;
	has_bklog = edesc->bklog;
	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block (CBC mode) or last counter (CTR mode).
	 * This is used e.g. by the CTS mode.
	 */
	if (ivsize && !ecode) {
		memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
		       ivsize);

		print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
				     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
				     ivsize, 1);
	}

	caam_dump_sg("dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		skcipher_request_complete(req, ecode);
	else
		crypto_finalize_skcipher_request(jrp->engine, req, ecode);
}

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
						    0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->mapped_dst_nents) {
			dst_dma = 0;
			out_options = 0;
		} else if (edesc->mapped_dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}

static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12 bytes nonce in a single operation
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}

static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in skcipher job descriptor
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 in_options = 0, out_options = 0;
	dma_addr_t src_dma, dst_dma, ptr;
	int len, sec4_sg_index = 0;

	print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	dev_dbg(jrdev, "asked=%d, cryptlen %d\n",
		(int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);

	caam_dump_sg("src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (ivsize || edesc->mapped_src_nents > 1) {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
		in_options = LDST_SGF;
	} else {
		src_dma = sg_dma_address(req->src);
	}

	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
		out_options = in_options;
	} else if (!ivsize && edesc->mapped_dst_nents == 1) {
		dst_dma = sg_dma_address(req->dst);
	} else {
		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
			  sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}

	append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	int src_len, dst_len = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_len = req->assoclen + req->cryptlen;
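		/*
		 * dst carries the ICV: it grows by authsize on encrypt and
		 * shrinks by authsize on decrypt
		 */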
		dst_len = src_len + (encrypt ? authsize : (-authsize));

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, dst_len);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				dst_len);
			return ERR_PTR(dst_nents);
		}
	} else {
		src_len = req->assoclen + req->cryptlen +
			  (encrypt ? authsize : 0);

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover also the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	/*
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries.
	 */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	if (mapped_dst_nents > 1)
		sec4_sg_len += pad_sg_nents(mapped_dst_nents);
	else
		sec4_sg_len = pad_sg_nents(sec4_sg_len);

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;

	rctx->edesc = edesc;

	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, src_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, dst_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}

static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
{
	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	struct aead_edesc *edesc = rctx->edesc;
	u32 *desc = edesc->hw_desc;
	int ret;

	/*
	 * Only backlog requests are sent to crypto-engine since the others
	 * can be handled by CAAM, if free, especially since JR has up to 1024
	 * entries (more than the 10 entries from crypto-engine).
	 */
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
							     req);
	else
		ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);

	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
		aead_unmap(jrdev, edesc, req);
		kfree(rctx->edesc);
	}

	return ret;
}

static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, encrypt);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	return aead_enqueue_req(jrdev, req);
}

static int chachapoly_encrypt(struct aead_request *req)
{
	return chachapoly_crypt(req, true);
}

static int chachapoly_decrypt(struct aead_request *req)
{
	return chachapoly_crypt(req, false);
}

static inline int aead_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, encrypt);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	return aead_enqueue_req(jrdev, req);
}

static int aead_encrypt(struct aead_request *req)
{
	return aead_crypt(req, true);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_crypt(req, false);
}

static int aead_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = aead_request_cast(areq);
	struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;

	ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);

	if (ret != -EINPROGRESS) {
		aead_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;
	}

	return ret;
}

static inline int gcm_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
				 encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, encrypt);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	return aead_enqueue_req(jrdev, req);
}

static int gcm_encrypt(struct aead_request *req)
{
	return gcm_crypt(req, true);
}

static int gcm_decrypt(struct aead_request *req)
{
	return gcm_crypt(req, false);
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
}

/*
 * allocate and map the skcipher extended descriptor for skcipher
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	if (!ivsize && mapped_src_nents == 1)
		sec4_sg_ents = 0; // no need for an input hw s/g table
	else
		sec4_sg_ents = mapped_src_nents + !!ivsize;
	dst_sg_idx = sec4_sg_ents;

	/*
	 * Input, output HW S/G tables: [IV, src][dst, IV]
	 * IV entries point to the same buffer
	 * If src == dst, S/G entries are reused (S/G tables overlap)
	 *
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries. Logic:
	 * if (output S/G)
	 *	pad output S/G, if needed
	 * else if (input S/G) ...
	 *	pad input S/G, if needed
	 */
	if (ivsize || mapped_dst_nents > 1) {
		if (req->src == req->dst)
			sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
		else
			sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
						     !!ivsize);
	} else {
		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
	}

	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);
	rctx->edesc = edesc;

	/* Make sure IV is located in a DMAable area */
	if (ivsize) {
		iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
		if (dma_mapping_error(jrdev, iv_dma)) {
			dev_err(jrdev, "unable to map IV\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	}
	if (dst_sg_idx)
		sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
			      !!ivsize, 0);

	if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
		sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
			      dst_sg_idx, 0);

	if (ivsize)
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
				   mapped_dst_nents, iv_dma, ivsize, 0);

	if (ivsize || mapped_dst_nents > 1)
		sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
				    mapped_dst_nents - 1 + !!ivsize);

	if (sec4_sg_bytes) {
		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
						    sec4_sg_bytes,
						    DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
			dev_err(jrdev, "unable to map S/G table\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, iv_dma, ivsize, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->iv_dma = iv_dma;

	print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
			     sec4_sg_bytes, 1);

	return edesc;
}

static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;

	ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);

	if (ret != -EINPROGRESS) {
		skcipher_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;
	}

	return ret;
}
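
/*
 * Returns true when the upper half of the XTS IV (the high 64 bits of the
 * sector index) is non-zero; skcipher_crypt() sends such requests to the
 * fallback tfm on Era <= 8 parts, which only support a 64-bit sector index.
 */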
static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

	return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
}

static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int ret = 0;

	/*
	 * XTS is expected to return an error even for input length = 0
	 * Note that the case input length < block size will be caught during
	 * HW offloading and return an error.
	 */
	if (!req->cryptlen && !ctx->fallback)
		return 0;

	if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
			      ctx->xts_key_fallback)) {
		struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);

		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&rctx->fallback_req,
					      req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);

		return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
				 crypto_skcipher_decrypt(&rctx->fallback_req);
	}

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_skcipher_job(req, edesc, encrypt);

	print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	desc = edesc->hw_desc;
	/*
	 * Only backlog requests are sent to crypto-engine since the others
	 * can be handled by CAAM, if free, especially since JR has up to 1024
	 * entries (more than the 10 entries from crypto-engine).
	 */
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
								 req);
	else
		ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);

	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int skcipher_encrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, true);
}

static int skcipher_decrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, false);
}
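
/*
 * Algorithm template tables: the .skcipher/.aead member is what gets
 * registered with the crypto API, while .caam carries the CAAM class 1/2
 * algorithm selectors and mode bits used when building the shared
 * descriptors.
 */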
static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = ctr_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc3686_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam",
				.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des)",
				.cra_driver_name = "ecb-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(aes)",
				.cra_driver_name = "ecb-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des3_ede)",
				.cra_driver_name = "ecb-des3-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
	},
};

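/*
 * Table of AEAD algorithms. In addition to the class 1 (cipher) selector,
 * authenc-style entries carry a class 2 (authentication) selector. The
 * rfc3686/geniv/nodkp flags steer shared-descriptor construction: rfc3686
 * selects CTR mode with a per-key nonce, geniv marks IV-generating variants,
 * and nodkp marks algorithms that do not use DKP (the derived/split key
 * protocol) - cf. caam_aead_init() below.
 */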
static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = GCM_RFC4106_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = GCM_RFC4543_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = gcm_encrypt,
			.decrypt = gcm_decrypt,
			.ivsize = GCM_AES_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539(chacha20,poly1305)",
				.cra_driver_name = "rfc7539-chacha20-poly1305-"
						   "caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = CHACHAPOLY_IV_SIZE,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539esp(chacha20,poly1305)",
				.cra_driver_name = "rfc7539esp-chacha20-"
						   "poly1305-caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = 8,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
};

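/*
 * Common per-session setup: allocate a job ring and DMA-map the shared
 * descriptors together with the key. Note that sh_desc_enc, sh_desc_dec and
 * key are covered by a single dma_map_single_attrs() call, relying on their
 * being laid out contiguously in struct caam_ctx up to sh_desc_enc_dma.
 */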
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;
	const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
						   sh_desc_enc);

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma) -
					sh_desc_enc_offset,
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec) -
					  sh_desc_enc_offset;
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
				  sh_desc_enc_offset;

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}

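/*
 * skcipher tfm init. For XTS (the only mode flagged CRYPTO_ALG_NEED_FALLBACK
 * in driver_algs above) a software fallback tfm is allocated as well, and the
 * request context is sized to embed the fallback request so no extra
 * allocation is needed on the fallback path.
 */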
static int caam_cra_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher);
	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
	int ret = 0;

	ctx->enginectx.op.do_one_request = skcipher_do_one_req;

	if (alg_aai == OP_ALG_AAI_XTS) {
		const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
		struct crypto_skcipher *fallback;

		fallback = crypto_alloc_skcipher(tfm_name, 0,
						 CRYPTO_ALG_NEED_FALLBACK);
		if (IS_ERR(fallback)) {
			pr_err("Failed to allocate %s fallback: %ld\n",
			       tfm_name, PTR_ERR(fallback));
			return PTR_ERR(fallback);
		}

		ctx->fallback = fallback;
		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
					    crypto_skcipher_reqsize(fallback));
	} else {
		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
	}

	ret = caam_init_common(ctx, &caam_alg->caam, false);
	if (ret && ctx->fallback)
		crypto_free_skcipher(ctx->fallback);

	return ret;
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		 container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));

	ctx->enginectx.op.do_one_request = aead_do_one_req;

	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
}

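/*
 * Tear-down mirrors caam_init_common(): unmap the shared descriptor/key
 * region and release the job ring.
 */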
static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma) -
			       offsetof(struct caam_ctx, sh_desc_enc),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (ctx->fallback)
		crypto_free_skcipher(ctx->fallback);
	caam_exit_common(ctx);
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}

void caam_algapi_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;

		if (t_alg->registered)
			crypto_unregister_skcipher(&t_alg->skcipher);
	}
}

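/*
 * Note: caam_skcipher_alg_init() ORs into cra_flags (rather than assigning)
 * so that flags already set in the algorithm template, e.g.
 * CRYPTO_ALG_NEED_FALLBACK on xts(aes), are preserved.
 */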
static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
{
	struct skcipher_alg *alg = &t_alg->skcipher;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_KERN_DRIVER_ONLY);

	alg->init = caam_cra_init;
	alg->exit = caam_cra_exit;
}

static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
			      CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}

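/*
 * Probe the CAAM for instantiated accelerators before registering: era < 10
 * parts report CHA presence/version via the perfmon CHA id/instantiation
 * registers, while era >= 10 parts expose per-CHA version registers (vreg).
 * Algorithms whose required CHA is absent (or too limited, e.g. GCM on early
 * LP AES blocks, XTS on LP devices) are simply skipped rather than registered.
 */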
int caam_algapi_init(struct device *ctrldev)
{
	struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false, gcm_support;

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	if (priv->era < 10) {
		u32 cha_vid, cha_inst, aes_rn;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
		ccha_inst = 0;
		ptha_inst = 0;

		aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
			 CHA_ID_LS_AES_MASK;
		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
	} else {
		u32 aesa, mdha;

		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
		ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
		ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;

		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
	}

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		    OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/* Skip GCM algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}