/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)
/* max IV is max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
#define CAAM_MAX_IV_LENGTH		16

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

/* length of descriptors text */
#define DESC_AEAD_BASE			(4 * CAAM_CMD_SZ)
#define DESC_AEAD_ENC_LEN		(DESC_AEAD_BASE + 11 * CAAM_CMD_SZ)
#define DESC_AEAD_DEC_LEN		(DESC_AEAD_BASE + 15 * CAAM_CMD_SZ)
#define DESC_AEAD_GIVENC_LEN		(DESC_AEAD_ENC_LEN + 9 * CAAM_CMD_SZ)

/* Note: Nonce is counted in enckeylen */
#define DESC_AEAD_CTR_RFC3686_LEN	(4 * CAAM_CMD_SZ)

#define DESC_AEAD_NULL_BASE		(3 * CAAM_CMD_SZ)
#define DESC_AEAD_NULL_ENC_LEN		(DESC_AEAD_NULL_BASE + 11 * CAAM_CMD_SZ)
#define DESC_AEAD_NULL_DEC_LEN		(DESC_AEAD_NULL_BASE + 13 * CAAM_CMD_SZ)

#define DESC_GCM_BASE			(3 * CAAM_CMD_SZ)
#define DESC_GCM_ENC_LEN		(DESC_GCM_BASE + 16 * CAAM_CMD_SZ)
#define DESC_GCM_DEC_LEN		(DESC_GCM_BASE + 12 * CAAM_CMD_SZ)

#define DESC_RFC4106_BASE		(3 * CAAM_CMD_SZ)
#define DESC_RFC4106_ENC_LEN		(DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
#define DESC_RFC4106_DEC_LEN		(DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)

#define DESC_RFC4543_BASE		(3 * CAAM_CMD_SZ)
#define DESC_RFC4543_ENC_LEN		(DESC_RFC4543_BASE + 11 * CAAM_CMD_SZ)
#define DESC_RFC4543_DEC_LEN		(DESC_RFC4543_BASE + 12 * CAAM_CMD_SZ)

#define DESC_ABLKCIPHER_BASE		(3 * CAAM_CMD_SZ)
#define DESC_ABLKCIPHER_ENC_LEN		(DESC_ABLKCIPHER_BASE + \
					 20 * CAAM_CMD_SZ)
#define DESC_ABLKCIPHER_DEC_LEN		(DESC_ABLKCIPHER_BASE + \
					 15 * CAAM_CMD_SZ)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif
static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	int alg_op;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};
/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT);
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}
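/*
 * The jump pair above selects the OPERATION variant at run time: when the
 * descriptor executes under sharing (JUMP_COND_SHRD), the class 1 key
 * register was inherited from the sharing descriptor, so the decrypt
 * OPERATION is issued with the DK ("decrypt key") bit set instead of the
 * plain one.
 */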
/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}
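/*
 * FIFOLD_CLASS_BOTH snoops the payload into both the class 1 (cipher) and
 * class 2 (authentication) engines in a single pass, and the VLF flag
 * sizes the transfer from the variable sequence lengths set up by the
 * caller.
 */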
/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
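/*
 * Both VARSEQINLEN and VARSEQOUTLEN are set to the remaining sequence
 * input length (SEQINLEN + 0), so the VLF FIFO load/store pair above
 * always moves exactly one full request's worth of data in and out.
 */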
/*
 * per-session context
 */
struct caam_ctx {
	struct device *jrdev;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	u32 class1_alg_type;
	u32 class2_alg_type;
	u32 alg_op;
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t key_dma;
	unsigned int enckeylen;
	unsigned int split_key_len;
	unsigned int split_key_pad_len;
	unsigned int authsize;
};
static void append_key_aead(u32 *desc, struct caam_ctx *ctx,
			    int keys_fit_inline, bool is_rfc3686)
{
	u32 *nonce;
	unsigned int enckeylen = ctx->enckeylen;

	/*
	 * RFC3686 specific:
	 *	| ctx->key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (keys_fit_inline) {
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
		append_key_as_imm(desc, (void *)ctx->key +
				  ctx->split_key_pad_len, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	} else {
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
		append_key(desc, ctx->key_dma + ctx->split_key_pad_len,
			   enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len +
				enckeylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
}
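/*
 * Layout of ctx->key consumed above (nonce present only for RFC3686):
 *
 *	+----------------------------+---------+-------+
 *	| split auth key (padded)    | enc key | nonce |
 *	+----------------------------+---------+-------+
 *	0              split_key_pad_len    +enckeylen
 *
 * The split key goes to the MDHA split key register (class 2), the
 * encryption key to the class 1 key register, and the trailing nonce is
 * copied into CONTEXT1 via the output FIFO.
 */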
static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
				  int keys_fit_inline, bool is_rfc3686)
{
	u32 *key_jump_cmd;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	append_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	set_jump_tgt_here(desc, key_jump_cmd);
}
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *jump_cmd, *read_move_cmd, *write_move_cmd;
	u32 *desc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_AEAD_NULL_ENC_LEN + AEAD_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_NULL_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	/* aead_decrypt shared descriptor */
	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
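/*
 * The read_move_cmd/write_move_cmd pairs above work around SEC revisions
 * that lack the MOVE_LEN opcode: the descriptor copies part of itself
 * into a MATH register, lets the MATH unit supply the run-time length,
 * and writes the patched command back into the descriptor buffer before
 * it executes (hence the NOP jump separating the patch from the patched
 * location in the decrypt path).
 */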
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline;
	u32 geniv, moveiv;
	u32 ctx1_iv_off = 0;
	u32 *desc;
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->enckeylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_ENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_DEC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctr_mode)
		append_operation(desc, ctx->class1_alg_type |
				 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, ctx->class1_alg_type);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_GIVENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	if (is_rfc3686)
		goto copy_iv;

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV to class 1 context */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* ivsize + cryptlen = seqoutlen - authsize */
	append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Not need to reload iv */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
						 desc_bytes(desc),
						 DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead givenc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}
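/*
 * Rationale behind every keys_fit_inline test in this file: one CAAM
 * descriptor buffer holds at most 64 words (CAAM_DESC_BYTES_MAX = 256
 * bytes), shared between the shared-descriptor commands, the job
 * descriptor's I/O commands, and any immediate key material.  Keys are
 * embedded as immediate data only while everything still fits; otherwise
 * the descriptor references ctx->key_dma instead.
 */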
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *zero_payload_jump_cmd,
	    *zero_assoc_jump_cmd1, *zero_assoc_jump_cmd2;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_GCM_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD | JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* jump the zero-payload commands */
	append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	/* write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_GCM_DEC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD |
				   JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
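/*
 * GCM legally allows both zero-length AAD and zero-length payload, which
 * a single straight-line FIFO sequence cannot express; the conditional
 * jumps above (zero_assoc_jump_cmd1/2, zero_payload_jump_cmd) route
 * execution around FIFO LOADs that would otherwise stall waiting for
 * data that never arrives.
 */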
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_RFC4106_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4106 enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_RFC4106_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4106 dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}
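/*
 * For the IPsec flavours the sequences above skip 8 bytes and subtract 8
 * from the variable input length: in the AEAD API, req->assoclen for
 * rfc4106/rfc4543 includes the 8-byte explicit IV that sits between the
 * AAD and the payload on the wire, and that IV is consumed separately
 * from the AAD proper.  The 4-byte salt lives at the tail of ctx->key
 * (ctx->enckeylen excludes it; see the setkey functions below).
 */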
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd;
	u32 *read_move_cmd, *write_move_cmd;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_RFC4543_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4543 enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_RFC4543_DEC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4543 dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
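/*
 * RFC4543 (GMAC) authenticates everything and encrypts nothing, so the
 * shared descriptors above feed assoclen + cryptlen through the AAD path
 * (FIFOLD_TYPE_AAD) and copy the payload to the output with the
 * INFIFO->OFIFO MOVE rather than through the cipher datapath.
 */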
static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in,
			      u32 authkeylen)
{
	return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len,
			     ctx->split_key_pad_len, key_in, authkeylen,
			     ctx->alg_op);
}
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	/* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */
	static const u8 mdpadlen[] = { 16, 20, 32, 32, 64, 64 };
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	/* Pick class 2 key length from algorithm submask */
	ctx->split_key_len = mdpadlen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >>
				      OP_ALG_ALGSEL_SHIFT] * 2;
	ctx->split_key_pad_len = ALIGN(ctx->split_key_len, 16);

	if (ctx->split_key_pad_len + keys.enckeylen > CAAM_MAX_KEY_SIZE)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	printk(KERN_ERR "split_key_len %d split_key_pad_len %d\n",
	       ctx->split_key_len, ctx->split_key_pad_len);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_aead_key(ctx, keys.authkey, keys.authkeylen);
	if (ret)
		goto badkey;

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->split_key_pad_len, keys.enckey, keys.enckeylen);

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len +
				      keys.enckeylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->split_key_pad_len + keys.enckeylen, 1);
#endif

	ctx->enckeylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len +
				 keys.enckeylen, DMA_TO_DEVICE);
	}

	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
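/*
 * Worked example of the sizing above for authenc(hmac(sha256),cbc(aes)):
 * the SHA-256 entry in mdpadlen is 32, so split_key_len = 32 * 2 = 64
 * bytes (the MDHA split key is the precomputed HMAC ipad/opad state
 * pair), split_key_pad_len = ALIGN(64, 16) = 64, and the AES key is
 * appended at ctx->key + 64.
 */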
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	ret = gcm_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->enckeylen = keylen - 4;

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4106_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->enckeylen = keylen - 4;

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4543_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct ablkcipher_tfm *crt = &ablkcipher->base.crt_ablkcipher;
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	u32 *key_jump_cmd;
	u32 *desc;
	u32 *nonce;
	u32 geniv;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)(key + keylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load iv */
	append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)(key + keylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* load IV */
	append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctr_mode)
		append_operation(desc, ctx->class1_alg_type |
				 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, ctx->class1_alg_type);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)(key + keylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (crt->ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX |
		    (crt->ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, crt->ivsize,
			 LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, (u32)1, LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
						 desc_bytes(desc),
						 DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
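/*
 * Each ablkcipher session therefore carries up to three ready-made
 * shared descriptors (encrypt, decrypt, givencrypt); at request time the
 * job descriptor only has to point at the right one and supply the
 * source/destination sequences.
 */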
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *key_jump_cmd, *desc;
	__be64 sector_size = cpu_to_be64(512);

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
	append_data(desc, (void *)&sector_size, 8);

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
	append_data(desc, (void *)&sector_size, 8);

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_dec_op1(desc, ctx->class1_alg_type);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dma_unmap_single(jrdev, ctx->sh_desc_enc_dma,
				 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif

	return 0;
}
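/*
 * The two CONTEXT1 offsets used above reflect how this engine consumes
 * XTS tweaks: the 512-byte sector size is loaded once as a 64-bit value
 * at offset 0x28, while the upper 8 bytes of each request's IV (the
 * sector index) are sequence-loaded at offset 0x20, with the lower 8
 * bytes skipped.
 */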
/*
 * aead_edesc - s/w-extended aead descriptor
 * @assoc_nents: number of segments in associated data (SPI+Seq) scatterlist
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int assoc_nents;
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		dma_unmap_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents ? : 1, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents ? : 1, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
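/*
 * In both completion callbacks the extended descriptor is recovered from
 * the bare hw_desc pointer handed back by the job ring via container_of
 * on hw_desc[0]; this works because the job ring returns exactly the
 * descriptor address that was submitted.
 */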
static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = (struct ablkcipher_edesc *)((char *)desc -
		 offsetof(struct ablkcipher_edesc, hw_desc));

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	print_hex_dump(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = (struct ablkcipher_edesc *)((char *)desc -
		 offsetof(struct ablkcipher_edesc, hw_desc));
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	print_hex_dump(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->dst_nents) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}
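
/*
 * Summary of what init_aead_job() leaves in edesc->hw_desc (derived from
 * the calls above, not an exhaustive dump): a job header pointing at the
 * shared descriptor, a SEQ IN PTR covering assoclen + cryptlen, a SEQ OUT
 * PTR sized +/- authsize depending on direction, and a MATH command that
 * parks assoclen in REG3 for the shared descriptor to consume.
 */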
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->enckeylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
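
/*
 * Note on the above: the IV (and, for rfc4106-style transforms, the
 * 4-byte salt first) is pushed to class 1 through an immediate FIFO LOAD;
 * FIFOLD_TYPE_LAST1 is only set for a zero-length job so the CHA still
 * sees an end-of-input marker.
 */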
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize && (is_rfc3686 || !(alg->caam.geniv && encrypt)))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
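
/*
 * Worked example (assuming CTR_RFC3686_NONCE_SIZE == 4): for a plain
 * AES-CTR authenc transform the IV is loaded at CONTEXT1 offset 16; for
 * an RFC3686 transform the 4-byte nonce occupies bytes 16..19, so
 * ivoffset becomes 16 + 4 = 20 and the IV lands right behind the nonce.
 */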
/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	print_hex_dump(KERN_ERR, "src @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->src_nents ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (!edesc->src_nents && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (!edesc->dst_nents) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
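
/*
 * Link table layout implied above when the IV is not contiguous with the
 * payload: entry 0 holds the IV, entries 1..src_nents the source
 * segments; destination segments (when src != dst) start at
 * sec4_sg_index.
 */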
/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	print_hex_dump(KERN_ERR, "src @" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->src_nents ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (!edesc->src_nents) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}
/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
		       CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0;
	struct aead_edesc *edesc;
	int sgc;
	bool all_contig = true;
	int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_count(req->src, req->assoclen + req->cryptlen);
		dst_nents = sg_count(req->dst,
				     req->assoclen + req->cryptlen +
					(encrypt ? authsize : (-authsize)));
	} else {
		src_nents = sg_count(req->src,
				     req->assoclen + req->cryptlen +
					(encrypt ? authsize : 0));
	}

	/* Check if data are contiguous. */
	all_contig = !src_nents;
	if (!all_contig) {
		src_nents = src_nents ? : 1;
		sec4_sg_len = src_nents;
	}

	sec4_sg_len += dst_nents;

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	if (likely(req->src == req->dst)) {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
				     DMA_TO_DEVICE);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = all_contig;

	sec4_sg_index = 0;
	if (!all_contig) {
		sg_to_sec4_sg_last(req->src, src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += src_nents;
	}
	if (dst_nents) {
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
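
/*
 * Error handling stays local to aead_edesc_alloc(): each mapping failure
 * unwinds only what this function has mapped so far, frees edesc and
 * returns ERR_PTR(-ENOMEM), so callers only ever see a fully set up
 * edesc or an ERR_PTR.
 */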
static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
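
/*
 * Convention shared by all the request entry points below: a successful
 * caam_jr_enqueue() hands ownership of the request to the job ring and
 * we return -EINPROGRESS; the matching _done callback unmaps, frees the
 * edesc and completes the request. On enqueue failure we unwind here.
 */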
static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}
static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}
static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       req->assoclen + req->cryptlen, 1);
#endif

	/* Create and submit job descriptor*/
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int aead_givdecrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);

	if (req->cryptlen < ivsize)
		return -EINVAL;

	req->cryptlen -= ivsize;
	req->assoclen += ivsize;

	return aead_decrypt(req);
}
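
/*
 * For geniv algorithms (echainiv/seqiv below) the generated IV sits in
 * front of the payload, so decrypt simply re-counts those ivsize bytes
 * as associated data before falling through to aead_decrypt().
 */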
/*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0, sec4_sg_bytes;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool iv_contig = false;
	int sgc;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int sec4_sg_index;

	src_nents = sg_count(req->src, req->nbytes);

	if (req->dst != req->src)
		dst_nents = sg_count(req->dst, req->nbytes);

	if (likely(req->src == req->dst)) {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		return ERR_PTR(-ENOMEM);
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src))
		iv_contig = true;
	else
		src_nents = src_nents ? : 1;
	sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
			sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	sec4_sg_index = 0;
	if (!iv_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, src_nents,
				   edesc->sec4_sg + 1, 0);
		sec4_sg_index += 1 + src_nents;
	}

	if (dst_nents) {
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = iv_contig;
	return edesc;
}
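
/*
 * The iv_contig test above is a small optimization: when the DMA-mapped
 * IV happens to end exactly where a single-segment source begins, IV and
 * payload can be described by one SEQ pointer and the link table entry
 * for the IV is skipped entirely.
 */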
static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_ablkcipher_job(ctx->sh_desc_enc,
			    ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_ablkcipher_job(ctx->sh_desc_dec,
			    ctx->sh_desc_dec_dma, edesc, req, iv_contig);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0, sec4_sg_bytes;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool iv_contig = false;
	int sgc;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int sec4_sg_index;

	src_nents = sg_count(req->src, req->nbytes);

	if (unlikely(req->dst != req->src))
		dst_nents = sg_count(req->dst, req->nbytes);

	if (likely(req->src == req->dst)) {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		return ERR_PTR(-ENOMEM);
	}

	if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst))
		iv_contig = true;
	else
		dst_nents = dst_nents ? : 1;
	sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
			sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	sec4_sg_index = 0;
	if (src_nents) {
		sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0);
		sec4_sg_index += src_nents;
	}

	if (!iv_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
				   iv_dma, ivsize, 0);
		sec4_sg_index += 1;
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = iv_contig;
	return edesc;
}
static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
					   CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
				edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher
struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	u32 type;
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;
	u32 class2_alg_type;
	u32 alg_op;
};

static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
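
/*
 * Each template above is turned into a live crypto_alg by
 * caam_alg_alloc() at module init; the class1/class2/alg_op values are
 * the CAAM OPERATION-command selectors later stamped into the shared
 * descriptors built for that transform (see caam_init_common() below).
 */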
static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = gcm_encrypt,
			.decrypt = gcm_decrypt,
			.ivsize = 12,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_givdecrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC_PRECOMP,
			.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
			.rfc3686 = true,
			.geniv = true,
		},
	},
};
struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;
	struct caam_alg_entry caam;
};
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
{
	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	/* copy descriptor header template value */
	ctx->class1_alg_type = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->class2_alg_type = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
	ctx->alg_op = OP_TYPE_CLASS2_ALG | caam->alg_op;

	return 0;
}
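
/*
 * The init hooks below recover the CAAM wrapper around the generic
 * algorithm object via container_of() and delegate to caam_init_common().
 */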
static int caam_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct caam_crypto_alg *caam_alg =
		container_of(alg, struct caam_crypto_alg, crypto_alg);
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}
static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}
static void caam_exit_common(struct caam_ctx *ctx)
{
	if (ctx->sh_desc_enc_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
		dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
				 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
	if (ctx->sh_desc_dec_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma))
		dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma,
				 desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE);
	if (ctx->sh_desc_givenc_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma))
		dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma,
				 desc_bytes(ctx->sh_desc_givenc),
				 DMA_TO_DEVICE);
	if (ctx->key_dma &&
	    !dma_mapping_error(ctx->jrdev, ctx->key_dma))
		dma_unmap_single(ctx->jrdev, ctx->key_dma,
				 ctx->enckeylen + ctx->split_key_pad_len,
				 DMA_TO_DEVICE);

	caam_jr_free(ctx->jrdev);
}
static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}
static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
static void __exit caam_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	if (!alg_list.next)
		return;

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}
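
/*
 * Build a caam_crypto_alg from a driver_algs template: populate the generic
 * crypto_alg fields, then stash the CAAM descriptor header words for
 * caam_cra_init() to pick up.
 */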
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;
	t_alg->caam.alg_op = template->alg_op;

	return t_alg;
}
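
/*
 * driver_aeads entries are statically declared, so only the common base
 * fields and the init/exit hooks need filling in before registration.
 */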
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
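
/*
 * Module load: locate the SEC controller node, read the CHA version and
 * instantiation registers, and register only the algorithms the present
 * hardware can actually run.
 */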
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;
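
	/* Register the legacy ablkcipher/givcipher templates first */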
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n",
				alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}
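
	/* Then the AEADs, with additional LP-device and digest-size checks */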
	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");