/*
 * Shared descriptors for aead, ablkcipher algorithms
 *
 * Copyright 2016 NXP
 */

#include "compat.h"
#include "desc_constr.h"
#include "caamalg_desc.h"

/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}

/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT);
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}

/**
 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
 *                               (non-protocol) with no (null) encryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
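
/*
 * Usage sketch (added for illustration, not part of the original driver):
 * a caller such as caam/caamalg would typically build this descriptor once
 * per tfm into a DMA-able buffer. Buffer size, key fields and the Era value
 * below are illustrative assumptions, not values taken from this file.
 *
 *	u32 sh_desc[64];
 *	struct alginfo adata = { 0 };
 *
 *	adata.algtype = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP;
 *	adata.keylen = split_keylen;		// split key, SEC Era < 6
 *	adata.keylen_pad = split_keylen_pad;
 *	adata.key_virt = split_key;
 *	adata.key_inline = true;
 *	cnstr_shdsc_aead_null_encap(sh_desc, &adata, authsize, era);
 */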

/**
 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
 *                               (non-protocol) with no (null) decryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);

static void init_sh_desc_key_aead(u32 * const desc,
				  struct alginfo * const cdata,
				  struct alginfo * const adata,
				  const bool is_rfc3686, u32 *nonce, int era)
{
	u32 *key_jump_cmd;
	unsigned int enckeylen = cdata->keylen;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * RFC3686 specific:
	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (era < 6) {
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		append_proto_dkp(desc, adata);
	}

	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);
}

/**
 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool is_rfc3686,
			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
			    int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
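
/*
 * Usage sketch (illustration only, not part of the original driver): building
 * an authenc(hmac(sha1),cbc(aes)) encapsulation descriptor for the job-ring
 * interface (is_qi = false). All concrete values below are assumptions.
 *
 *	struct alginfo cdata = { .algtype = OP_ALG_ALGSEL_AES |
 *					    OP_ALG_AAI_CBC, ... };
 *	struct alginfo adata = { .algtype = OP_ALG_ALGSEL_SHA1 |
 *					    OP_ALG_AAI_HMAC_PRECOMP, ... };
 *
 *	cnstr_shdsc_aead_encap(sh_desc, &cdata, &adata, AES_BLOCK_SIZE,
 *			       authsize, false, NULL, 0, false, era);
 *
 * For rfc3686(ctr(aes)) the nonce pointer and a non-zero ctx1_iv_off would
 * be passed instead.
 */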

/**
 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool geniv,
			    const bool is_rfc3686, u32 *nonce,
			    const u32 ctx1_iv_off, const bool is_qi, int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		if (!geniv)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
					CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
					CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (geniv) {
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_decap);

/**
 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
 *                             (non-protocol) with HW-generated initialization
 *                             vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
			       struct alginfo *adata, unsigned int ivsize,
			       unsigned int icvsize, const bool is_rfc3686,
			       u32 *nonce, const u32 ctx1_iv_off,
			       const bool is_qi, int era)
{
	u32 geniv, moveiv;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);
	}

	if (is_rfc3686) {
		if (is_qi)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));

		goto copy_iv;
	}

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV to class 1 context */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* No need to reload iv */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead givenc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
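
/*
 * Usage sketch (illustration only): the givencrypt variant takes the same
 * arguments as cnstr_shdsc_aead_encap(); the only difference for the caller
 * is that the descriptor itself generates and outputs the IV, e.g.
 *
 *	cnstr_shdsc_aead_givencap(sh_desc, &cdata, &adata, ivsize, authsize,
 *				  is_rfc3686, nonce, ctx1_iv_off, false, era);
 */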

/**
 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
			   unsigned int icvsize)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
	    *zero_assoc_jump_cmd2;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* jump the zero-payload commands */
	append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	/* write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
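
/*
 * Usage sketch (illustration only, values are assumptions): a gcm(aes)
 * encapsulation descriptor with a 16-byte tag and the key placed inline in
 * the descriptor.
 *
 *	struct alginfo cdata = { 0 };
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM;
 *	cdata.keylen = 16;			// AES-128 key, for example
 *	cdata.key_virt = key;
 *	cdata.key_inline = true;
 *	cnstr_shdsc_gcm_encap(sh_desc, &cdata, 16);
 */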

/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
			   unsigned int icvsize)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);

/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
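
/*
 * Usage sketch (illustration only): for rfc4106(gcm(aes)) the difference is
 * mostly on the caller side - the last 4 bytes of the key material are the
 * nonce/salt, so the keylen passed in cdata excludes them, e.g.
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM;
 *	cdata.keylen = keylen - 4;	// strip the 4-byte salt (assumption)
 *	cnstr_shdsc_rfc4106_encap(sh_desc, &cdata, authsize);
 */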

/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 |
				  KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);

/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
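
/*
 * Usage sketch (illustration only): rfc4543(gcm(aes)) (GMAC) follows the same
 * calling convention as the rfc4106 constructors above; the entire sequence
 * is authenticated as AAD and copied through to the output, e.g.
 *
 *	cnstr_shdsc_rfc4543_encap(sh_desc, &cdata, authsize);
 */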

/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);

/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}

/**
 * cnstr_shdsc_ablkcipher_encap - ablkcipher encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_encap(u32 * const desc, struct alginfo *cdata,
				  unsigned int ivsize, const bool is_rfc3686,
				  const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load iv */
	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_encap);
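
/*
 * Usage sketch (illustration only, values are assumptions): a cbc(aes)
 * encryption descriptor. The key is always inlined for ablkcipher, and
 * ctx1_iv_off is 0 for CBC (it is non-zero only for the CTR/rfc3686 cases).
 *
 *	struct alginfo cdata = { 0 };
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
 *	cdata.keylen = keylen;
 *	cdata.key_virt = key;
 *	cdata.key_inline = true;
 *	cnstr_shdsc_ablkcipher_encap(sh_desc, &cdata, AES_BLOCK_SIZE,
 *				     false, 0);
 */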

/**
 * cnstr_shdsc_ablkcipher_decap - ablkcipher decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_decap(u32 * const desc, struct alginfo *cdata,
				  unsigned int ivsize, const bool is_rfc3686,
				  const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* load IV */
	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_decap);

/**
 * cnstr_shdsc_ablkcipher_givencap - ablkcipher encapsulation shared descriptor
 *                                   with HW-generated initialization vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_givencap(u32 * const desc, struct alginfo *cdata,
				     unsigned int ivsize, const bool is_rfc3686,
				     const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd, geniv;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 | NFIFOENTRY_PTYPE_RND |
		(ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP | MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX | (ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_givencap);

/**
 * cnstr_shdsc_xts_ablkcipher_encap - xts ablkcipher encapsulation shared
 *                                    descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 */
void cnstr_shdsc_xts_ablkcipher_encap(u32 * const desc, struct alginfo *cdata)
{
	__be64 sector_size = cpu_to_be64(512);
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_encap);
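
/*
 * Usage sketch (illustration only, values are assumptions): xts(aes) takes no
 * IV offset or nonce parameters; the 16-byte IV supplied with each request
 * carries the sector index in its upper 8 bytes, as set up above.
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS;
 *	cdata.keylen = keylen;		// both XTS keys, concatenated
 *	cdata.key_virt = key;
 *	cnstr_shdsc_xts_ablkcipher_encap(sh_desc, &cdata);
 */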

/**
 * cnstr_shdsc_xts_ablkcipher_decap - xts ablkcipher decapsulation shared
 *                                    descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 */
void cnstr_shdsc_xts_ablkcipher_decap(u32 * const desc, struct alginfo *cdata)
{
	__be64 sector_size = cpu_to_be64(512);
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_decap);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM descriptor support");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");