/*
 * Shared descriptors for aead, ablkcipher algorithms
 *
 * Copyright 2016 NXP
 */

#include "compat.h"
#include "desc_constr.h"
#include "caamalg_desc.h"

/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}

/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT);
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}

/**
 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
 *                               (non-protocol) with no (null) encryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (adata->key_inline)
		append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
				  adata->keylen, CLASS_2 | KEY_DEST_MDHA_SPLIT |
				  KEY_ENC);
	else
		append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
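
/*
 * Illustrative usage sketch, compiled out by default: shows how a caller
 * could fill struct alginfo with a pre-computed MDHA split key (inlined as
 * immediate data) and build the null-encryption AEAD shared descriptor into
 * a caller-provided buffer. The wrapper name, its parameters and the
 * CAAMALG_DESC_USAGE_EXAMPLE guard are assumptions for this example only,
 * not part of the driver API.
 */
#ifdef CAAMALG_DESC_USAGE_EXAMPLE
static void example_build_aead_null_enc_shdesc(u32 *sh_desc, void *split_key,
					       unsigned int split_keylen,
					       unsigned int split_keylen_pad,
					       unsigned int authsize)
{
	struct alginfo adata = {};

	/* hmac(sha256) with a pre-computed MDHA split key */
	adata.algtype = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP;
	adata.key_virt = split_key;
	adata.keylen = split_keylen;
	adata.keylen_pad = split_keylen_pad;
	adata.key_inline = true;

	cnstr_shdsc_aead_null_encap(sh_desc, &adata, authsize);
}
#endif /* CAAMALG_DESC_USAGE_EXAMPLE */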

/**
 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
 *                               (non-protocol) with no (null) decryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (adata->key_inline)
		append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
				  adata->keylen, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);

static void init_sh_desc_key_aead(u32 * const desc,
				  struct alginfo * const cdata,
				  struct alginfo * const adata,
				  const bool is_rfc3686, u32 *nonce)
{
	u32 *key_jump_cmd;
	unsigned int enckeylen = cdata->keylen;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * RFC3686 specific:
	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (adata->key_inline)
		append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
				  adata->keylen, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);

	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);
}

/**
 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int icvsize,
			    const bool is_rfc3686, u32 *nonce,
			    const u32 ctx1_iv_off)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
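
/*
 * Illustrative usage sketch, compiled out by default: builds an
 * authenc(hmac(sha1),cbc(aes))-style encapsulation descriptor. The wrapper,
 * its parameters and the CAAMALG_DESC_USAGE_EXAMPLE guard are assumptions
 * for the example; the alginfo key fields are assumed to be filled in by the
 * caller, which would also pick key_inline based on remaining descriptor
 * space.
 */
#ifdef CAAMALG_DESC_USAGE_EXAMPLE
static void example_build_authenc_enc_shdesc(u32 *sh_desc,
					     struct alginfo *cdata,
					     struct alginfo *adata,
					     unsigned int authsize)
{
	cdata->algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
	adata->algtype = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP;

	/*
	 * CBC keeps the IV at the start of CONTEXT1, so ctx1_iv_off is 0 and
	 * no RFC3686 nonce is involved.
	 */
	cnstr_shdsc_aead_encap(sh_desc, cdata, adata, authsize,
			       false, NULL, 0);
}
#endif /* CAAMALG_DESC_USAGE_EXAMPLE */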

/**
 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @geniv: whether the IV immediately follows the associated data in the input
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool geniv,
			    const bool is_rfc3686, u32 *nonce,
			    const u32 ctx1_iv_off)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	if (geniv)
		append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
	else
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (geniv) {
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
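
/*
 * Illustrative usage sketch, compiled out by default: builds the matching
 * decapsulation descriptor for an rfc3686(ctr(aes)) + HMAC combination.
 * Because init_sh_desc_key_aead() above places the 4-byte RFC3686 nonce at
 * byte offset 16 of CONTEXT1, the per-request IV slot starts right behind it,
 * hence ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE. The wrapper and the guard
 * are assumptions for the example only; alginfo key fields come from the
 * caller.
 */
#ifdef CAAMALG_DESC_USAGE_EXAMPLE
static void example_build_rfc3686_authenc_dec_shdesc(u32 *sh_desc,
						     struct alginfo *cdata,
						     struct alginfo *adata,
						     unsigned int ivsize,
						     unsigned int authsize,
						     u32 *nonce)
{
	cdata->algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128;
	adata->algtype = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP;

	/* geniv is false here: the caller provides the IV with the request */
	cnstr_shdsc_aead_decap(sh_desc, cdata, adata, ivsize, authsize,
			       false, true, nonce,
			       16 + CTR_RFC3686_NONCE_SIZE);
}
#endif /* CAAMALG_DESC_USAGE_EXAMPLE */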

/**
 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
 *                             (non-protocol) with HW-generated initialization
 *                             vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
			       struct alginfo *adata, unsigned int ivsize,
			       unsigned int icvsize, const bool is_rfc3686,
			       u32 *nonce, const u32 ctx1_iv_off)
{
	u32 geniv, moveiv;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);

	if (is_rfc3686)
		goto copy_iv;

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV from class 1 context to the output FIFO */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* No need to reload IV */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead givenc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);

/**
 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
			   unsigned int icvsize)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
	    *zero_assoc_jump_cmd2;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD | JUMP_COND_SELF);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* jump the zero-payload commands */
	append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	/* write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
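
/*
 * Illustrative usage sketch, compiled out by default: builds a gcm(aes)
 * encapsulation descriptor with the key inlined as immediate data. Whether a
 * real caller inlines the key or passes a DMA address depends on how much
 * room is left in the shared descriptor; the wrapper, its parameters and the
 * CAAMALG_DESC_USAGE_EXAMPLE guard are assumptions for the example only.
 */
#ifdef CAAMALG_DESC_USAGE_EXAMPLE
static void example_build_gcm_enc_shdesc(u32 *sh_desc, void *key,
					 unsigned int keylen,
					 unsigned int authsize)
{
	struct alginfo cdata = {};

	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM;
	cdata.key_virt = key;		/* plain AES key, inlined */
	cdata.keylen = keylen;
	cdata.key_inline = true;

	cnstr_shdsc_gcm_encap(sh_desc, &cdata, authsize);
}
#endif /* CAAMALG_DESC_USAGE_EXAMPLE */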

/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
			   unsigned int icvsize)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD |
				   JUMP_COND_SELF);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store decrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);

/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);

/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 |
				  KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);

/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);

/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);

/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}

/**
 * cnstr_shdsc_ablkcipher_encap - ablkcipher encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_encap(u32 * const desc, struct alginfo *cdata,
				  unsigned int ivsize, const bool is_rfc3686,
				  const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load iv */
	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_encap);
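
/*
 * Illustrative usage sketch, compiled out by default: builds a cbc(aes)
 * ablkcipher encryption descriptor. Note that, unlike the AEAD constructors,
 * this constructor always inlines the key, and for RFC3686 it expects the
 * 4-byte nonce to sit directly after the encryption key in key_virt. The
 * wrapper and the CAAMALG_DESC_USAGE_EXAMPLE guard are assumptions for the
 * example only.
 */
#ifdef CAAMALG_DESC_USAGE_EXAMPLE
static void example_build_cbc_aes_enc_shdesc(u32 *sh_desc, void *key,
					     unsigned int keylen,
					     unsigned int ivsize)
{
	struct alginfo cdata = {};

	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
	cdata.key_virt = key;
	cdata.keylen = keylen;
	cdata.key_inline = true;

	/* CBC: IV at the start of CONTEXT1, no RFC3686 counter block */
	cnstr_shdsc_ablkcipher_encap(sh_desc, &cdata, ivsize, false, 0);
}
#endif /* CAAMALG_DESC_USAGE_EXAMPLE */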

/**
 * cnstr_shdsc_ablkcipher_decap - ablkcipher decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_decap(u32 * const desc, struct alginfo *cdata,
				  unsigned int ivsize, const bool is_rfc3686,
				  const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* load IV */
	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_decap);

/**
 * cnstr_shdsc_ablkcipher_givencap - ablkcipher encapsulation shared descriptor
 *                                   with HW-generated initialization vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_givencap(u32 * const desc, struct alginfo *cdata,
				     unsigned int ivsize, const bool is_rfc3686,
				     const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd, geniv;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 | NFIFOENTRY_PTYPE_RND |
		(ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP | MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX | (ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_givencap);

/**
 * cnstr_shdsc_xts_ablkcipher_encap - xts ablkcipher encapsulation shared
 *                                    descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 */
void cnstr_shdsc_xts_ablkcipher_encap(u32 * const desc, struct alginfo *cdata)
{
	__be64 sector_size = cpu_to_be64(512);
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_encap);
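
/*
 * Illustrative usage sketch, compiled out by default: builds an xts(aes)
 * encryption descriptor. Here cdata->keylen is assumed to cover both
 * concatenated XTS keys (they are loaded as a single class 1 key above), and
 * the 8-byte sector index is supplied per request via the sequence IV as the
 * preceding comment describes. The wrapper and the guard are assumptions for
 * the example only.
 */
#ifdef CAAMALG_DESC_USAGE_EXAMPLE
static void example_build_xts_aes_enc_shdesc(u32 *sh_desc, void *key,
					     unsigned int keylen)
{
	struct alginfo cdata = {};

	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS;
	cdata.key_virt = key;		/* both XTS keys, back to back */
	cdata.keylen = keylen;
	cdata.key_inline = true;

	cnstr_shdsc_xts_ablkcipher_encap(sh_desc, &cdata);
}
#endif /* CAAMALG_DESC_USAGE_EXAMPLE */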

/**
 * cnstr_shdsc_xts_ablkcipher_decap - xts ablkcipher decapsulation shared
 *                                    descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 */
void cnstr_shdsc_xts_ablkcipher_decap(u32 * const desc, struct alginfo *cdata)
{
	__be64 sector_size = cpu_to_be64(512);
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_decap);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM descriptor support");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");