/*
 * caam descriptor construction helper functions
 *
 * Copyright 2008-2012 Freescale Semiconductor, Inc.
 */

#include "desc.h"

#define IMMEDIATE (1 << 23)
#define CAAM_CMD_SZ sizeof(u32)
#define CAAM_PTR_SZ sizeof(dma_addr_t)
#define CAAM_DESC_BYTES_MAX (CAAM_CMD_SZ * MAX_CAAM_DESCSIZE)
#define DESC_JOB_IO_LEN (CAAM_CMD_SZ * 5 + CAAM_PTR_SZ * 3)

#ifdef DEBUG
#define PRINT_POS do { printk(KERN_DEBUG "%02d: %s\n", desc_len(desc),\
			      &__func__[sizeof("append")]); } while (0)
#else
#define PRINT_POS
#endif

#define SET_OK_NO_PROP_ERRORS (IMMEDIATE | LDST_CLASS_DECO | \
			       LDST_SRCDST_WORD_DECOCTRL | \
			       (LDOFF_CHG_SHARE_OK_NO_PROP << \
				LDST_OFFSET_SHIFT))
#define DISABLE_AUTO_INFO_FIFO (IMMEDIATE | LDST_CLASS_DECO | \
				LDST_SRCDST_WORD_DECOCTRL | \
				(LDOFF_DISABLE_AUTO_NFIFO << LDST_OFFSET_SHIFT))
#define ENABLE_AUTO_INFO_FIFO (IMMEDIATE | LDST_CLASS_DECO | \
			       LDST_SRCDST_WORD_DECOCTRL | \
			       (LDOFF_ENABLE_AUTO_NFIFO << LDST_OFFSET_SHIFT))

static inline int desc_len(u32 *desc)
{
	return *desc & HDR_DESCLEN_MASK;
}

static inline int desc_bytes(void *desc)
{
	return desc_len(desc) * CAAM_CMD_SZ;
}

static inline u32 *desc_end(u32 *desc)
{
	return desc + desc_len(desc);
}

static inline void *sh_desc_pdb(u32 *desc)
{
	return desc + 1;
}

static inline void init_desc(u32 *desc, u32 options)
{
	*desc = (options | HDR_ONE) + 1;
}

static inline void init_sh_desc(u32 *desc, u32 options)
{
	PRINT_POS;
	init_desc(desc, CMD_SHARED_DESC_HDR | options);
}

static inline void init_sh_desc_pdb(u32 *desc, u32 options, size_t pdb_bytes)
{
	u32 pdb_len = (pdb_bytes + CAAM_CMD_SZ - 1) / CAAM_CMD_SZ;

	init_sh_desc(desc, (((pdb_len + 1) << HDR_START_IDX_SHIFT) + pdb_len) |
		     options);
}

static inline void init_job_desc(u32 *desc, u32 options)
{
	init_desc(desc, CMD_DESC_HDR | options);
}

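/*
 * Illustrative sketch, not part of the original API: the header word written
 * by init_desc() carries the running descriptor length, so desc_len() and
 * desc_bytes() are valid at any point during construction.  The function
 * name below is hypothetical.
 */
static inline void example_empty_job_desc(u32 *desc)
{
	init_job_desc(desc, 0);
	/*
	 * Only the one-word header has been written so far:
	 * desc_len(desc) == 1 and desc_bytes(desc) == CAAM_CMD_SZ.
	 * Every append_*() helper below writes at desc_end(desc) and then
	 * bumps the length stored in this header word.
	 */
}
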
static inline void append_ptr(u32 *desc, dma_addr_t ptr)
{
	dma_addr_t *offset = (dma_addr_t *)desc_end(desc);

	*offset = ptr;

	(*desc) += CAAM_PTR_SZ / CAAM_CMD_SZ;
}

static inline void init_job_desc_shared(u32 *desc, dma_addr_t ptr, int len,
					u32 options)
{
	PRINT_POS;
	init_job_desc(desc, HDR_SHARED | options |
		      (len << HDR_START_IDX_SHIFT));
	append_ptr(desc, ptr);
}

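/*
 * Illustrative sketch (hypothetical function name): a job descriptor that
 * references an already-built shared descriptor by its DMA address.  The
 * job header's start index is set from the shared descriptor's length;
 * HDR_SHARE_DEFER and HDR_REVERSE are assumed to come from "desc.h".
 */
static inline void example_job_desc_with_shared(u32 *desc, u32 *sh_desc,
						dma_addr_t sh_desc_dma)
{
	init_job_desc_shared(desc, sh_desc_dma, desc_len(sh_desc),
			     HDR_SHARE_DEFER | HDR_REVERSE);
	/* job-specific commands (e.g. SEQ IN/OUT PTR) would follow here */
}
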
static inline void append_data(u32 *desc, void *data, int len)
{
	u32 *offset = desc_end(desc);

	if (len) /* avoid sparse warning: memcpy with byte count of 0 */
		memcpy(offset, data, len);

	(*desc) += (len + CAAM_CMD_SZ - 1) / CAAM_CMD_SZ;
}

static inline void append_cmd(u32 *desc, u32 command)
{
	u32 *cmd = desc_end(desc);

	*cmd = command;

	(*desc)++;
}

#define append_u32 append_cmd

static inline void append_u64(u32 *desc, u64 data)
{
	u32 *offset = desc_end(desc);

	*offset = upper_32_bits(data);
	*(++offset) = lower_32_bits(data);

	(*desc) += 2;
}

/* Write command without affecting header, and return pointer to next word */
static inline u32 *write_cmd(u32 *desc, u32 command)
{
	*desc = command;

	return desc + 1;
}

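/*
 * Illustrative sketch (hypothetical function name): unlike append_cmd(),
 * write_cmd() does not bump the length in the descriptor header, so it can
 * lay down words in a buffer whose header is fixed up separately.  The
 * LOAD command built here is only a demonstration of the mechanics.
 */
static inline void example_write_cmd_chain(u32 *buf, u32 imm_data)
{
	u32 *next;

	/* a LOAD command carrying one immediate data word */
	next = write_cmd(buf, CMD_LOAD | IMMEDIATE | sizeof(u32));
	write_cmd(next, imm_data);
	/* two words written; no descriptor header length was updated */
}
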
static inline void append_cmd_ptr(u32 *desc, dma_addr_t ptr, int len,
				  u32 command)
{
	append_cmd(desc, command | len);
	append_ptr(desc, ptr);
}

/* Write length after pointer, rather than inside command */
static inline void append_cmd_ptr_extlen(u32 *desc, dma_addr_t ptr,
					 unsigned int len, u32 command)
{
	append_cmd(desc, command);
	if (!(command & (SQIN_RTO | SQIN_PRE)))
		append_ptr(desc, ptr);
	append_cmd(desc, len);
}

static inline void append_cmd_data(u32 *desc, void *data, int len,
				   u32 command)
{
	append_cmd(desc, command | IMMEDIATE | len);
	append_data(desc, data, len);
}

#define APPEND_CMD_RET(cmd, op) \
static inline u32 *append_##cmd(u32 *desc, u32 options) \
{ \
	u32 *cmd = desc_end(desc); \
	PRINT_POS; \
	append_cmd(desc, CMD_##op | options); \
	return cmd; \
}
APPEND_CMD_RET(jump, JUMP)
APPEND_CMD_RET(move, MOVE)

static inline void set_jump_tgt_here(u32 *desc, u32 *jump_cmd)
{
	*jump_cmd = *jump_cmd | (desc_len(desc) - (jump_cmd - desc));
}

static inline void set_move_tgt_here(u32 *desc, u32 *move_cmd)
{
	*move_cmd &= ~MOVE_OFFSET_MASK;
	*move_cmd = *move_cmd | ((desc_len(desc) << (MOVE_OFFSET_SHIFT + 2)) &
				 MOVE_OFFSET_MASK);
}

#define APPEND_CMD(cmd, op) \
static inline void append_##cmd(u32 *desc, u32 options) \
{ \
	PRINT_POS; \
	append_cmd(desc, CMD_##op | options); \
}
APPEND_CMD(operation, OPERATION)

#define APPEND_CMD_LEN(cmd, op) \
static inline void append_##cmd(u32 *desc, unsigned int len, u32 options) \
{ \
	PRINT_POS; \
	append_cmd(desc, CMD_##op | len | options); \
}
APPEND_CMD_LEN(seq_load, SEQ_LOAD)
APPEND_CMD_LEN(seq_store, SEQ_STORE)
APPEND_CMD_LEN(seq_fifo_load, SEQ_FIFO_LOAD)
APPEND_CMD_LEN(seq_fifo_store, SEQ_FIFO_STORE)

#define APPEND_CMD_PTR(cmd, op) \
static inline void append_##cmd(u32 *desc, dma_addr_t ptr, unsigned int len, \
				u32 options) \
{ \
	PRINT_POS; \
	append_cmd_ptr(desc, ptr, len, CMD_##op | options); \
}
APPEND_CMD_PTR(key, KEY)
APPEND_CMD_PTR(load, LOAD)
APPEND_CMD_PTR(fifo_load, FIFO_LOAD)
APPEND_CMD_PTR(fifo_store, FIFO_STORE)

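/*
 * Illustrative sketch (hypothetical function name, AES-CBC chosen only as an
 * example): the common "skip the key load when the shared descriptor state
 * is already live" pattern.  append_jump() returns a pointer to the JUMP
 * word so its relative offset can be patched with set_jump_tgt_here() once
 * the target is known.  JUMP_JSL, JUMP_TEST_ALL, JUMP_COND_SHRD, CLASS_1,
 * KEY_DEST_CLASS_REG and the OP_ALG flags are assumed to come from "desc.h".
 */
static inline void example_cbc_operation(u32 *desc, dma_addr_t key_dma,
					 unsigned int keylen)
{
	u32 *key_jump_cmd;

	/* Skip the key load if the shared context is already loaded */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	append_key(desc, key_dma, keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 AES-CBC encrypt operation */
	append_operation(desc, OP_TYPE_CLASS1_ALG | OP_ALG_ALGSEL_AES |
			 OP_ALG_AAI_CBC | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);
}
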
static inline void append_store(u32 *desc, dma_addr_t ptr, unsigned int len,
				u32 options)
{
	u32 cmd_src;

	cmd_src = options & LDST_SRCDST_MASK;

	append_cmd(desc, CMD_STORE | options | len);

	/* The following options do not require pointer */
	if (!(cmd_src == LDST_SRCDST_WORD_DESCBUF_SHARED ||
	      cmd_src == LDST_SRCDST_WORD_DESCBUF_JOB ||
	      cmd_src == LDST_SRCDST_WORD_DESCBUF_JOB_WE ||
	      cmd_src == LDST_SRCDST_WORD_DESCBUF_SHARED_WE))
		append_ptr(desc, ptr);
}

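/*
 * Illustrative sketch (hypothetical function name): store part of the class 1
 * context register to memory.  Because the source is not one of the DESCBUF
 * words, append_store() emits the pointer after the STORE command.
 * LDST_CLASS_1_CCB and LDST_SRCDST_BYTE_CONTEXT are assumed to come from
 * "desc.h".
 */
static inline void example_store_context(u32 *desc, dma_addr_t ctx_dma,
					 unsigned int len)
{
	append_store(desc, ctx_dma, len,
		     LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT);
}
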
#define APPEND_SEQ_PTR_INTLEN(cmd, op) \
static inline void append_seq_##cmd##_ptr_intlen(u32 *desc, dma_addr_t ptr, \
						 unsigned int len, \
						 u32 options) \
{ \
	PRINT_POS; \
	if (options & (SQIN_RTO | SQIN_PRE)) \
		append_cmd(desc, CMD_SEQ_##op##_PTR | len | options); \
	else \
		append_cmd_ptr(desc, ptr, len, CMD_SEQ_##op##_PTR | options); \
}
APPEND_SEQ_PTR_INTLEN(in, IN)
APPEND_SEQ_PTR_INTLEN(out, OUT)

#define APPEND_CMD_PTR_TO_IMM(cmd, op) \
static inline void append_##cmd##_as_imm(u32 *desc, void *data, \
					 unsigned int len, u32 options) \
{ \
	PRINT_POS; \
	append_cmd_data(desc, data, len, CMD_##op | options); \
}
APPEND_CMD_PTR_TO_IMM(load, LOAD);
APPEND_CMD_PTR_TO_IMM(fifo_load, FIFO_LOAD);

#define APPEND_CMD_PTR_EXTLEN(cmd, op) \
static inline void append_##cmd##_extlen(u32 *desc, dma_addr_t ptr, \
					 unsigned int len, u32 options) \
{ \
	PRINT_POS; \
	append_cmd_ptr_extlen(desc, ptr, len, CMD_##op | SQIN_EXT | options); \
}
APPEND_CMD_PTR_EXTLEN(seq_in_ptr, SEQ_IN_PTR)
APPEND_CMD_PTR_EXTLEN(seq_out_ptr, SEQ_OUT_PTR)

/*
 * Determine whether to store length internally or externally depending on
 * the size of its type
 */
#define APPEND_CMD_PTR_LEN(cmd, op, type) \
static inline void append_##cmd(u32 *desc, dma_addr_t ptr, \
				type len, u32 options) \
{ \
	PRINT_POS; \
	if (sizeof(type) > sizeof(u16)) \
		append_##cmd##_extlen(desc, ptr, len, options); \
	else \
		append_##cmd##_intlen(desc, ptr, len, options); \
}
APPEND_CMD_PTR_LEN(seq_in_ptr, SEQ_IN_PTR, u32)
APPEND_CMD_PTR_LEN(seq_out_ptr, SEQ_OUT_PTR, u32)

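/*
 * Illustrative sketch (hypothetical function name): because the length type
 * instantiated above is u32, which is wider than u16, these wrappers always
 * take the extended-length path rather than packing the length into the
 * command word.
 */
static inline void example_seq_ptrs(u32 *desc, dma_addr_t src_dma,
				    dma_addr_t dst_dma, u32 nbytes)
{
	append_seq_in_ptr(desc, src_dma, nbytes, 0);
	append_seq_out_ptr(desc, dst_dma, nbytes, 0);
}
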
/*
 * 2nd variant for commands whose specified immediate length differs
 * from length of immediate data provided, e.g., split keys
 */
#define APPEND_CMD_PTR_TO_IMM2(cmd, op) \
static inline void append_##cmd##_as_imm(u32 *desc, void *data, \
					 unsigned int data_len, \
					 unsigned int len, u32 options) \
{ \
	PRINT_POS; \
	append_cmd(desc, CMD_##op | IMMEDIATE | len | options); \
	append_data(desc, data, data_len); \
}
APPEND_CMD_PTR_TO_IMM2(key, KEY);

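/*
 * Illustrative sketch (hypothetical function and parameter names): loading an
 * MDHA split key as immediate data.  The padded key image occupies
 * split_key_pad_len bytes in the descriptor, while the KEY command advertises
 * only split_key_len.  CLASS_2, KEY_DEST_MDHA_SPLIT and KEY_ENC are assumed
 * to come from "desc.h".
 */
static inline void example_append_split_key(u32 *desc, void *key_virt,
					    unsigned int split_key_pad_len,
					    unsigned int split_key_len)
{
	append_key_as_imm(desc, key_virt, split_key_pad_len, split_key_len,
			  CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
}
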
#define APPEND_CMD_RAW_IMM(cmd, op, type) \
static inline void append_##cmd##_imm_##type(u32 *desc, type immediate, \
					     u32 options) \
{ \
	PRINT_POS; \
	append_cmd(desc, CMD_##op | IMMEDIATE | options | sizeof(type)); \
	append_cmd(desc, immediate); \
}
APPEND_CMD_RAW_IMM(load, LOAD, u32);

/*
 * Append math command. Only the last part of destination and source need to
 * be specified
 */
#define APPEND_MATH(op, desc, dest, src_0, src_1, len) \
append_cmd(desc, CMD_MATH | MATH_FUN_##op | MATH_DEST_##dest | \
	   MATH_SRC0_##src_0 | MATH_SRC1_##src_1 | (u32)len);

#define append_math_add(desc, dest, src0, src1, len) \
	APPEND_MATH(ADD, desc, dest, src0, src1, len)
#define append_math_sub(desc, dest, src0, src1, len) \
	APPEND_MATH(SUB, desc, dest, src0, src1, len)
#define append_math_add_c(desc, dest, src0, src1, len) \
	APPEND_MATH(ADDC, desc, dest, src0, src1, len)
#define append_math_sub_b(desc, dest, src0, src1, len) \
	APPEND_MATH(SUBB, desc, dest, src0, src1, len)
#define append_math_and(desc, dest, src0, src1, len) \
	APPEND_MATH(AND, desc, dest, src0, src1, len)
#define append_math_or(desc, dest, src0, src1, len) \
	APPEND_MATH(OR, desc, dest, src0, src1, len)
#define append_math_xor(desc, dest, src0, src1, len) \
	APPEND_MATH(XOR, desc, dest, src0, src1, len)
#define append_math_lshift(desc, dest, src0, src1, len) \
	APPEND_MATH(LSHIFT, desc, dest, src0, src1, len)
#define append_math_rshift(desc, dest, src0, src1, len) \
	APPEND_MATH(RSHIFT, desc, dest, src0, src1, len)
#define append_math_ldshift(desc, dest, src0, src1, len) \
	APPEND_MATH(SHLD, desc, dest, src0, src1, len)

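/*
 * Illustrative sketch (hypothetical function name): register-to-register
 * MATH, operating on 4-byte quantities.  The short names expand to
 * MATH_DEST_, MATH_SRC0_ and MATH_SRC1_ prefixed defines assumed to come
 * from "desc.h".
 */
static inline void example_math(u32 *desc)
{
	/* VARSEQOUTLEN = REG2 - REG3 */
	append_math_sub(desc, VARSEQOUTLEN, REG2, REG3, CAAM_CMD_SZ);
}
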
/* Exactly one source is IMM. Data is passed in as u32 value */
#define APPEND_MATH_IMM_u32(op, desc, dest, src_0, src_1, data) \
do { \
	APPEND_MATH(op, desc, dest, src_0, src_1, CAAM_CMD_SZ); \
	append_cmd(desc, data); \
} while (0)

#define append_math_add_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(ADD, desc, dest, src0, src1, data)
#define append_math_sub_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(SUB, desc, dest, src0, src1, data)
#define append_math_add_c_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(ADDC, desc, dest, src0, src1, data)
#define append_math_sub_b_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(SUBB, desc, dest, src0, src1, data)
#define append_math_and_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(AND, desc, dest, src0, src1, data)
#define append_math_or_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(OR, desc, dest, src0, src1, data)
#define append_math_xor_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(XOR, desc, dest, src0, src1, data)
#define append_math_lshift_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(LSHIFT, desc, dest, src0, src1, data)
#define append_math_rshift_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(RSHIFT, desc, dest, src0, src1, data)

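/*
 * Illustrative sketch (hypothetical function, ivsize supplied by the caller):
 * one MATH source is a 32-bit immediate, emitted as an extra descriptor word
 * right after the MATH command.
 */
static inline void example_math_imm_u32(u32 *desc, u32 ivsize)
{
	/* REG2 = SEQINLEN - ivsize */
	append_math_sub_imm_u32(desc, REG2, SEQINLEN, IMM, ivsize);
}
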
/* Exactly one source is IMM. Data is passed in as u64 value */
#define APPEND_MATH_IMM_u64(op, desc, dest, src_0, src_1, data) \
do { \
	u32 upper = (data >> 16) >> 16; \
	APPEND_MATH(op, desc, dest, src_0, src_1, CAAM_CMD_SZ * 2 | \
		    (upper ? 0 : MATH_IFB)); \
	if (upper) \
		append_u64(desc, data); \
	else \
		append_u32(desc, data); \
} while (0)

#define append_math_add_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(ADD, desc, dest, src0, src1, data)
#define append_math_sub_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(SUB, desc, dest, src0, src1, data)
#define append_math_add_c_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(ADDC, desc, dest, src0, src1, data)
#define append_math_sub_b_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(SUBB, desc, dest, src0, src1, data)
#define append_math_and_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(AND, desc, dest, src0, src1, data)
#define append_math_or_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(OR, desc, dest, src0, src1, data)
#define append_math_xor_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(XOR, desc, dest, src0, src1, data)
#define append_math_lshift_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(LSHIFT, desc, dest, src0, src1, data)
#define append_math_rshift_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(RSHIFT, desc, dest, src0, src1, data)

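/*
 * Illustrative sketch (hypothetical function name): with a u64 immediate the
 * macro above emits either an 8-byte immediate (two extra words) or, when the
 * upper 32 bits are zero, a single immediate word flagged with MATH_IFB while
 * keeping the 8-byte operation length.
 */
static inline void example_math_imm_u64(u32 *desc, u64 mask)
{
	/* REG0 &= mask, as an 8-byte operation */
	append_math_and_imm_u64(desc, REG0, REG0, IMM, mask);
}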