/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Intel Code Loader DMA support
 *
 * Copyright (C) 2015, Intel Corporation.
 */

#ifndef SKL_SST_CLDMA_H_
#define SKL_SST_CLDMA_H_

#define FW_CL_STREAM_NUMBER		0x1

#define DMA_ADDRESS_128_BITS_ALIGNMENT	7
#define BDL_ALIGN(x)			(x >> DMA_ADDRESS_128_BITS_ALIGNMENT)
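/*
 * Illustrative note (not part of the interface): BDL_ALIGN() drops the low
 * seven address bits, i.e. expresses a DMA address in 128-byte units so it
 * fits the BDL base-address register field. For example,
 * BDL_ALIGN(0x1000) == 0x20.
 */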
#define SKL_ADSPIC_CL_DMA			0x2
#define SKL_ADSPIS_CL_DMA			0x2
#define SKL_CL_DMA_SD_INT_DESC_ERR		0x10 /* Descriptor error interrupt */
#define SKL_CL_DMA_SD_INT_FIFO_ERR		0x08 /* FIFO error interrupt */
#define SKL_CL_DMA_SD_INT_COMPLETE		0x04 /* Buffer completion interrupt */
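/*
 * Usage sketch (assumption, not defined here): the DSP interrupt handler
 * can test SKL_ADSPIS_CL_DMA in the ADSP interrupt status register
 * (SKL_ADSP_REG_ADSPIS, declared in skl-sst-dsp.h) before inspecting the
 * per-stream status bits above, roughly:
 *
 *	u32 val = sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_ADSPIS);
 *	if (val & SKL_ADSPIS_CL_DMA)
 *		skl_cldma_process_intr(ctx);
 */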
/* Intel HD Audio Code Loader DMA Registers */

#define HDA_ADSP_LOADER_BASE			0x80

/* Stream Registers */
#define SKL_ADSP_REG_CL_SD_CTL			(HDA_ADSP_LOADER_BASE + 0x00)
#define SKL_ADSP_REG_CL_SD_STS			(HDA_ADSP_LOADER_BASE + 0x03)
#define SKL_ADSP_REG_CL_SD_LPIB			(HDA_ADSP_LOADER_BASE + 0x04)
#define SKL_ADSP_REG_CL_SD_CBL			(HDA_ADSP_LOADER_BASE + 0x08)
#define SKL_ADSP_REG_CL_SD_LVI			(HDA_ADSP_LOADER_BASE + 0x0c)
#define SKL_ADSP_REG_CL_SD_FIFOW		(HDA_ADSP_LOADER_BASE + 0x0e)
#define SKL_ADSP_REG_CL_SD_FIFOSIZE		(HDA_ADSP_LOADER_BASE + 0x10)
#define SKL_ADSP_REG_CL_SD_FORMAT		(HDA_ADSP_LOADER_BASE + 0x12)
#define SKL_ADSP_REG_CL_SD_FIFOL		(HDA_ADSP_LOADER_BASE + 0x14)
#define SKL_ADSP_REG_CL_SD_BDLPL		(HDA_ADSP_LOADER_BASE + 0x18)
#define SKL_ADSP_REG_CL_SD_BDLPU		(HDA_ADSP_LOADER_BASE + 0x1c)

/* CL: Software Position Based FIFO Capability Registers */
#define SKL_ADSP_REG_CL_SPBFIFO			(HDA_ADSP_LOADER_BASE + 0x20)
#define SKL_ADSP_REG_CL_SPBFIFO_SPBFCH		(SKL_ADSP_REG_CL_SPBFIFO + 0x0)
#define SKL_ADSP_REG_CL_SPBFIFO_SPBFCCTL	(SKL_ADSP_REG_CL_SPBFIFO + 0x4)
#define SKL_ADSP_REG_CL_SPBFIFO_SPIB		(SKL_ADSP_REG_CL_SPBFIFO + 0x8)
#define SKL_ADSP_REG_CL_SPBFIFO_MAXFIFOS	(SKL_ADSP_REG_CL_SPBFIFO + 0xc)
/* CL: Stream Descriptor x Control */

/* Stream Reset */
#define CL_SD_CTL_SRST_SHIFT		0
#define CL_SD_CTL_SRST_MASK		(1 << CL_SD_CTL_SRST_SHIFT)
#define CL_SD_CTL_SRST(x)		\
			((x << CL_SD_CTL_SRST_SHIFT) & CL_SD_CTL_SRST_MASK)
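/*
 * Usage sketch (assumption, based on the usual HDA stream reset sequence):
 * reset means asserting SRST, waiting for the bit to read back as set, then
 * clearing it and waiting for it to drop, e.g.:
 *
 *	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
 *				 CL_SD_CTL_SRST_MASK, CL_SD_CTL_SRST(1));
 *	... poll SKL_ADSP_REG_CL_SD_CTL until CL_SD_CTL_SRST_MASK is set ...
 *	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
 *				 CL_SD_CTL_SRST_MASK, CL_SD_CTL_SRST(0));
 *	... poll until CL_SD_CTL_SRST_MASK clears ...
 */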
/* Stream Run */
#define CL_SD_CTL_RUN_SHIFT		1
#define CL_SD_CTL_RUN_MASK		(1 << CL_SD_CTL_RUN_SHIFT)
#define CL_SD_CTL_RUN(x)		\
			((x << CL_SD_CTL_RUN_SHIFT) & CL_SD_CTL_RUN_MASK)

/* Interrupt On Completion Enable */
#define CL_SD_CTL_IOCE_SHIFT		2
#define CL_SD_CTL_IOCE_MASK		(1 << CL_SD_CTL_IOCE_SHIFT)
#define CL_SD_CTL_IOCE(x)		\
			((x << CL_SD_CTL_IOCE_SHIFT) & CL_SD_CTL_IOCE_MASK)

/* FIFO Error Interrupt Enable */
#define CL_SD_CTL_FEIE_SHIFT		3
#define CL_SD_CTL_FEIE_MASK		(1 << CL_SD_CTL_FEIE_SHIFT)
#define CL_SD_CTL_FEIE(x)		\
			((x << CL_SD_CTL_FEIE_SHIFT) & CL_SD_CTL_FEIE_MASK)

/* Descriptor Error Interrupt Enable */
#define CL_SD_CTL_DEIE_SHIFT		4
#define CL_SD_CTL_DEIE_MASK		(1 << CL_SD_CTL_DEIE_SHIFT)
#define CL_SD_CTL_DEIE(x)		\
			((x << CL_SD_CTL_DEIE_SHIFT) & CL_SD_CTL_DEIE_MASK)

/* FIFO Limit Change */
#define CL_SD_CTL_FIFOLC_SHIFT		5
#define CL_SD_CTL_FIFOLC_MASK		(1 << CL_SD_CTL_FIFOLC_SHIFT)
#define CL_SD_CTL_FIFOLC(x)		\
			((x << CL_SD_CTL_FIFOLC_SHIFT) & CL_SD_CTL_FIFOLC_MASK)

/* Stripe Control */
#define CL_SD_CTL_STRIPE_SHIFT		16
#define CL_SD_CTL_STRIPE_MASK		(0x3 << CL_SD_CTL_STRIPE_SHIFT)
#define CL_SD_CTL_STRIPE(x)		\
			((x << CL_SD_CTL_STRIPE_SHIFT) & CL_SD_CTL_STRIPE_MASK)

/* Traffic Priority */
#define CL_SD_CTL_TP_SHIFT		18
#define CL_SD_CTL_TP_MASK		(1 << CL_SD_CTL_TP_SHIFT)
#define CL_SD_CTL_TP(x)			\
			((x << CL_SD_CTL_TP_SHIFT) & CL_SD_CTL_TP_MASK)

/* Bidirectional Direction Control */
#define CL_SD_CTL_DIR_SHIFT		19
#define CL_SD_CTL_DIR_MASK		(1 << CL_SD_CTL_DIR_SHIFT)
#define CL_SD_CTL_DIR(x)		\
			((x << CL_SD_CTL_DIR_SHIFT) & CL_SD_CTL_DIR_MASK)

/* Stream Number */
#define CL_SD_CTL_STRM_SHIFT		20
#define CL_SD_CTL_STRM_MASK		(0xf << CL_SD_CTL_STRM_SHIFT)
#define CL_SD_CTL_STRM(x)		\
			((x << CL_SD_CTL_STRM_SHIFT) & CL_SD_CTL_STRM_MASK)
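/*
 * Usage sketch (assumption, mirroring how a code-loader setup path would
 * program the control register): the per-bit helpers above compose into a
 * single update, e.g. tagging the loader stream and enabling its interrupts:
 *
 *	u32 mask = CL_SD_CTL_IOCE_MASK | CL_SD_CTL_FEIE_MASK |
 *		   CL_SD_CTL_DEIE_MASK | CL_SD_CTL_STRM_MASK;
 *	u32 val  = CL_SD_CTL_IOCE(1) | CL_SD_CTL_FEIE(1) |
 *		   CL_SD_CTL_DEIE(1) | CL_SD_CTL_STRM(FW_CL_STREAM_NUMBER);
 *
 *	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL, mask, val);
 */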
/* CL: Stream Descriptor x Status */

/* Buffer Completion Interrupt Status */
#define CL_SD_STS_BCIS(x)		CL_SD_CTL_IOCE(x)

/* FIFO Error */
#define CL_SD_STS_FIFOE(x)		CL_SD_CTL_FEIE(x)

/* Descriptor Error */
#define CL_SD_STS_DESE(x)		CL_SD_CTL_DEIE(x)

/* FIFO Ready */
#define CL_SD_STS_FIFORDY(x)		CL_SD_CTL_FIFOLC(x)
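/*
 * Note and usage sketch (assumption): the status helpers simply alias the
 * control-register helpers because each status bit occupies the same bit
 * position in SD_STS as its enable bit does in SD_CTL. A completion check
 * in the interrupt path could then look like:
 *
 *	u8 sts = sst_dsp_shim_read(ctx, SKL_ADSP_REG_CL_SD_STS);
 *	if (sts & SKL_CL_DMA_SD_INT_COMPLETE)
 *		... record the wake status and wake the waiter ...
 */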
/* CL: Stream Descriptor x Last Valid Index */
#define CL_SD_LVI_SHIFT			0
#define CL_SD_LVI_MASK			(0xff << CL_SD_LVI_SHIFT)
#define CL_SD_LVI(x)			((x << CL_SD_LVI_SHIFT) & CL_SD_LVI_MASK)

/* CL: Stream Descriptor x FIFO Eviction Watermark */
#define CL_SD_FIFOW_SHIFT		0
#define CL_SD_FIFOW_MASK		(0x7 << CL_SD_FIFOW_SHIFT)
#define CL_SD_FIFOW(x)			\
			((x << CL_SD_FIFOW_SHIFT) & CL_SD_FIFOW_MASK)

/* CL: Stream Descriptor x Buffer Descriptor List Pointer Lower Base Address */

/* Protect Bits */
#define CL_SD_BDLPLBA_PROT_SHIFT	0
#define CL_SD_BDLPLBA_PROT_MASK		(1 << CL_SD_BDLPLBA_PROT_SHIFT)
#define CL_SD_BDLPLBA_PROT(x)		\
			((x << CL_SD_BDLPLBA_PROT_SHIFT) & CL_SD_BDLPLBA_PROT_MASK)

/* Buffer Descriptor List Lower Base Address */
#define CL_SD_BDLPLBA_SHIFT		7
#define CL_SD_BDLPLBA_MASK		(0x1ffffff << CL_SD_BDLPLBA_SHIFT)
#define CL_SD_BDLPLBA(x)		\
	((BDL_ALIGN(lower_32_bits(x)) << CL_SD_BDLPLBA_SHIFT) & CL_SD_BDLPLBA_MASK)

/* Buffer Descriptor List Upper Base Address */
#define CL_SD_BDLPUBA_SHIFT		0
#define CL_SD_BDLPUBA_MASK		(0xffffffff << CL_SD_BDLPUBA_SHIFT)
#define CL_SD_BDLPUBA(x)		\
			((upper_32_bits(x) << CL_SD_BDLPUBA_SHIFT) & CL_SD_BDLPUBA_MASK)
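/*
 * Usage sketch (assumption, following a typical controller setup path): the
 * 64-bit DMA address of the buffer descriptor list is split across the two
 * pointer registers, e.g. with dmab_bdl->addr as the source address:
 *
 *	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPL,
 *			   CL_SD_BDLPLBA(dmab_bdl->addr));
 *	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPU,
 *			   CL_SD_BDLPUBA(dmab_bdl->addr));
 */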
/*
 * Code Loader - Software Position Based FIFO
 * Capability Registers x Software Position Based FIFO Header
 */

/* Next Capability Pointer */
#define CL_SPBFIFO_SPBFCH_PTR_SHIFT	0
#define CL_SPBFIFO_SPBFCH_PTR_MASK	(0xff << CL_SPBFIFO_SPBFCH_PTR_SHIFT)
#define CL_SPBFIFO_SPBFCH_PTR(x)	\
		((x << CL_SPBFIFO_SPBFCH_PTR_SHIFT) & CL_SPBFIFO_SPBFCH_PTR_MASK)

/* Capability Identifier */
#define CL_SPBFIFO_SPBFCH_ID_SHIFT	16
#define CL_SPBFIFO_SPBFCH_ID_MASK	(0xfff << CL_SPBFIFO_SPBFCH_ID_SHIFT)
#define CL_SPBFIFO_SPBFCH_ID(x)		\
		((x << CL_SPBFIFO_SPBFCH_ID_SHIFT) & CL_SPBFIFO_SPBFCH_ID_MASK)

/* Capability Version */
#define CL_SPBFIFO_SPBFCH_VER_SHIFT	28
#define CL_SPBFIFO_SPBFCH_VER_MASK	(0xf << CL_SPBFIFO_SPBFCH_VER_SHIFT)
#define CL_SPBFIFO_SPBFCH_VER(x)	\
		((x << CL_SPBFIFO_SPBFCH_VER_SHIFT) & CL_SPBFIFO_SPBFCH_VER_MASK)

/* Software Position in Buffer Enable */
#define CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT	0
#define CL_SPBFIFO_SPBFCCTL_SPIBE_MASK	(1 << CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT)
#define CL_SPBFIFO_SPBFCCTL_SPIBE(x)	\
		((x << CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT) & CL_SPBFIFO_SPBFCCTL_SPIBE_MASK)
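/*
 * Usage sketch (assumption, matching the intent of the cl_setup_spb /
 * cl_cleanup_spb ops below): enable the software-position-based FIFO, then
 * publish how many valid bytes sit in the ring via the SPIB register:
 *
 *	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SPBFIFO_SPBFCCTL,
 *				 CL_SPBFIFO_SPBFCCTL_SPIBE_MASK,
 *				 CL_SPBFIFO_SPBFCCTL_SPIBE(1));
 *	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SPBFIFO_SPIB, size);
 */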
/* SST IPC SKL defines */
#define SKL_WAIT_TIMEOUT		500	/* 500 msec */
#define SKL_MAX_BUFFER_SIZE		(32 * PAGE_SIZE)

enum skl_cl_dma_wake_states {
	SKL_CL_DMA_STATUS_NONE = 0,
	SKL_CL_DMA_BUF_COMPLETE,
	SKL_CL_DMA_ERR,	/* TODO: Expand the error states */
};
struct sst_dsp;

struct skl_cl_dev_ops {
	void (*cl_setup_bdle)(struct sst_dsp *ctx,
			struct snd_dma_buffer *dmab_data,
			__le32 **bdlp, int size, int with_ioc);
	void (*cl_setup_controller)(struct sst_dsp *ctx,
			struct snd_dma_buffer *dmab_bdl,
			unsigned int max_size, u32 page_count);
	void (*cl_setup_spb)(struct sst_dsp *ctx,
			unsigned int size, bool enable);
	void (*cl_cleanup_spb)(struct sst_dsp *ctx);
	void (*cl_trigger)(struct sst_dsp *ctx, bool enable);
	void (*cl_cleanup_controller)(struct sst_dsp *ctx);
	int (*cl_copy_to_dmabuf)(struct sst_dsp *ctx,
			const void *bin, u32 size, bool wait);
	void (*cl_stop_dma)(struct sst_dsp *ctx);
};
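/*
 * Usage sketch (assumption): firmware download drives the code loader DMA
 * through these callbacks on the sst_dsp context; for instance, copying a
 * firmware image (fw here is an illustrative struct firmware pointer) into
 * the ring and stopping the stream on failure:
 *
 *	struct skl_cl_dev *cl = &ctx->cl_dev;
 *	int ret;
 *
 *	ret = cl->ops.cl_copy_to_dmabuf(ctx, fw->data, fw->size, true);
 *	if (ret < 0)
 *		cl->ops.cl_stop_dma(ctx);
 */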
/*
 * skl_cl_dev - holds information for code loader dma transfer
 *
 * @dmab_data: buffer pointer
 * @dmab_bdl: buffer descriptor list
 * @bufsize: ring buffer size
 * @frags: Last valid buffer descriptor index in the BDL
 * @curr_spib_pos: Current position in ring buffer
 * @dma_buffer_offset: dma buffer offset
 * @ops: operations supported on CL dma
 * @wait_queue: wait queue to wake for wake event
 * @wake_status: DMA wake status
 * @wait_condition: condition to wait on wait queue
 * @cl_dma_lock: for synchronized access to cldma
 */
struct skl_cl_dev {
	struct snd_dma_buffer dmab_data;
	struct snd_dma_buffer dmab_bdl;

	unsigned int bufsize;
	unsigned int frags;

	unsigned int curr_spib_pos;
	unsigned int dma_buffer_offset;
	struct skl_cl_dev_ops ops;

	wait_queue_head_t wait_queue;
	int wake_status;
	bool wait_condition;
};
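/*
 * Usage sketch (assumption, based on the wake-state enum above): a copy
 * routine sleeps on wait_queue until the completion interrupt sets
 * wait_condition and records a wake_status, bounded by SKL_WAIT_TIMEOUT:
 *
 *	ret = wait_event_timeout(cl->wait_queue, cl->wait_condition,
 *				 msecs_to_jiffies(SKL_WAIT_TIMEOUT));
 *	if (!ret)
 *		return -EIO;
 *	if (cl->wake_status != SKL_CL_DMA_BUF_COMPLETE)
 *		return -EIO;
 */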
#endif /* SKL_SST_CLDMA_H_ */