/* Copyright (c) 2014 The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */
#include "a4xx_gpu.h"

#ifdef CONFIG_MSM_OCMEM
#  include <soc/qcom/ocmem.h>
#endif
#define A4XX_INT0_MASK \
	(A4XX_INT0_RBBM_AHB_ERROR |        \
	 A4XX_INT0_RBBM_ATB_BUS_OVERFLOW | \
	 A4XX_INT0_CP_T0_PACKET_IN_IB |    \
	 A4XX_INT0_CP_OPCODE_ERROR |       \
	 A4XX_INT0_CP_RESERVED_BIT_ERROR | \
	 A4XX_INT0_CP_HW_FAULT |           \
	 A4XX_INT0_CP_IB1_INT |            \
	 A4XX_INT0_CP_IB2_INT |            \
	 A4XX_INT0_CP_RB_INT |             \
	 A4XX_INT0_CP_REG_PROTECT_FAULT |  \
	 A4XX_INT0_CP_AHB_ERROR_HALT |     \
	 A4XX_INT0_UCHE_OOB_ACCESS)
extern bool hang_debug;
static void a4xx_dump(struct msm_gpu *gpu);
/*
 * a4xx_enable_hwcg() - Program the clock control registers
 * @gpu: The GPU device pointer
 */
static void a4xx_enable_hwcg(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	unsigned int i;

	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_TP(i), 0x02222202);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_TP(i), 0x00002222);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_TP(i), 0x0E739CE7);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_TP(i), 0x00111111);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_SP(i), 0x22222222);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_SP(i), 0x00222222);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_SP(i), 0x00000104);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_SP(i), 0x00000081);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_UCHE, 0x22222222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_UCHE, 0x02222222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL3_UCHE, 0x00000000);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL4_UCHE, 0x00000000);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_UCHE, 0x00004444);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_UCHE, 0x00001112);
	for (i = 0; i < 4; i++)
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_RB(i), 0x22222222);

	/* Disable L1 clocking in A420 due to CCU issues with it */
	for (i = 0; i < 4; i++) {
		if (adreno_is_a420(adreno_gpu)) {
			gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_RB(i),
					0x00002020);
		} else {
			gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_RB(i),
					0x00022020);
		}
	}

	for (i = 0; i < 4; i++) {
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_MARB_CCU(i),
				0x00000922);
	}

	for (i = 0; i < 4; i++) {
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_RB_MARB_CCU(i),
				0x00000000);
	}

	for (i = 0; i < 4; i++) {
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_RB_MARB_CCU_L1(i),
				0x00000001);
	}

	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_MODE_GPC, 0x02222222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_GPC, 0x04100104);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_GPC, 0x00022222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_COM_DCOM, 0x00000022);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_COM_DCOM, 0x0000010F);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_COM_DCOM, 0x00000022);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_TSE_RAS_RBBM, 0x00222222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_TSE_RAS_RBBM, 0x00004104);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_TSE_RAS_RBBM, 0x00000222);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_HLSQ, 0x00000000);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_HLSQ, 0x00000000);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ, 0x00020000);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL, 0xAAAAAAAA);
	gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2, 0);
}
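
/*
 * a4xx_me_init() - submit a CP_ME_INIT packet to bring up the CP
 * microengine, then flush the ring and wait for the GPU to go idle
 * before any real commands are submitted.
 */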
static void a4xx_me_init(struct msm_gpu *gpu)
{
	struct msm_ringbuffer *ring = gpu->rb;

	OUT_PKT3(ring, CP_ME_INIT, 17);
	OUT_RING(ring, 0x000003f7);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000080);
	OUT_RING(ring, 0x00000100);
	OUT_RING(ring, 0x00000180);
	OUT_RING(ring, 0x00006600);
	OUT_RING(ring, 0x00000150);
	OUT_RING(ring, 0x0000014e);
	OUT_RING(ring, 0x00000154);
	OUT_RING(ring, 0x00000001);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);
	OUT_RING(ring, 0x00000000);

	gpu->funcs->flush(gpu);
	gpu->funcs->idle(gpu);
}
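
/*
 * a4xx_hw_init() - one-time hardware setup: VBIF arbitration, clock
 * gating, error/hang detection, CP protected-register ranges, and
 * PM4/PFP microcode load, finishing with a4xx_me_init().
 */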
static int a4xx_hw_init(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a4xx_gpu *a4xx_gpu = to_a4xx_gpu(adreno_gpu);
	uint32_t *ptr, len;
	int i, ret;

	if (adreno_is_a4xx(adreno_gpu)) {
		gpu_write(gpu, REG_A4XX_VBIF_ABIT_SORT, 0x0001001F);
		gpu_write(gpu, REG_A4XX_VBIF_ABIT_SORT_CONF, 0x000000A4);
		gpu_write(gpu, REG_A4XX_VBIF_GATE_OFF_WRREQ_EN, 0x00000001);
		gpu_write(gpu, REG_A4XX_VBIF_IN_RD_LIM_CONF0, 0x18181818);
		gpu_write(gpu, REG_A4XX_VBIF_IN_RD_LIM_CONF1, 0x00000018);
		gpu_write(gpu, REG_A4XX_VBIF_IN_WR_LIM_CONF0, 0x18181818);
		gpu_write(gpu, REG_A4XX_VBIF_IN_WR_LIM_CONF1, 0x00000018);
		gpu_write(gpu, REG_A4XX_VBIF_ROUND_ROBIN_QOS_ARB, 0x00000003);
	}

	/* Make all blocks contribute to the GPU BUSY perf counter */
	gpu_write(gpu, REG_A4XX_RBBM_GPU_BUSY_MASKED, 0xffffffff);

	/* Tune the hysteresis counters for SP and CP idle detection */
	gpu_write(gpu, REG_A4XX_RBBM_SP_HYST_CNT, 0x10);
	gpu_write(gpu, REG_A4XX_RBBM_WAIT_IDLE_CLOCKS_CTL, 0x10);

	/* Enable the RBBM error reporting bits */
	gpu_write(gpu, REG_A4XX_RBBM_AHB_CTL0, 0x00000001);

	/* Enable AHB error reporting */
	gpu_write(gpu, REG_A4XX_RBBM_AHB_CTL1, 0xa6ffffff);

	/* Enable power counters */
	gpu_write(gpu, REG_A4XX_RBBM_RBBM_CTL, 0x00000030);

	/*
	 * Turn on hang detection - this spews a lot of useful information
	 * into the RBBM registers on a hang:
	 */
	gpu_write(gpu, REG_A4XX_RBBM_INTERFACE_HANG_INT_CTL,
			(1 << 30) | 0xFFFF);

	gpu_write(gpu, REG_A4XX_RB_GMEM_BASE_ADDR,
			(unsigned int)(a4xx_gpu->ocmem_base >> 14));

	/* Turn on performance counters: */
	gpu_write(gpu, REG_A4XX_RBBM_PERFCTR_CTL, 0x01);

	/* Disable L2 bypass to avoid UCHE out of bounds errors */
	gpu_write(gpu, REG_A4XX_UCHE_TRAP_BASE_LO, 0xffff0000);
	gpu_write(gpu, REG_A4XX_UCHE_TRAP_BASE_HI, 0xffff0000);

	gpu_write(gpu, REG_A4XX_CP_DEBUG, (1 << 25) |
			(adreno_is_a420(adreno_gpu) ? (1 << 29) : 0));

	a4xx_enable_hwcg(gpu);

	/*
	 * For A420 set RBBM_CLOCK_DELAY_HLSQ.CGC_HLSQ_TP_EARLY_CYC >= 2
	 * due to timing issue with HLSQ_TP_CLK_EN
	 */
	if (adreno_is_a420(adreno_gpu)) {
		unsigned int val;

		val = gpu_read(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ);
		val &= ~A4XX_CGC_HLSQ_EARLY_CYC__MASK;
		val |= 2 << A4XX_CGC_HLSQ_EARLY_CYC__SHIFT;
		gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ, val);
	}

	ret = adreno_hw_init(gpu);
	if (ret)
		return ret;

	/* setup access protection: */
	gpu_write(gpu, REG_A4XX_CP_PROTECT_CTRL, 0x00000007);

	gpu_write(gpu, REG_A4XX_CP_PROTECT(0), 0x62000010);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(1), 0x63000020);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(2), 0x64000040);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(3), 0x65000080);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(4), 0x66000100);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(5), 0x64000200);

	gpu_write(gpu, REG_A4XX_CP_PROTECT(6), 0x67000800);
	gpu_write(gpu, REG_A4XX_CP_PROTECT(7), 0x64001600);

	gpu_write(gpu, REG_A4XX_CP_PROTECT(8), 0x60003300);

	gpu_write(gpu, REG_A4XX_CP_PROTECT(9), 0x60003800);

	gpu_write(gpu, REG_A4XX_CP_PROTECT(10), 0x61003980);

	gpu_write(gpu, REG_A4XX_CP_PROTECT(11), 0x6e010000);

	gpu_write(gpu, REG_A4XX_RBBM_INT_0_MASK, A4XX_INT0_MASK);

	ret = adreno_hw_init(gpu);
	if (ret)
		return ret;

	/* Load PM4: */
	ptr = (uint32_t *)(adreno_gpu->pm4->data);
	len = adreno_gpu->pm4->size / 4;
	DBG("loading PM4 ucode version: %u", ptr[0]);
	gpu_write(gpu, REG_A4XX_CP_ME_RAM_WADDR, 0);
	for (i = 1; i < len; i++)
		gpu_write(gpu, REG_A4XX_CP_ME_RAM_DATA, ptr[i]);

	/* Load PFP: */
	ptr = (uint32_t *)(adreno_gpu->pfp->data);
	len = adreno_gpu->pfp->size / 4;
	DBG("loading PFP ucode version: %u", ptr[0]);

	gpu_write(gpu, REG_A4XX_CP_PFP_UCODE_ADDR, 0);
	for (i = 1; i < len; i++)
		gpu_write(gpu, REG_A4XX_CP_PFP_UCODE_DATA, ptr[i]);

	/* clear ME_HALT to start micro engine */
	gpu_write(gpu, REG_A4XX_CP_ME_CNTL, 0);

	a4xx_me_init(gpu);

	return 0;
}
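
/*
 * a4xx_recover() - optionally dump state, then pulse RBBM_SW_RESET_CMD
 * to soft-reset the GPU before handing off to the common adreno
 * recovery path.
 */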
static void a4xx_recover(struct msm_gpu *gpu)
{
	/* dump registers before resetting gpu, if enabled: */
	if (hang_debug)
		a4xx_dump(gpu);

	gpu_write(gpu, REG_A4XX_RBBM_SW_RESET_CMD, 1);
	gpu_read(gpu, REG_A4XX_RBBM_SW_RESET_CMD);
	gpu_write(gpu, REG_A4XX_RBBM_SW_RESET_CMD, 0);
	adreno_recover(gpu);
}

static void a4xx_destroy(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a4xx_gpu *a4xx_gpu = to_a4xx_gpu(adreno_gpu);

	DBG("%s", gpu->name);

	adreno_gpu_cleanup(adreno_gpu);

#ifdef CONFIG_MSM_OCMEM
	if (a4xx_gpu->ocmem_base)
		ocmem_free(OCMEM_GRAPHICS, a4xx_gpu->ocmem_hdl);
#endif

	kfree(a4xx_gpu);
}

static void a4xx_idle(struct msm_gpu *gpu)
{
	/* wait for ringbuffer to drain: */
	adreno_idle(gpu);

	/* then wait for GPU to finish: */
	if (spin_until(!(gpu_read(gpu, REG_A4XX_RBBM_STATUS) &
					A4XX_RBBM_STATUS_GPU_BUSY)))
		DRM_ERROR("%s: timeout waiting for GPU to idle!\n", gpu->name);

	/* TODO maybe we need to reset GPU here to recover from hang? */
}
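
/*
 * a4xx_irq() - top-level IRQ handler: read the RBBM interrupt status,
 * log it, and write it back to the clear register to acknowledge.
 */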
static irqreturn_t a4xx_irq(struct msm_gpu *gpu)
{
	uint32_t status;

	status = gpu_read(gpu, REG_A4XX_RBBM_INT_0_STATUS);
	DBG("%s: Int status %08x", gpu->name, status);

	gpu_write(gpu, REG_A4XX_RBBM_INT_CLEAR_CMD, status);

	msm_gpu_retire(gpu);

	return IRQ_HANDLED;
}
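
/*
 * Table of (start, end) pairs of register offsets that are safe to read
 * back, used when dumping GPU state for debugfs and hang debugging.
 */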
static const unsigned int a4xx_registers[] = {
	0x0000, 0x0002, 0x0004, 0x0021, 0x0023, 0x0024, 0x0026, 0x0026,
	0x0028, 0x002B, 0x002E, 0x0034, 0x0037, 0x0044, 0x0047, 0x0066,
	0x0068, 0x0095, 0x009C, 0x0170, 0x0174, 0x01AF,
	0x0200, 0x0233, 0x0240, 0x0250, 0x04C0, 0x04DD, 0x0500, 0x050B,
	0x0C00, 0x0C03, 0x0C08, 0x0C41, 0x0C50, 0x0C51,
	0x0C80, 0x0C81, 0x0C88, 0x0C8F,
	0x0CC0, 0x0CC0, 0x0CC4, 0x0CD2,
	0x0D00, 0x0D0C, 0x0D10, 0x0D17, 0x0D20, 0x0D23,
	0x0E60, 0x0E61, 0x0E63, 0x0E68,
	0x0E80, 0x0E84, 0x0E88, 0x0E95,
	0x1000, 0x1000, 0x1002, 0x1002, 0x1004, 0x1004, 0x1008, 0x100A,
	0x100C, 0x100D, 0x100F, 0x1010, 0x1012, 0x1016, 0x1024, 0x1024,
	0x1027, 0x1027, 0x1100, 0x1100, 0x1102, 0x1102, 0x1104, 0x1104,
	0x1110, 0x1110, 0x1112, 0x1116, 0x1124, 0x1124, 0x1300, 0x1300,
	0x2000, 0x2004, 0x2008, 0x2067, 0x2070, 0x2078, 0x207B, 0x216E,
	0x21C0, 0x21C6, 0x21D0, 0x21D0, 0x21D9, 0x21D9, 0x21E5, 0x21E7,
	0x2200, 0x2204, 0x2208, 0x22A9,
	0x2400, 0x2404, 0x2408, 0x2467, 0x2470, 0x2478, 0x247B, 0x256E,
	0x25C0, 0x25C6, 0x25D0, 0x25D0, 0x25D9, 0x25D9, 0x25E5, 0x25E7,
	0x2600, 0x2604, 0x2608, 0x26A9,
	0x2C00, 0x2C01, 0x2C10, 0x2C10, 0x2C12, 0x2C16, 0x2C1D, 0x2C20,
	0x2C28, 0x2C28, 0x2C30, 0x2C30, 0x2C32, 0x2C36, 0x2C40, 0x2C40,
	0x2C50, 0x2C50, 0x2C52, 0x2C56, 0x2C80, 0x2C80, 0x2C94, 0x2C95,
	0x3000, 0x3007, 0x300C, 0x3014, 0x3018, 0x301D, 0x3020, 0x3022,
	0x3024, 0x3026, 0x3028, 0x302A, 0x302C, 0x302D, 0x3030, 0x3031,
	0x3034, 0x3036, 0x3038, 0x3038, 0x303C, 0x303D, 0x3040, 0x3040,
	0x3049, 0x3049, 0x3058, 0x3058, 0x305B, 0x3061, 0x3064, 0x3068,
	0x306C, 0x306D, 0x3080, 0x3088, 0x308B, 0x308C, 0x3090, 0x3094,
	0x3098, 0x3098, 0x309C, 0x309C, 0x30C0, 0x30C0, 0x30C8, 0x30C8,
	0x30D0, 0x30D0, 0x30D8, 0x30D8, 0x30E0, 0x30E0, 0x3100, 0x3100,
	0x3108, 0x3108, 0x3110, 0x3110, 0x3118, 0x3118, 0x3120, 0x3120,
	0x3124, 0x3125, 0x3129, 0x3129, 0x3131, 0x3131, 0x330C, 0x330C,
	0x3310, 0x3310, 0x3400, 0x3401, 0x3410, 0x3410, 0x3412, 0x3416,
	0x341D, 0x3420, 0x3428, 0x3428, 0x3430, 0x3430, 0x3432, 0x3436,
	0x3440, 0x3440, 0x3450, 0x3450, 0x3452, 0x3456, 0x3480, 0x3480,
	0x3494, 0x3495, 0x4000, 0x4000, 0x4002, 0x4002, 0x4004, 0x4004,
	0x4008, 0x400A, 0x400C, 0x400D, 0x400F, 0x4012, 0x4014, 0x4016,
	0x401D, 0x401D, 0x4020, 0x4027, 0x4060, 0x4062, 0x4200, 0x4200,
	0x4300, 0x4300, 0x4400, 0x4400, 0x4500, 0x4500, 0x4800, 0x4802,
	0x480F, 0x480F, 0x4811, 0x4811, 0x4813, 0x4813, 0x4815, 0x4816,
	0x482B, 0x482B, 0x4857, 0x4857, 0x4883, 0x4883, 0x48AF, 0x48AF,
	0x48C5, 0x48C5, 0x48E5, 0x48E5, 0x4905, 0x4905, 0x4925, 0x4925,
	0x4945, 0x4945, 0x4950, 0x4950, 0x495B, 0x495B, 0x4980, 0x498E,
	0x4B00, 0x4B00, 0x4C00, 0x4C00, 0x4D00, 0x4D00, 0x4E00, 0x4E00,
	0x4E80, 0x4E80, 0x4F00, 0x4F00, 0x4F08, 0x4F08, 0x4F10, 0x4F10,
	0x4F18, 0x4F18, 0x4F20, 0x4F20, 0x4F30, 0x4F30, 0x4F60, 0x4F60,
	0x4F80, 0x4F81, 0x4F88, 0x4F89, 0x4FEE, 0x4FEE, 0x4FF3, 0x4FF3,
	0x6000, 0x6001, 0x6008, 0x600F, 0x6014, 0x6016, 0x6018, 0x601B,
	0x61FD, 0x61FD, 0x623C, 0x623C, 0x6380, 0x6380, 0x63A0, 0x63A0,
	0x63C0, 0x63C1, 0x63C8, 0x63C9, 0x63D0, 0x63D4, 0x63D6, 0x63D6,
	0x63EE, 0x63EE, 0x6400, 0x6401, 0x6408, 0x640F, 0x6414, 0x6416,
	0x6418, 0x641B, 0x65FD, 0x65FD, 0x663C, 0x663C, 0x6780, 0x6780,
	0x67A0, 0x67A0, 0x67C0, 0x67C1, 0x67C8, 0x67C9, 0x67D0, 0x67D4,
	0x67D6, 0x67D6, 0x67EE, 0x67EE, 0x6800, 0x6801, 0x6808, 0x680F,
	0x6814, 0x6816, 0x6818, 0x681B, 0x69FD, 0x69FD, 0x6A3C, 0x6A3C,
	0x6B80, 0x6B80, 0x6BA0, 0x6BA0, 0x6BC0, 0x6BC1, 0x6BC8, 0x6BC9,
	0x6BD0, 0x6BD4, 0x6BD6, 0x6BD6, 0x6BEE, 0x6BEE,
	~0 /* sentinel */
};

#ifdef CONFIG_DEBUG_FS
static void a4xx_show(struct msm_gpu *gpu, struct seq_file *m)
{
	gpu->funcs->pm_resume(gpu);

	seq_printf(m, "status: %08x\n",
			gpu_read(gpu, REG_A4XX_RBBM_STATUS));
	gpu->funcs->pm_suspend(gpu);

	adreno_show(gpu, m);
}
#endif

/* Register offset defines for A4XX, in order of enum adreno_regs */
static const unsigned int a4xx_register_offsets[REG_ADRENO_REGISTER_MAX] = {
	REG_ADRENO_DEFINE(REG_ADRENO_CP_DEBUG, REG_A4XX_CP_DEBUG),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_ME_RAM_WADDR, REG_A4XX_CP_ME_RAM_WADDR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_ME_RAM_DATA, REG_A4XX_CP_ME_RAM_DATA),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_PFP_UCODE_DATA,
			REG_A4XX_CP_PFP_UCODE_DATA),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_PFP_UCODE_ADDR,
			REG_A4XX_CP_PFP_UCODE_ADDR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_WFI_PEND_CTR, REG_A4XX_CP_WFI_PEND_CTR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_RB_BASE, REG_A4XX_CP_RB_BASE),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_RB_RPTR_ADDR, REG_A4XX_CP_RB_RPTR_ADDR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_RB_RPTR, REG_A4XX_CP_RB_RPTR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_RB_WPTR, REG_A4XX_CP_RB_WPTR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_PROTECT_CTRL, REG_A4XX_CP_PROTECT_CTRL),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_ME_CNTL, REG_A4XX_CP_ME_CNTL),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_RB_CNTL, REG_A4XX_CP_RB_CNTL),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_IB1_BASE, REG_A4XX_CP_IB1_BASE),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_IB1_BUFSZ, REG_A4XX_CP_IB1_BUFSZ),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_IB2_BASE, REG_A4XX_CP_IB2_BASE),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_IB2_BUFSZ, REG_A4XX_CP_IB2_BUFSZ),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_TIMESTAMP, REG_AXXX_CP_SCRATCH_REG0),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_ME_RAM_RADDR, REG_A4XX_CP_ME_RAM_RADDR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_ROQ_ADDR, REG_A4XX_CP_ROQ_ADDR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_ROQ_DATA, REG_A4XX_CP_ROQ_DATA),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_MERCIU_ADDR, REG_A4XX_CP_MERCIU_ADDR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_MERCIU_DATA, REG_A4XX_CP_MERCIU_DATA),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_MERCIU_DATA2, REG_A4XX_CP_MERCIU_DATA2),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_MEQ_ADDR, REG_A4XX_CP_MEQ_ADDR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_MEQ_DATA, REG_A4XX_CP_MEQ_DATA),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_HW_FAULT, REG_A4XX_CP_HW_FAULT),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_PROTECT_STATUS,
			REG_A4XX_CP_PROTECT_STATUS),
	REG_ADRENO_DEFINE(REG_ADRENO_SCRATCH_ADDR, REG_A4XX_CP_SCRATCH_ADDR),
	REG_ADRENO_DEFINE(REG_ADRENO_SCRATCH_UMSK, REG_A4XX_CP_SCRATCH_UMASK),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_STATUS, REG_A4XX_RBBM_STATUS),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_PERFCTR_CTL,
			REG_A4XX_RBBM_PERFCTR_CTL),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_PERFCTR_LOAD_CMD0,
			REG_A4XX_RBBM_PERFCTR_LOAD_CMD0),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_PERFCTR_LOAD_CMD1,
			REG_A4XX_RBBM_PERFCTR_LOAD_CMD1),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_PERFCTR_LOAD_CMD2,
			REG_A4XX_RBBM_PERFCTR_LOAD_CMD2),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_PERFCTR_PWR_1_LO,
			REG_A4XX_RBBM_PERFCTR_PWR_1_LO),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_INT_0_MASK, REG_A4XX_RBBM_INT_0_MASK),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_INT_0_STATUS,
			REG_A4XX_RBBM_INT_0_STATUS),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_AHB_ERROR_STATUS,
			REG_A4XX_RBBM_AHB_ERROR_STATUS),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_AHB_CMD, REG_A4XX_RBBM_AHB_CMD),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_CLOCK_CTL, REG_A4XX_RBBM_CLOCK_CTL),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_AHB_ME_SPLIT_STATUS,
			REG_A4XX_RBBM_AHB_ME_SPLIT_STATUS),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_AHB_PFP_SPLIT_STATUS,
			REG_A4XX_RBBM_AHB_PFP_SPLIT_STATUS),
	REG_ADRENO_DEFINE(REG_ADRENO_VPC_DEBUG_RAM_SEL,
			REG_A4XX_VPC_DEBUG_RAM_SEL),
	REG_ADRENO_DEFINE(REG_ADRENO_VPC_DEBUG_RAM_READ,
			REG_A4XX_VPC_DEBUG_RAM_READ),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_INT_CLEAR_CMD,
			REG_A4XX_RBBM_INT_CLEAR_CMD),
	REG_ADRENO_DEFINE(REG_ADRENO_VSC_SIZE_ADDRESS,
			REG_A4XX_VSC_SIZE_ADDRESS),
	REG_ADRENO_DEFINE(REG_ADRENO_VFD_CONTROL_0, REG_A4XX_VFD_CONTROL_0),
	REG_ADRENO_DEFINE(REG_ADRENO_SP_VS_PVT_MEM_ADDR_REG,
			REG_A4XX_SP_VS_PVT_MEM_ADDR),
	REG_ADRENO_DEFINE(REG_ADRENO_SP_FS_PVT_MEM_ADDR_REG,
			REG_A4XX_SP_FS_PVT_MEM_ADDR),
	REG_ADRENO_DEFINE(REG_ADRENO_SP_VS_OBJ_START_REG,
			REG_A4XX_SP_VS_OBJ_START),
	REG_ADRENO_DEFINE(REG_ADRENO_SP_FS_OBJ_START_REG,
			REG_A4XX_SP_FS_OBJ_START),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_RBBM_CTL, REG_A4XX_RBBM_RBBM_CTL),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_SW_RESET_CMD,
			REG_A4XX_RBBM_SW_RESET_CMD),
	REG_ADRENO_DEFINE(REG_ADRENO_UCHE_INVALIDATE0,
			REG_A4XX_UCHE_INVALIDATE0),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_PERFCTR_LOAD_VALUE_LO,
			REG_A4XX_RBBM_PERFCTR_LOAD_VALUE_LO),
	REG_ADRENO_DEFINE(REG_ADRENO_RBBM_PERFCTR_LOAD_VALUE_HI,
			REG_A4XX_RBBM_PERFCTR_LOAD_VALUE_HI),
};
static void a4xx_dump(struct msm_gpu *gpu)
{
	printk("status: %08x\n",
			gpu_read(gpu, REG_A4XX_RBBM_STATUS));
	adreno_dump(gpu);
}
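
/*
 * GPU function table: a4xx-specific hooks for init, recovery, idle,
 * IRQ and teardown, with the generic adreno/msm helpers for the rest.
 */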
static const struct adreno_gpu_funcs funcs = {
	.base = {
		.get_param = adreno_get_param,
		.hw_init = a4xx_hw_init,
		.pm_suspend = msm_gpu_pm_suspend,
		.pm_resume = msm_gpu_pm_resume,
		.recover = a4xx_recover,
		.last_fence = adreno_last_fence,
		.submit = adreno_submit,
		.flush = adreno_flush,
		.idle = a4xx_idle,
		.irq = a4xx_irq,
		.destroy = a4xx_destroy,
#ifdef CONFIG_DEBUG_FS
		.show = a4xx_show,
#endif
	},
};
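
/*
 * a4xx_gpu_init() - allocate and register the a4xx GPU instance, hook
 * up the register tables and function pointers, and claim OCMEM for
 * GMEM when CONFIG_MSM_OCMEM is enabled.
 */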
struct msm_gpu *a4xx_gpu_init(struct drm_device *dev)
{
	struct a4xx_gpu *a4xx_gpu = NULL;
	struct adreno_gpu *adreno_gpu;
	struct msm_gpu *gpu;
	struct msm_drm_private *priv = dev->dev_private;
	struct platform_device *pdev = priv->gpu_pdev;
	int ret;

	if (!pdev) {
		dev_err(dev->dev, "no a4xx device\n");
		ret = -ENXIO;
		goto fail;
	}

	a4xx_gpu = kzalloc(sizeof(*a4xx_gpu), GFP_KERNEL);
	if (!a4xx_gpu) {
		ret = -ENOMEM;
		goto fail;
	}

	adreno_gpu = &a4xx_gpu->base;
	gpu = &adreno_gpu->base;

	a4xx_gpu->pdev = pdev;

	gpu->perfcntrs = NULL;
	gpu->num_perfcntrs = 0;

	adreno_gpu->registers = a4xx_registers;
	adreno_gpu->reg_offsets = a4xx_register_offsets;

	ret = adreno_gpu_init(dev, pdev, adreno_gpu, &funcs);
	if (ret)
		goto fail;

	/* if needed, allocate gmem: */
	if (adreno_is_a4xx(adreno_gpu)) {
#ifdef CONFIG_MSM_OCMEM
		/* TODO this is different/missing upstream: */
		struct ocmem_buf *ocmem_hdl =
				ocmem_allocate(OCMEM_GRAPHICS, adreno_gpu->gmem);

		a4xx_gpu->ocmem_hdl = ocmem_hdl;
		a4xx_gpu->ocmem_base = ocmem_hdl->addr;
		adreno_gpu->gmem = ocmem_hdl->len;
		DBG("using %dK of OCMEM at 0x%08x", adreno_gpu->gmem / 1024,
				a4xx_gpu->ocmem_base);
#endif
	}

	if (!gpu->mmu) {
		/* TODO we think it is possible to configure the GPU to
		 * restrict access to VRAM carveout. But the required
		 * registers are unknown. For now just bail out and
		 * limp along with just modesetting. If it turns out
		 * to not be possible to restrict access, then we must
		 * implement a cmdstream validator.
		 */
		dev_err(dev->dev, "No memory protection without IOMMU\n");
		ret = -ENXIO;
		goto fail;
	}

	return gpu;

fail:
	if (a4xx_gpu)
		a4xx_destroy(&a4xx_gpu->base.base);

	return ERR_PTR(ret);
}