/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
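
/*
 * Note (added, not from the original header): the *_legacy_* helpers program
 * the clocks by writing the PLL registers directly, while the *_atom_* helpers
 * go through ATOM BIOS command tables.  The per-family tables below select one
 * flavour or the other via their .set_engine_clock/.set_memory_clock/
 * .set_clock_gating hooks.  A caller holding such a table would typically
 * reach these helpers through those hooks rather than calling them directly,
 * e.g. (illustrative sketch only; the rdev->asic member is an assumption and
 * is not declared in this header):
 *
 *	if (rdev->asic->set_engine_clock)
 *		rdev->asic->set_engine_clock(rdev, eng_clock);
 */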
/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);
static struct radeon_asic r100_asic = {
	.suspend = &r100_suspend,
	.resume = &r100_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
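
/*
 * Usage sketch (assumption, not part of the original header): one of these
 * static tables is selected for the detected chip family during driver init
 * and all hardware-specific work is then dispatched through it, along the
 * lines of:
 *
 *	rdev->asic = &r100_asic;
 *	...
 *	if (rdev->asic->gpu_reset(rdev) == 0)
 *		rdev->asic->ring_start(rdev);
 *
 * The selection logic and the rdev->asic member live elsewhere in the driver;
 * they are shown here only to illustrate how the tables are consumed.
 */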
/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset, uint64_t dst_offset,
			 unsigned num_pages, struct radeon_fence *fence);
static struct radeon_asic r300_asic = {
	.suspend = &r300_suspend,
	.resume = &r300_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
static struct radeon_asic r420_asic = {
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
	.suspend = &rs400_suspend,
	.resume = &rs400_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs600_asic = {
	.suspend = &rs600_suspend,
	.resume = &rs600_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
};
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs690_asic = {
	.suspend = &rs690_suspend,
	.resume = &rs690_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r300_copy_dma,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
};
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
	.suspend = &rv515_suspend,
	.resume = &rv515_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};
/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
	.suspend = &rv515_suspend,
	.resume = &r520_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset, uint64_t dst_offset,
		  unsigned num_pages, struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
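
/*
 * Illustrative sketch (assumed caller, not from this header): the surface
 * register hooks program tiling state for a buffer and release it again,
 * roughly:
 *
 *	r = rdev->asic->set_surface_reg(rdev, reg, tiling_flags,
 *					pitch, offset, obj_size);
 *	if (!r)
 *		... use the tiled buffer ...
 *	rdev->asic->clear_surface_reg(rdev, reg);
 */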
static struct radeon_asic r600_asic = {
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};
/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);
static struct radeon_asic rv770_asic = {
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};