/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 */
#include "drmP.h"
#include "radeon_drm.h"
#include "radeon.h"

#include <asm/div64.h>

#include "drm_crtc_helper.h"
#include "drm_edid.h"
static int radeon_ddc_dump(struct drm_connector *connector);

static void avivo_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUT_RW_MODE, 0);
	WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);

	WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(AVIVO_DC_LUT_30_COLOR,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}

	WREG32(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id);
}
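/* DCE4 (Evergreen) variant of the LUT load.  The programming sequence is the
 * same as the AVIVO path above, but every LUT register is banked per CRTC
 * (selected through crtc_offset) instead of going through the shared
 * DC_LUT_RW_SELECT window, and a narrower write-enable mask is used
 * (0x7 instead of 0x3f). */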
static void dce4_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	for (i = 0; i < 256; i++) {
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}
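/* DCE5 (Northern Islands) variant: in addition to loading the 256-entry LUT,
 * explicitly route the pipe around the newer color blocks - input CSC,
 * prescale, degamma, gamut remap, regamma and output CSC are all set to
 * bypass and the input gamma block is pointed at the legacy LUT - so that
 * only the LUT programmed below affects the output. */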
static void dce5_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);

	WREG32(NI_INPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS) |
		NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS)));
	WREG32(NI_PRESCALE_GRPH_CONTROL + radeon_crtc->crtc_offset,
	       NI_GRPH_PRESCALE_BYPASS);
	WREG32(NI_PRESCALE_OVL_CONTROL + radeon_crtc->crtc_offset,
	       NI_OVL_PRESCALE_BYPASS);
	WREG32(NI_INPUT_GAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT) |
		NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT)));

	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	for (i = 0; i < 256; i++) {
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}

	WREG32(NI_DEGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS)));
	WREG32(NI_GAMUT_REMAP_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS) |
		NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS)));
	WREG32(NI_REGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS) |
		NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS)));
	WREG32(NI_OUTPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_OUTPUT_CSC_GRPH_MODE(NI_OUTPUT_CSC_BYPASS) |
		NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS)));
	/* XXX match this to the depth of the crtc fmt block, move to modeset? */
	WREG32(0x6940 + radeon_crtc->crtc_offset, 0);
}
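/* Pre-AVIVO chips have a single DAC palette; RADEON_DAC2_PALETTE_ACC_CTL
 * selects which CRTC the palette writes below apply to, and the 256 entries
 * are then streamed through the PALETTE_INDEX/PALETTE_30_DATA register
 * pair. */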
static void legacy_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;
	uint32_t dac2_cntl;

	dac2_cntl = RREG32(RADEON_DAC_CNTL2);
	if (radeon_crtc->crtc_id == 0)
		dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
	else
		dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
	WREG32(RADEON_DAC_CNTL2, dac2_cntl);

	WREG8(RADEON_PALETTE_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(RADEON_PALETTE_30_DATA,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}
void radeon_crtc_load_lut(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE5(rdev))
		dce5_crtc_load_lut(crtc);
	else if (ASIC_IS_DCE4(rdev))
		dce4_crtc_load_lut(crtc);
	else if (ASIC_IS_AVIVO(rdev))
		avivo_crtc_load_lut(crtc);
	else
		legacy_crtc_load_lut(crtc);
}
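/* fbcon hands us 16-bit gamma ramp entries while the hardware LUT above
 * stores 10 bits per channel (packed as r << 20 | g << 10 | b), so the
 * helpers below simply shift by 6 in each direction. */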
/** Sets the color ramps on behalf of fbcon */
void radeon_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green,
			      u16 blue, int regno)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	radeon_crtc->lut_r[regno] = red >> 6;
	radeon_crtc->lut_g[regno] = green >> 6;
	radeon_crtc->lut_b[regno] = blue >> 6;
}
/** Gets the color ramps on behalf of fbcon */
void radeon_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green,
			      u16 *blue, int regno)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	*red = radeon_crtc->lut_r[regno] << 6;
	*green = radeon_crtc->lut_g[regno] << 6;
	*blue = radeon_crtc->lut_b[regno] << 6;
}
static void radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
				  u16 *blue, uint32_t start, uint32_t size)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	int end = (start + size > 256) ? 256 : start + size, i;

	/* userspace palettes are always correct as is */
	for (i = start; i < end; i++) {
		radeon_crtc->lut_r[i] = red[i] >> 6;
		radeon_crtc->lut_g[i] = green[i] >> 6;
		radeon_crtc->lut_b[i] = blue[i] >> 6;
	}
	radeon_crtc_load_lut(crtc);
}
static void radeon_crtc_destroy(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(radeon_crtc);
}
/*
 * Handle unpin events outside the interrupt handler proper.
 */
static void radeon_unpin_work_func(struct work_struct *__work)
{
	struct radeon_unpin_work *work =
		container_of(__work, struct radeon_unpin_work, work);
	int r;

	/* unpin of the old buffer */
	r = radeon_bo_reserve(work->old_rbo, false);
	if (likely(r == 0)) {
		r = radeon_bo_unpin(work->old_rbo);
		if (unlikely(r != 0)) {
			DRM_ERROR("failed to unpin buffer after flip\n");
		}
		radeon_bo_unreserve(work->old_rbo);
	} else
		DRM_ERROR("failed to reserve buffer after flip\n");

	drm_gem_object_unreference_unlocked(&work->old_rbo->gem_base);
	kfree(work);
}
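/* Vblank interrupt handler side of the pageflip path: depending on the
 * current scanout position this either completes the flip right away
 * (sends the vblank event, drops the vblank reference and queues the unpin
 * work above) or defers completion to the next vblank irq via
 * deferred_flip_completion. */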
void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	struct radeon_unpin_work *work;
	struct drm_pending_vblank_event *e;
	struct timeval now;
	unsigned long flags;
	u32 update_pending;
	int vpos, hpos;

	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
	work = radeon_crtc->unpin_work;
	if (work == NULL ||
	    (work->fence && !radeon_fence_signaled(work->fence))) {
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}
	/* New pageflip, or just completion of a previous one? */
	if (!radeon_crtc->deferred_flip_completion) {
		/* do the flip (mmio) */
		update_pending = radeon_page_flip(rdev, crtc_id, work->new_crtc_base);
	} else {
		/* This is just a completion of a flip queued in crtc
		 * at last invocation. Make sure we go directly to
		 * completion routine.
		 */
		update_pending = 0;
		radeon_crtc->deferred_flip_completion = 0;
	}

	/* Has the pageflip already completed in crtc, or is it certain
	 * to complete in this vblank?
	 */
	if (update_pending &&
	    (DRM_SCANOUTPOS_VALID & radeon_get_crtc_scanoutpos(rdev->ddev, crtc_id,
							       &vpos, &hpos)) &&
	    (vpos < (99 * rdev->mode_info.crtcs[crtc_id]->base.hwmode.crtc_vdisplay)/100)) {
		/* crtc didn't flip in this target vblank interval,
		 * but flip is pending in crtc. It will complete it
		 * in next vblank interval, so complete the flip at
		 * next vblank irq.
		 */
		radeon_crtc->deferred_flip_completion = 1;
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}

	/* Pageflip (will be) certainly completed in this vblank. Clean up. */
	radeon_crtc->unpin_work = NULL;

	/* wakeup userspace */
	if (work->event) {
		e = work->event;
		e->event.sequence = drm_vblank_count_and_time(rdev->ddev, crtc_id, &now);
		e->event.tv_sec = now.tv_sec;
		e->event.tv_usec = now.tv_usec;
		list_add_tail(&e->base.link, &e->base.file_priv->event_list);
		wake_up_interruptible(&e->base.file_priv->event_wait);
	}
	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);

	drm_vblank_put(rdev->ddev, radeon_crtc->crtc_id);
	radeon_fence_unref(&work->fence);
	radeon_post_page_flip(work->rdev, work->crtc_id);
	schedule_work(&work->work);
}
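/* Userspace-facing side of the pageflip: stash the old framebuffer's BO in a
 * radeon_unpin_work item, pin the new BO into VRAM to get its base address,
 * store that base for the irq handler above and arm the vblank interrupt via
 * radeon_pre_page_flip().  The actual flip is then issued from
 * radeon_crtc_handle_flip(). */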
static int radeon_crtc_page_flip(struct drm_crtc *crtc,
				 struct drm_framebuffer *fb,
				 struct drm_pending_vblank_event *event)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_framebuffer *old_radeon_fb;
	struct radeon_framebuffer *new_radeon_fb;
	struct drm_gem_object *obj;
	struct radeon_bo *rbo;
	struct radeon_unpin_work *work;
	unsigned long flags;
	u32 tiling_flags, pitch_pixels;
	u64 base;
	int r;

	work = kzalloc(sizeof *work, GFP_KERNEL);
	if (work == NULL)
		return -ENOMEM;

	work->event = event;
	work->rdev = rdev;
	work->crtc_id = radeon_crtc->crtc_id;
	old_radeon_fb = to_radeon_framebuffer(crtc->fb);
	new_radeon_fb = to_radeon_framebuffer(fb);
	/* schedule unpin of the old buffer */
	obj = old_radeon_fb->obj;
	/* take a reference to the old object */
	drm_gem_object_reference(obj);
	rbo = gem_to_radeon_bo(obj);
	work->old_rbo = rbo;
	obj = new_radeon_fb->obj;
	rbo = gem_to_radeon_bo(obj);
	if (rbo->tbo.sync_obj)
		work->fence = radeon_fence_ref(rbo->tbo.sync_obj);
	INIT_WORK(&work->work, radeon_unpin_work_func);

	/* We borrow the event spin lock for protecting unpin_work */
	spin_lock_irqsave(&dev->event_lock, flags);
	if (radeon_crtc->unpin_work) {
		DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
		r = -EBUSY;
		goto unlock_free;
	}
	radeon_crtc->unpin_work = work;
	radeon_crtc->deferred_flip_completion = 0;
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* pin the new buffer */
	DRM_DEBUG_DRIVER("flip-ioctl() cur_fbo = %p, cur_bbo = %p\n",
			 work->old_rbo, rbo);

	r = radeon_bo_reserve(rbo, false);
	if (unlikely(r != 0)) {
		DRM_ERROR("failed to reserve new rbo buffer before flip\n");
		goto pflip_cleanup;
	}
	r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &base);
	if (unlikely(r != 0)) {
		radeon_bo_unreserve(rbo);
		r = -EINVAL;
		DRM_ERROR("failed to pin new rbo buffer before flip\n");
		goto pflip_cleanup;
	}
	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(rbo);

	if (!ASIC_IS_AVIVO(rdev)) {
		/* crtc offset is from display base addr not FB location */
		base -= radeon_crtc->legacy_display_base_addr;
		pitch_pixels = fb->pitch / (fb->bits_per_pixel / 8);

		if (tiling_flags & RADEON_TILING_MACRO) {
			if (ASIC_IS_R300(rdev)) {
				base &= ~0x7ff;
			} else {
				int byteshift = fb->bits_per_pixel >> 4;
				int tile_addr = (((crtc->y >> 3) * pitch_pixels + crtc->x) >> (8 - byteshift)) << 11;
				base += tile_addr + ((crtc->x << byteshift) % 256) + ((crtc->y % 8) << 8);
			}
		} else {
			int offset = crtc->y * pitch_pixels + crtc->x;
			switch (fb->bits_per_pixel) {
			/* per-bpp scaling of the offset elided */
			}
			base += offset;
		}
	}

	spin_lock_irqsave(&dev->event_lock, flags);
	work->new_crtc_base = base;
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* update crtc fb */
	crtc->fb = fb;

	r = drm_vblank_get(dev, radeon_crtc->crtc_id);
	if (r) {
		DRM_ERROR("failed to get vblank before flip\n");
		goto pflip_cleanup1;
	}

	/* set the proper interrupt */
	radeon_pre_page_flip(rdev, radeon_crtc->crtc_id);

	return 0;

pflip_cleanup1:
	if (unlikely(radeon_bo_reserve(rbo, false) != 0)) {
		DRM_ERROR("failed to reserve new rbo in error path\n");
		goto pflip_cleanup;
	}
	if (unlikely(radeon_bo_unpin(rbo) != 0)) {
		DRM_ERROR("failed to unpin new rbo in error path\n");
	}
	radeon_bo_unreserve(rbo);

pflip_cleanup:
	spin_lock_irqsave(&dev->event_lock, flags);
	radeon_crtc->unpin_work = NULL;
unlock_free:
	drm_gem_object_unreference_unlocked(old_radeon_fb->obj);
	spin_unlock_irqrestore(&dev->event_lock, flags);
	radeon_fence_unref(&work->fence);
	kfree(work);

	return r;
}
static const struct drm_crtc_funcs radeon_crtc_funcs = {
	.cursor_set = radeon_crtc_cursor_set,
	.cursor_move = radeon_crtc_cursor_move,
	.gamma_set = radeon_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = radeon_crtc_destroy,
	.page_flip = radeon_crtc_page_flip,
};
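/* Note: the crtc is allocated together with its connector pointer array in a
 * single kzalloc below - mode_set.connectors simply points just past the
 * radeon_crtc structure (radeon_crtc + 1), sized for RADEONFB_CONN_LIMIT
 * entries. */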
static void radeon_crtc_init(struct drm_device *dev, int index)
{
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc;
	int i;

	radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (radeon_crtc == NULL)
		return;

	drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
	radeon_crtc->crtc_id = index;
	rdev->mode_info.crtcs[index] = radeon_crtc;

	radeon_crtc->mode_set.crtc = &radeon_crtc->base;
	radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
	radeon_crtc->mode_set.num_connectors = 0;

	for (i = 0; i < 256; i++) {
		radeon_crtc->lut_r[i] = i << 2;
		radeon_crtc->lut_g[i] = i << 2;
		radeon_crtc->lut_b[i] = i << 2;
	}

	if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
		radeon_atombios_init_crtc(dev, radeon_crtc);
	else
		radeon_legacy_init_crtc(dev, radeon_crtc);
}
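/* Human-readable names indexed by the ATOM encoder/connector/hpd ids; only
 * used by radeon_print_display_setup() below when logging the board's
 * display configuration. */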
static const char *encoder_names[36] = {
	/* ... */
	"INTERNAL_KLDSCP_TMDS1",
	"INTERNAL_KLDSCP_DVO1",
	"INTERNAL_KLDSCP_DAC1",
	"INTERNAL_KLDSCP_DAC2",
	/* ... */
	"INTERNAL_KLDSCP_LVTMA",
	/* ... */
};

static const char *connector_names[15] = {
	/* ... */
};

static const char *hpd_names[6] = {
	/* ... */
};
static void radeon_print_display_setup(struct drm_device *dev)
{
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	struct drm_encoder *encoder;
	struct radeon_encoder *radeon_encoder;
	uint32_t devices;
	int i = 0;

	DRM_INFO("Radeon Display Connectors\n");
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		radeon_connector = to_radeon_connector(connector);
		DRM_INFO("Connector %d:\n", i);
		DRM_INFO(" %s\n", connector_names[connector->connector_type]);
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			DRM_INFO(" %s\n", hpd_names[radeon_connector->hpd.hpd]);
		if (radeon_connector->ddc_bus) {
			DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
				 radeon_connector->ddc_bus->rec.mask_clk_reg,
				 radeon_connector->ddc_bus->rec.mask_data_reg,
				 radeon_connector->ddc_bus->rec.a_clk_reg,
				 radeon_connector->ddc_bus->rec.a_data_reg,
				 radeon_connector->ddc_bus->rec.en_clk_reg,
				 radeon_connector->ddc_bus->rec.en_data_reg,
				 radeon_connector->ddc_bus->rec.y_clk_reg,
				 radeon_connector->ddc_bus->rec.y_data_reg);
			if (radeon_connector->router.ddc_valid)
				DRM_INFO(" DDC Router 0x%x/0x%x\n",
					 radeon_connector->router.ddc_mux_control_pin,
					 radeon_connector->router.ddc_mux_state);
			if (radeon_connector->router.cd_valid)
				DRM_INFO(" Clock/Data Router 0x%x/0x%x\n",
					 radeon_connector->router.cd_mux_control_pin,
					 radeon_connector->router.cd_mux_state);
		} else {
			if (connector->connector_type == DRM_MODE_CONNECTOR_VGA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVII ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVID ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVIA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)
				DRM_INFO(" DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
		}
		DRM_INFO(" Encoders:\n");
		list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
			radeon_encoder = to_radeon_encoder(encoder);
			devices = radeon_encoder->devices & radeon_connector->devices;
			if (devices) {
				if (devices & ATOM_DEVICE_CRT1_SUPPORT)
					DRM_INFO(" CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_CRT2_SUPPORT)
					DRM_INFO(" CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_LCD1_SUPPORT)
					DRM_INFO(" LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP1_SUPPORT)
					DRM_INFO(" DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP2_SUPPORT)
					DRM_INFO(" DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP3_SUPPORT)
					DRM_INFO(" DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP4_SUPPORT)
					DRM_INFO(" DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP5_SUPPORT)
					DRM_INFO(" DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP6_SUPPORT)
					DRM_INFO(" DFP6: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_TV1_SUPPORT)
					DRM_INFO(" TV1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_CV_SUPPORT)
					DRM_INFO(" CV: %s\n", encoder_names[radeon_encoder->encoder_id]);
			}
		}
		i++;
	}
}
static bool radeon_setup_enc_conn(struct drm_device *dev)
{
	struct radeon_device *rdev = dev->dev_private;
	struct drm_connector *drm_connector;
	bool ret = false;

	if (rdev->is_atom_bios) {
		ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
		if (ret == false)
			ret = radeon_get_atom_connector_info_from_object_table(dev);
	} else {
		ret = radeon_get_legacy_connector_info_from_bios(dev);
		if (ret == false)
			ret = radeon_get_legacy_connector_info_from_table(dev);
	}

	if (!ASIC_IS_AVIVO(rdev))
		ret = radeon_get_legacy_connector_info_from_table(dev);

	radeon_setup_encoder_clones(dev);
	radeon_print_display_setup(dev);
	list_for_each_entry(drm_connector, &dev->mode_config.connector_list, head)
		radeon_ddc_dump(drm_connector);

	return ret;
}
int radeon_ddc_get_modes(struct radeon_connector *radeon_connector)
{
	struct drm_device *dev = radeon_connector->base.dev;
	struct radeon_device *rdev = dev->dev_private;
	int ret = 0;

	/* on hw with routers, select right port */
	if (radeon_connector->router.ddc_valid)
		radeon_router_select_ddc_port(radeon_connector);

	if ((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_DisplayPort) ||
	    (radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP)) {
		struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
		if ((dig->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT ||
		     dig->dp_sink_type == CONNECTOR_OBJECT_ID_eDP) && dig->dp_i2c_bus)
			radeon_connector->edid = drm_get_edid(&radeon_connector->base, &dig->dp_i2c_bus->adapter);
	}
	if (!radeon_connector->ddc_bus)
		return -1;
	if (!radeon_connector->edid) {
		radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter);
	}

	if (!radeon_connector->edid) {
		if (rdev->is_atom_bios) {
			/* some laptops provide a hardcoded edid in rom for LCDs */
			if (((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_LVDS) ||
			     (radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP)))
				radeon_connector->edid = radeon_bios_get_hardcoded_edid(rdev);
		} else
			/* some servers provide a hardcoded edid in rom for KVMs */
			radeon_connector->edid = radeon_bios_get_hardcoded_edid(rdev);
	}
	if (radeon_connector->edid) {
		drm_mode_connector_update_edid_property(&radeon_connector->base, radeon_connector->edid);
		ret = drm_add_edid_modes(&radeon_connector->base, radeon_connector->edid);
		return ret;
	}
	drm_mode_connector_update_edid_property(&radeon_connector->base, NULL);
	return 0;
}
static int radeon_ddc_dump(struct drm_connector *connector)
{
	struct edid *edid;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	int ret = 0;

	/* on hw with routers, select right port */
	if (radeon_connector->router.ddc_valid)
		radeon_router_select_ddc_port(radeon_connector);

	if (!radeon_connector->ddc_bus)
		return -1;
	edid = drm_get_edid(connector, &radeon_connector->ddc_bus->adapter);
	/* Log EDID retrieval status here. In particular with regard to
	 * connectors with the requires_extended_probe flag set, this will
	 * prevent radeon_dvi_detect() from fetching the EDID on this
	 * connector as long as no valid EDID header is found. */
	if (edid) {
		DRM_INFO("Radeon display connector %s: Found valid EDID",
			 drm_get_connector_name(connector));
		kfree(edid);
	} else {
		DRM_INFO("Radeon display connector %s: No monitor connected or invalid EDID",
			 drm_get_connector_name(connector));
	}
	return ret;
}
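/* AVIVO PLL computation: the dividers below are chosen so that
 *
 *	dot clock = reference_freq * (fb_div + frac_fb_div / 10) / (ref_div * post_div)
 *
 * matches the requested clock (see the *dot_clock_p calculation in
 * radeon_compute_pll_avivo()). */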
static void avivo_get_fb_div(struct radeon_pll *pll,
			     u32 target_clock,
			     u32 post_div,
			     u32 ref_div,
			     u32 *fb_div,
			     u32 *frac_fb_div)
{
	u32 tmp = post_div * ref_div;

	tmp *= target_clock;
	*fb_div = tmp / pll->reference_freq;
	*frac_fb_div = tmp % pll->reference_freq;

	if (*fb_div > pll->max_feedback_div)
		*fb_div = pll->max_feedback_div;
	else if (*fb_div < pll->min_feedback_div)
		*fb_div = pll->min_feedback_div;
}
static u32 avivo_get_post_div(struct radeon_pll *pll,
			      u32 target_clock)
{
	u32 vco, post_div, tmp;

	if (pll->flags & RADEON_PLL_USE_POST_DIV)
		return pll->post_div;

	if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP) {
		if (pll->flags & RADEON_PLL_IS_LCD)
			vco = pll->lcd_pll_out_min;
		else
			vco = pll->pll_out_min;
	} else {
		if (pll->flags & RADEON_PLL_IS_LCD)
			vco = pll->lcd_pll_out_max;
		else
			vco = pll->pll_out_max;
	}

	post_div = vco / target_clock;
	tmp = vco % target_clock;

	if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP) {
		/* rounding adjustment of post_div elided */
	}

	if (post_div > pll->max_post_div)
		post_div = pll->max_post_div;
	else if (post_div < pll->min_post_div)
		post_div = pll->min_post_div;

	return post_div;
}
#define MAX_TOLERANCE 10

void radeon_compute_pll_avivo(struct radeon_pll *pll,
			      u32 freq,
			      u32 *dot_clock_p,
			      u32 *fb_div_p,
			      u32 *frac_fb_div_p,
			      u32 *ref_div_p,
			      u32 *post_div_p)
{
	u32 target_clock = freq / 10;
	u32 post_div = avivo_get_post_div(pll, target_clock);
	u32 ref_div = pll->min_ref_div;
	u32 fb_div = 0, frac_fb_div = 0, tmp;

	if (pll->flags & RADEON_PLL_USE_REF_DIV)
		ref_div = pll->reference_div;

	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		avivo_get_fb_div(pll, target_clock, post_div, ref_div, &fb_div, &frac_fb_div);
		frac_fb_div = (100 * frac_fb_div) / pll->reference_freq;
		if (frac_fb_div >= 5) {
			frac_fb_div -= 5;
			frac_fb_div = frac_fb_div / 10;
			frac_fb_div++;
		}
		if (frac_fb_div >= 10) {
			fb_div++;
			frac_fb_div = 0;
		}
	} else {
		while (ref_div <= pll->max_ref_div) {
			avivo_get_fb_div(pll, target_clock, post_div, ref_div,
					 &fb_div, &frac_fb_div);
			if (frac_fb_div >= (pll->reference_freq / 2))
				fb_div++;
			frac_fb_div = 0;
			tmp = (pll->reference_freq * fb_div) / (post_div * ref_div);
			tmp = (tmp * 10000) / target_clock;

			if (tmp > (10000 + MAX_TOLERANCE))
				ref_div++;
			else if (tmp >= (10000 - MAX_TOLERANCE))
				break;
			else
				ref_div++;
		}
	}

	*dot_clock_p = ((pll->reference_freq * fb_div * 10) + (pll->reference_freq * frac_fb_div)) /
		(ref_div * post_div * 10);
	*fb_div_p = fb_div;
	*frac_fb_div_p = frac_fb_div;
	*ref_div_p = ref_div;
	*post_div_p = post_div;
	DRM_DEBUG_KMS("%d, pll dividers - fb: %d.%d ref: %d, post %d\n",
		      *dot_clock_p, fb_div, frac_fb_div, ref_div, post_div);
}
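/* 64-bit by 32-bit division helper for the legacy PLL code below; do_div()
 * (from asm/div64.h) is used because a plain 64/32 divide is not available
 * on all 32-bit targets.  Adding d/2 first makes the result round to
 * nearest. */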
static inline uint32_t radeon_div(uint64_t n, uint32_t d)
{
	uint64_t mod;

	n += d / 2;

	mod = do_div(n, d);
	return n;
}
void radeon_compute_pll_legacy(struct radeon_pll *pll,
			       u64 freq,
			       uint32_t *dot_clock_p,
			       uint32_t *fb_div_p,
			       uint32_t *frac_fb_div_p,
			       uint32_t *ref_div_p,
			       uint32_t *post_div_p)
{
	uint32_t min_ref_div = pll->min_ref_div;
	uint32_t max_ref_div = pll->max_ref_div;
	uint32_t min_post_div = pll->min_post_div;
	uint32_t max_post_div = pll->max_post_div;
	uint32_t min_fractional_feed_div = 0;
	uint32_t max_fractional_feed_div = 0;
	uint32_t best_vco = pll->best_vco;
	uint32_t best_post_div = 1;
	uint32_t best_ref_div = 1;
	uint32_t best_feedback_div = 1;
	uint32_t best_frac_feedback_div = 0;
	uint32_t best_freq = -1;
	uint32_t best_error = 0xffffffff;
	uint32_t best_vco_diff = 1;
	uint32_t post_div;
	u32 pll_out_min, pll_out_max;

	DRM_DEBUG_KMS("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
	freq = freq * 1000;

	if (pll->flags & RADEON_PLL_IS_LCD) {
		pll_out_min = pll->lcd_pll_out_min;
		pll_out_max = pll->lcd_pll_out_max;
	} else {
		pll_out_min = pll->pll_out_min;
		pll_out_max = pll->pll_out_max;
	}

	if (pll_out_min > 64800)
		pll_out_min = 64800;

	if (pll->flags & RADEON_PLL_USE_REF_DIV)
		min_ref_div = max_ref_div = pll->reference_div;
	else {
		while (min_ref_div < max_ref_div-1) {
			uint32_t mid = (min_ref_div + max_ref_div) / 2;
			uint32_t pll_in = pll->reference_freq / mid;
			if (pll_in < pll->pll_in_min)
				max_ref_div = mid;
			else if (pll_in > pll->pll_in_max)
				min_ref_div = mid;
			else
				break;
		}
	}

	if (pll->flags & RADEON_PLL_USE_POST_DIV)
		min_post_div = max_post_div = pll->post_div;

	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		min_fractional_feed_div = pll->min_frac_feedback_div;
		max_fractional_feed_div = pll->max_frac_feedback_div;
	}

	for (post_div = max_post_div; post_div >= min_post_div; --post_div) {
		uint32_t ref_div;

		if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
			continue;

		/* legacy radeons only have a few post_divs */
		if (pll->flags & RADEON_PLL_LEGACY) {
			if ((post_div == 5) ||
			    (post_div == 7) ||
			    (post_div == 9) ||
			    (post_div == 10) ||
			    (post_div == 11) ||
			    (post_div == 13) ||
			    (post_div == 14) ||
			    (post_div == 15))
				continue;
		}

		for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) {
			uint32_t feedback_div, current_freq = 0, error, vco_diff;
			uint32_t pll_in = pll->reference_freq / ref_div;
			uint32_t min_feed_div = pll->min_feedback_div;
			uint32_t max_feed_div = pll->max_feedback_div + 1;

			if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max)
				continue;

			while (min_feed_div < max_feed_div) {
				uint32_t vco;
				uint32_t min_frac_feed_div = min_fractional_feed_div;
				uint32_t max_frac_feed_div = max_fractional_feed_div + 1;
				uint32_t frac_feedback_div;
				uint64_t tmp;

				feedback_div = (min_feed_div + max_feed_div) / 2;

				tmp = (uint64_t)pll->reference_freq * feedback_div;
				vco = radeon_div(tmp, ref_div);

				if (vco < pll_out_min) {
					min_feed_div = feedback_div + 1;
					continue;
				} else if (vco > pll_out_max) {
					max_feed_div = feedback_div;
					continue;
				}

				while (min_frac_feed_div < max_frac_feed_div) {
					frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2;
					tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div;
					tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
					current_freq = radeon_div(tmp, ref_div * post_div);

					if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
						if (freq < current_freq)
							error = 0xffffffff;
						else
							error = freq - current_freq;
					} else
						error = abs(current_freq - freq);
					vco_diff = abs(vco - best_vco);

					if ((best_vco == 0 && error < best_error) ||
					    (best_vco != 0 &&
					     ((best_error > 100 && error < best_error - 100) ||
					      (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) {
						best_post_div = post_div;
						best_ref_div = ref_div;
						best_feedback_div = feedback_div;
						best_frac_feedback_div = frac_feedback_div;
						best_freq = current_freq;
						best_error = error;
						best_vco_diff = vco_diff;
					} else if (current_freq == freq) {
						if (best_freq == -1) {
							best_post_div = post_div;
							best_ref_div = ref_div;
							best_feedback_div = feedback_div;
							best_frac_feedback_div = frac_feedback_div;
							best_freq = current_freq;
							best_error = error;
							best_vco_diff = vco_diff;
						} else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
							best_post_div = post_div;
							best_ref_div = ref_div;
							best_feedback_div = feedback_div;
							best_frac_feedback_div = frac_feedback_div;
							best_freq = current_freq;
							best_error = error;
							best_vco_diff = vco_diff;
						}
					}
					if (current_freq < freq)
						min_frac_feed_div = frac_feedback_div + 1;
					else
						max_frac_feed_div = frac_feedback_div;
				}
				if (current_freq < freq)
					min_feed_div = feedback_div + 1;
				else
					max_feed_div = feedback_div;
			}
		}
	}

	*dot_clock_p = best_freq / 10000;
	*fb_div_p = best_feedback_div;
	*frac_fb_div_p = best_frac_feedback_div;
	*ref_div_p = best_ref_div;
	*post_div_p = best_post_div;
	DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
		      freq,
		      best_freq / 1000, best_feedback_div, best_frac_feedback_div,
		      best_ref_div, best_post_div);
}
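/* The legacy PLL search above walks post_div and ref_div exhaustively and
 * binary-searches the (fractional) feedback divider for each pair, keeping
 * the combination with the smallest frequency error (with the vco distance
 * to pll->best_vco and the various PREFER_* flags as tie breakers). */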
static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb)
{
	struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);

	if (radeon_fb->obj) {
		drm_gem_object_unreference_unlocked(radeon_fb->obj);
	}
	drm_framebuffer_cleanup(fb);
	kfree(radeon_fb);
}
static int radeon_user_framebuffer_create_handle(struct drm_framebuffer *fb,
						 struct drm_file *file_priv,
						 unsigned int *handle)
{
	struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);

	return drm_gem_handle_create(file_priv, radeon_fb->obj, handle);
}

static const struct drm_framebuffer_funcs radeon_fb_funcs = {
	.destroy = radeon_user_framebuffer_destroy,
	.create_handle = radeon_user_framebuffer_create_handle,
};
void
radeon_framebuffer_init(struct drm_device *dev,
			struct radeon_framebuffer *rfb,
			struct drm_mode_fb_cmd *mode_cmd,
			struct drm_gem_object *obj)
{
	rfb->obj = obj;
	drm_framebuffer_init(dev, &rfb->base, &radeon_fb_funcs);
	drm_helper_mode_fill_fb_struct(&rfb->base, mode_cmd);
}
static struct drm_framebuffer *
radeon_user_framebuffer_create(struct drm_device *dev,
			       struct drm_file *file_priv,
			       struct drm_mode_fb_cmd *mode_cmd)
{
	struct drm_gem_object *obj;
	struct radeon_framebuffer *radeon_fb;

	obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handle);
	if (obj == NULL) {
		dev_err(&dev->pdev->dev, "No GEM object associated to handle 0x%08X, "
			"can't create framebuffer\n", mode_cmd->handle);
		return ERR_PTR(-ENOENT);
	}

	radeon_fb = kzalloc(sizeof(*radeon_fb), GFP_KERNEL);
	if (radeon_fb == NULL)
		return ERR_PTR(-ENOMEM);

	radeon_framebuffer_init(dev, radeon_fb, mode_cmd, obj);

	return &radeon_fb->base;
}
static void radeon_output_poll_changed(struct drm_device *dev)
{
	struct radeon_device *rdev = dev->dev_private;
	radeon_fb_output_poll_changed(rdev);
}

static const struct drm_mode_config_funcs radeon_mode_funcs = {
	.fb_create = radeon_user_framebuffer_create,
	.output_poll_changed = radeon_output_poll_changed
};

struct drm_prop_enum_list {
	int type;
	char *name;
};

static struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
{	/* ... */
};

static struct drm_prop_enum_list radeon_tv_std_enum_list[] =
{	{ TV_STD_NTSC, "ntsc" },
	{ TV_STD_PAL, "pal" },
	{ TV_STD_PAL_M, "pal-m" },
	{ TV_STD_PAL_60, "pal-60" },
	{ TV_STD_NTSC_J, "ntsc-j" },
	{ TV_STD_SCART_PAL, "scart-pal" },
	{ TV_STD_PAL_CN, "pal-cn" },
	{ TV_STD_SECAM, "secam" },
};

static struct drm_prop_enum_list radeon_underscan_enum_list[] =
{	{ UNDERSCAN_OFF, "off" },
	{ UNDERSCAN_ON, "on" },
	{ UNDERSCAN_AUTO, "auto" },
};
static int radeon_modeset_create_props(struct radeon_device *rdev)
{
	int i, sz;

	if (rdev->is_atom_bios) {
		rdev->mode_info.coherent_mode_property =
			drm_property_create(rdev->ddev,
					    DRM_MODE_PROP_RANGE,
					    "coherent", 2);
		if (!rdev->mode_info.coherent_mode_property)
			return -ENOMEM;

		rdev->mode_info.coherent_mode_property->values[0] = 0;
		rdev->mode_info.coherent_mode_property->values[1] = 1;
	}

	if (!ASIC_IS_AVIVO(rdev)) {
		sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
		rdev->mode_info.tmds_pll_property =
			drm_property_create(rdev->ddev,
					    DRM_MODE_PROP_ENUM,
					    "tmds_pll", sz);
		for (i = 0; i < sz; i++) {
			drm_property_add_enum(rdev->mode_info.tmds_pll_property,
					      i,
					      radeon_tmds_pll_enum_list[i].type,
					      radeon_tmds_pll_enum_list[i].name);
		}
	}

	rdev->mode_info.load_detect_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_RANGE,
				    "load detection", 2);
	if (!rdev->mode_info.load_detect_property)
		return -ENOMEM;
	rdev->mode_info.load_detect_property->values[0] = 0;
	rdev->mode_info.load_detect_property->values[1] = 1;

	drm_mode_create_scaling_mode_property(rdev->ddev);

	sz = ARRAY_SIZE(radeon_tv_std_enum_list);
	rdev->mode_info.tv_std_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_ENUM,
				    "tv standard", sz);
	for (i = 0; i < sz; i++) {
		drm_property_add_enum(rdev->mode_info.tv_std_property,
				      i,
				      radeon_tv_std_enum_list[i].type,
				      radeon_tv_std_enum_list[i].name);
	}

	sz = ARRAY_SIZE(radeon_underscan_enum_list);
	rdev->mode_info.underscan_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_ENUM,
				    "underscan", sz);
	for (i = 0; i < sz; i++) {
		drm_property_add_enum(rdev->mode_info.underscan_property,
				      i,
				      radeon_underscan_enum_list[i].type,
				      radeon_underscan_enum_list[i].name);
	}

	rdev->mode_info.underscan_hborder_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_RANGE,
				    "underscan hborder", 2);
	if (!rdev->mode_info.underscan_hborder_property)
		return -ENOMEM;
	rdev->mode_info.underscan_hborder_property->values[0] = 0;
	rdev->mode_info.underscan_hborder_property->values[1] = 128;

	rdev->mode_info.underscan_vborder_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_RANGE,
				    "underscan vborder", 2);
	if (!rdev->mode_info.underscan_vborder_property)
		return -ENOMEM;
	rdev->mode_info.underscan_vborder_property->values[0] = 0;
	rdev->mode_info.underscan_vborder_property->values[1] = 128;

	return 0;
}
void radeon_update_display_priority(struct radeon_device *rdev)
{
	/* adjustment options for the display watermarks */
	if ((radeon_disp_priority == 0) || (radeon_disp_priority > 2)) {
		/* set display priority to high for r3xx, rv515 chips
		 * this avoids flickering due to underflow to the
		 * display controllers during heavy acceleration.
		 * Don't force high on rs4xx igp chips as it seems to
		 * affect the sound card.  See kernel bug 15982.
		 */
		if ((ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515)) &&
		    !(rdev->flags & RADEON_IS_IGP))
			rdev->disp_priority = 2;
		else
			rdev->disp_priority = 0;
	} else
		rdev->disp_priority = radeon_disp_priority;
}
int radeon_modeset_init(struct radeon_device *rdev)
{
	int i;
	int ret;

	drm_mode_config_init(rdev->ddev);
	rdev->mode_info.mode_config_initialized = true;

	rdev->ddev->mode_config.funcs = (void *)&radeon_mode_funcs;

	if (ASIC_IS_DCE5(rdev)) {
		rdev->ddev->mode_config.max_width = 16384;
		rdev->ddev->mode_config.max_height = 16384;
	} else if (ASIC_IS_AVIVO(rdev)) {
		rdev->ddev->mode_config.max_width = 8192;
		rdev->ddev->mode_config.max_height = 8192;
	} else {
		rdev->ddev->mode_config.max_width = 4096;
		rdev->ddev->mode_config.max_height = 4096;
	}

	rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;

	ret = radeon_modeset_create_props(rdev);
	if (ret) {
		return ret;
	}

	/* init i2c buses */
	radeon_i2c_init(rdev);

	/* check combios for a valid hardcoded EDID - Sun servers */
	if (!rdev->is_atom_bios) {
		/* check for hardcoded EDID in BIOS */
		radeon_combios_check_hardcoded_edid(rdev);
	}

	/* allocate crtcs */
	for (i = 0; i < rdev->num_crtc; i++) {
		radeon_crtc_init(rdev->ddev, i);
	}

	/* okay we should have all the bios connectors */
	ret = radeon_setup_enc_conn(rdev->ddev);
	if (!ret) {
		return ret;
	}

	if (rdev->is_atom_bios)
		radeon_atom_encoder_init(rdev);

	/* initialize hpd */
	radeon_hpd_init(rdev);

	/* Initialize power management */
	radeon_pm_init(rdev);

	radeon_fbdev_init(rdev);
	drm_kms_helper_poll_init(rdev->ddev);

	return 0;
}
void radeon_modeset_fini(struct radeon_device *rdev)
{
	radeon_fbdev_fini(rdev);
	kfree(rdev->mode_info.bios_hardcoded_edid);
	radeon_pm_fini(rdev);

	if (rdev->mode_info.mode_config_initialized) {
		drm_kms_helper_poll_fini(rdev->ddev);
		radeon_hpd_fini(rdev);
		drm_mode_config_cleanup(rdev->ddev);
		rdev->mode_info.mode_config_initialized = false;
	}
	/* free i2c buses */
	radeon_i2c_fini(rdev);
}
static bool is_hdtv_mode(struct drm_display_mode *mode)
{
	/* try and guess if this is a tv or a monitor */
	if ((mode->vdisplay == 480 && mode->hdisplay == 720) || /* 480p */
	    (mode->vdisplay == 576) || /* 576p */
	    (mode->vdisplay == 720) || /* 720p */
	    (mode->vdisplay == 1080)) /* 1080p */
		return true;
	else
		return false;
}
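/* Per-CRTC scaling setup.  Decides whether the RMX scaler is needed (the
 * requested mode is smaller than the panel's native mode, or underscan
 * borders are added for HDMI TVs) and records the resulting horizontal and
 * vertical scale factors as fixed20_12 fixed-point values in
 * radeon_crtc->hsc/vsc. */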
bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_encoder *radeon_encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	bool first = true;
	u32 src_v = 1, dst_v = 1;
	u32 src_h = 1, dst_h = 1;

	radeon_crtc->h_border = 0;
	radeon_crtc->v_border = 0;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc != crtc)
			continue;
		radeon_encoder = to_radeon_encoder(encoder);
		connector = radeon_get_connector_for_encoder(encoder);
		radeon_connector = to_radeon_connector(connector);

		if (first) {
			/* set scaling */
			if (radeon_encoder->rmx_type == RMX_OFF)
				radeon_crtc->rmx_type = RMX_OFF;
			else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
				 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
				radeon_crtc->rmx_type = radeon_encoder->rmx_type;
			else
				radeon_crtc->rmx_type = RMX_OFF;
			/* copy native mode */
			memcpy(&radeon_crtc->native_mode,
			       &radeon_encoder->native_mode,
			       sizeof(struct drm_display_mode));
			src_v = crtc->mode.vdisplay;
			dst_v = radeon_crtc->native_mode.vdisplay;
			src_h = crtc->mode.hdisplay;
			dst_h = radeon_crtc->native_mode.hdisplay;

			/* fix up for overscan on hdmi */
			if (ASIC_IS_AVIVO(rdev) &&
			    (!(mode->flags & DRM_MODE_FLAG_INTERLACE)) &&
			    ((radeon_encoder->underscan_type == UNDERSCAN_ON) ||
			     ((radeon_encoder->underscan_type == UNDERSCAN_AUTO) &&
			      drm_detect_hdmi_monitor(radeon_connector->edid) &&
			      is_hdtv_mode(mode)))) {
				if (radeon_encoder->underscan_hborder != 0)
					radeon_crtc->h_border = radeon_encoder->underscan_hborder;
				else
					radeon_crtc->h_border = (mode->hdisplay >> 5) + 16;
				if (radeon_encoder->underscan_vborder != 0)
					radeon_crtc->v_border = radeon_encoder->underscan_vborder;
				else
					radeon_crtc->v_border = (mode->vdisplay >> 5) + 16;
				radeon_crtc->rmx_type = RMX_FULL;
				src_v = crtc->mode.vdisplay;
				dst_v = crtc->mode.vdisplay - (radeon_crtc->v_border * 2);
				src_h = crtc->mode.hdisplay;
				dst_h = crtc->mode.hdisplay - (radeon_crtc->h_border * 2);
			}
			first = false;
		} else {
			if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
				/* WARNING: Right now this can't happen but
				 * in the future we need to check that scaling
				 * are consistent across different encoder
				 * (ie all encoder can work with the same
				 *  scaling).
				 */
				DRM_ERROR("Scaling not consistent across encoder.\n");
				return false;
			}
		}
	}
	if (radeon_crtc->rmx_type != RMX_OFF) {
		fixed20_12 a, b;
		a.full = dfixed_const(src_v);
		b.full = dfixed_const(dst_v);
		radeon_crtc->vsc.full = dfixed_div(a, b);
		a.full = dfixed_const(src_h);
		b.full = dfixed_const(dst_h);
		radeon_crtc->hsc.full = dfixed_div(a, b);
	} else {
		radeon_crtc->vsc.full = dfixed_const(1);
		radeon_crtc->hsc.full = dfixed_const(1);
	}
	return true;
}
/**
 * Retrieve current video scanout position of crtc on a given gpu.
 *
 * \param dev Device to query.
 * \param crtc Crtc to query.
 * \param *vpos Location where vertical scanout position should be stored.
 * \param *hpos Location where horizontal scanout position should go.
 *
 * Returns vpos as a positive number while in active scanout area.
 * Returns vpos as a negative number inside vblank, counting the number
 * of scanlines to go until end of vblank, e.g., -1 means "one scanline
 * until start of active scanout / end of vblank."
 *
 * \return Flags, or'ed together as follows:
 *
 * DRM_SCANOUTPOS_VALID = Query successful.
 * DRM_SCANOUTPOS_INVBL = Inside vblank.
 * DRM_SCANOUTPOS_ACCURATE = Returned position is accurate. A lack of
 * this flag means that returned position may be offset by a constant but
 * unknown small number of scanlines wrt. real scanout position.
 */
int radeon_get_crtc_scanoutpos(struct drm_device *dev, int crtc, int *vpos, int *hpos)
{
	u32 stat_crtc = 0, vbl = 0, position = 0;
	int vbl_start, vbl_end, vtotal, ret = 0;
	bool in_vbl = true;

	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		if (crtc == 0) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC0_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC0_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 1) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC1_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC1_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 2) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC2_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC2_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 3) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC3_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC3_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 4) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC4_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC4_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 5) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC5_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC5_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
	} else if (ASIC_IS_AVIVO(rdev)) {
		if (crtc == 0) {
			vbl = RREG32(AVIVO_D1CRTC_V_BLANK_START_END);
			position = RREG32(AVIVO_D1CRTC_STATUS_POSITION);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 1) {
			vbl = RREG32(AVIVO_D2CRTC_V_BLANK_START_END);
			position = RREG32(AVIVO_D2CRTC_STATUS_POSITION);
			ret |= DRM_SCANOUTPOS_VALID;
		}
	} else {
		/* Pre-AVIVO: Different encoding of scanout pos and vblank interval. */
		if (crtc == 0) {
			/* Assume vbl_end == 0, get vbl_start from
			 * CRTC_V_TOTAL_DISP.
			 */
			vbl = (RREG32(RADEON_CRTC_V_TOTAL_DISP) &
			       RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
			/* Only retrieve vpos from upper 16 bits, set hpos == 0. */
			position = (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
			stat_crtc = RREG32(RADEON_CRTC_STATUS);
			if (!(stat_crtc & 1))
				in_vbl = false;

			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 1) {
			vbl = (RREG32(RADEON_CRTC2_V_TOTAL_DISP) &
			       RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
			position = (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
			stat_crtc = RREG32(RADEON_CRTC2_STATUS);
			if (!(stat_crtc & 1))
				in_vbl = false;

			ret |= DRM_SCANOUTPOS_VALID;
		}
	}

	/* Decode into vertical and horizontal scanout position. */
	*vpos = position & 0x1fff;
	*hpos = (position >> 16) & 0x1fff;

	/* Valid vblank area boundaries from gpu retrieved? */
	if (vbl > 0) {
		/* Yes: Decode. */
		ret |= DRM_SCANOUTPOS_ACCURATE;
		vbl_start = vbl & 0x1fff;
		vbl_end = (vbl >> 16) & 0x1fff;
	} else {
		/* No: Fake something reasonable which gives at least ok results. */
		vbl_start = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vdisplay;
		vbl_end = 0;
	}

	/* Test scanout position against vblank region. */
	if ((*vpos < vbl_start) && (*vpos >= vbl_end))
		in_vbl = false;

	/* Check if inside vblank area and apply corrective offsets:
	 * vpos will then be >=0 in video scanout area, but negative
	 * within vblank area, counting down the number of lines until
	 * start of scanout.
	 */

	/* Inside "upper part" of vblank area? Apply corrective offset if so: */
	if (in_vbl && (*vpos >= vbl_start)) {
		vtotal = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vtotal;
		*vpos = *vpos - vtotal;
	}

	/* Correct for shifted end of vbl at vbl_end. */
	*vpos = *vpos - vbl_end;

	/* In vblank? */
	if (in_vbl)
		ret |= DRM_SCANOUTPOS_INVBL;

	return ret;
}