/* drivers/gpu/drm/drm_edid.c */
/*
 * Copyright (c) 2006 Luc Verhaegen (quirks list)
 * Copyright (c) 2007-2008 Intel Corporation
 *   Jesse Barnes <jesse.barnes@intel.com>
 * Copyright 2010 Red Hat, Inc.
 *
 * DDC probing routines (drm_ddc_read & drm_do_probe_ddc_edid) originally from
 * FB layer.
 *   Copyright (C) 2006 Dennis Munsie <dmunsie@cecropia.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sub license,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/i2c.h>
#include "drmP.h"
#include "drm_edid.h"
#include "drm_edid_modes.h"

#define version_greater(edid, maj, min) \
	(((edid)->version > (maj)) || \
	 ((edid)->version == (maj) && (edid)->revision > (min)))

#define EDID_EST_TIMINGS 16
#define EDID_STD_TIMINGS 8
#define EDID_DETAILED_TIMINGS 4

/*
 * EDID blocks out in the wild have a variety of bugs, try to collect
 * them here (note that userspace may work around broken monitors first,
 * but fixes should make their way here so that the kernel "just works"
 * on as many displays as possible).
 */

/* First detailed mode wrong, use largest 60Hz mode */
#define EDID_QUIRK_PREFER_LARGE_60		(1 << 0)
/* Reported 135MHz pixel clock is too high, needs adjustment */
#define EDID_QUIRK_135_CLOCK_TOO_HIGH		(1 << 1)
/* Prefer the largest mode at 75 Hz */
#define EDID_QUIRK_PREFER_LARGE_75		(1 << 2)
/* Detail timing is in cm not mm */
#define EDID_QUIRK_DETAILED_IN_CM		(1 << 3)
/* Detailed timing descriptors have bogus size values, so just take the
 * maximum size and use that.
 */
#define EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE	(1 << 4)
/* Monitor forgot to set the first detailed is preferred bit. */
#define EDID_QUIRK_FIRST_DETAILED_PREFERRED	(1 << 5)
/* use +hsync +vsync for detailed mode */
#define EDID_QUIRK_DETAILED_SYNC_PP		(1 << 6)

struct detailed_mode_closure {
	struct drm_connector *connector;
	struct edid *edid;
	bool preferred;
	u32 quirks;
	int modes;
};

#define LEVEL_DMT	0
#define LEVEL_GTF	1
#define LEVEL_GTF2	2
#define LEVEL_CVT	3

static struct edid_quirk {
	char *vendor;
	int product_id;
	u32 quirks;
} edid_quirk_list[] = {
	/* Acer AL1706 */
	{ "ACR", 44358, EDID_QUIRK_PREFER_LARGE_60 },
	/* Acer F51 */
	{ "API", 0x7602, EDID_QUIRK_PREFER_LARGE_60 },
	/* Unknown Acer */
	{ "ACR", 2423, EDID_QUIRK_FIRST_DETAILED_PREFERRED },

	/* Belinea 10 15 55 */
	{ "MAX", 1516, EDID_QUIRK_PREFER_LARGE_60 },
	{ "MAX", 0x77e, EDID_QUIRK_PREFER_LARGE_60 },

	/* Envision Peripherals, Inc. EN-7100e */
	{ "EPI", 59264, EDID_QUIRK_135_CLOCK_TOO_HIGH },
	/* Envision EN2028 */
	{ "EPI", 8232, EDID_QUIRK_PREFER_LARGE_60 },

	/* Funai Electronics PM36B */
	{ "FCM", 13600, EDID_QUIRK_PREFER_LARGE_75 |
	  EDID_QUIRK_DETAILED_IN_CM },

	/* LG Philips LCD LP154W01-A5 */
	{ "LPL", 0, EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE },
	{ "LPL", 0x2a00, EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE },

	/* Philips 107p5 CRT */
	{ "PHL", 57364, EDID_QUIRK_FIRST_DETAILED_PREFERRED },

	/* Proview AY765C */
	{ "PTS", 765, EDID_QUIRK_FIRST_DETAILED_PREFERRED },

	/* Samsung SyncMaster 205BW.  Note: irony */
	{ "SAM", 541, EDID_QUIRK_DETAILED_SYNC_PP },
	/* Samsung SyncMaster 22[5-6]BW */
	{ "SAM", 596, EDID_QUIRK_PREFER_LARGE_60 },
	{ "SAM", 638, EDID_QUIRK_PREFER_LARGE_60 },
};

/*** DDC fetch and block validation ***/

static const u8 edid_header[] = {
	0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00
};

/*
 * Sanity check the header of the base EDID block.  Return 8 if the header
 * is perfect, down to 0 if it's totally wrong.
 */
int drm_edid_header_is_valid(const u8 *raw_edid)
{
	int i, score = 0;

	for (i = 0; i < sizeof(edid_header); i++)
		if (raw_edid[i] == edid_header[i])
			score++;

	return score;
}
EXPORT_SYMBOL(drm_edid_header_is_valid);

/*
 * Sanity check the EDID block (base or extension).  Return 0 if the block
 * doesn't check out, or 1 if it's valid.
 */
static bool
drm_edid_block_valid(u8 *raw_edid)
{
	int i;
	u8 csum = 0;
	struct edid *edid = (struct edid *)raw_edid;

	if (raw_edid[0] == 0x00) {
		int score = drm_edid_header_is_valid(raw_edid);
		if (score == 8) ;
		else if (score >= 6) {
			DRM_DEBUG("Fixing EDID header, your hardware may be failing\n");
			memcpy(raw_edid, edid_header, sizeof(edid_header));
		} else {
			goto bad;
		}
	}

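	/*
	 * Every 128-byte EDID block, base or extension, must sum to zero
	 * modulo 256; a non-zero remainder below means the block is corrupt.
	 */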
	for (i = 0; i < EDID_LENGTH; i++)
		csum += raw_edid[i];
	if (csum) {
		DRM_ERROR("EDID checksum is invalid, remainder is %d\n", csum);

		/* allow CEA to slide through, switches mangle this */
		if (raw_edid[0] != 0x02)
			goto bad;
	}

	/* per-block-type checks */
	switch (raw_edid[0]) {
	case 0: /* base */
		if (edid->version != 1) {
			DRM_ERROR("EDID has major version %d, instead of 1\n", edid->version);
			goto bad;
		}

		if (edid->revision > 4)
			DRM_DEBUG("EDID minor > 4, assuming backward compatibility\n");
		break;

	default:
		break;
	}

	return 1;

bad:
	if (raw_edid) {
		printk(KERN_ERR "Raw EDID:\n");
		print_hex_dump_bytes(KERN_ERR, DUMP_PREFIX_NONE, raw_edid, EDID_LENGTH);
		printk(KERN_ERR "\n");
	}
	return 0;
}

/**
 * drm_edid_is_valid - sanity check EDID data
 * @edid: EDID data
 *
 * Sanity-check an entire EDID record (including extensions)
 */
bool drm_edid_is_valid(struct edid *edid)
{
	int i;
	u8 *raw = (u8 *)edid;

	if (!edid)
		return false;

	for (i = 0; i <= edid->extensions; i++)
		if (!drm_edid_block_valid(raw + i * EDID_LENGTH))
			return false;

	return true;
}
EXPORT_SYMBOL(drm_edid_is_valid);

#define DDC_ADDR 0x50
#define DDC_SEGMENT_ADDR 0x30
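/*
 * DDC_ADDR is the fixed I2C slave address of the EDID EEPROM.  DDC_SEGMENT_ADDR
 * is the E-DDC segment pointer used to reach EDIDs larger than 256 bytes; it is
 * defined here but not used by the helpers below.
 */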
/**
 * Get EDID information via I2C.
 *
 * \param adapter : i2c device adaptor
 * \param buf : EDID data buffer to be filled
 * \param len : EDID data buffer length
 * \return 0 on success or -1 on failure.
 *
 * Try to fetch EDID information by calling i2c driver function.
 */
static int
drm_do_probe_ddc_edid(struct i2c_adapter *adapter, unsigned char *buf,
		      int block, int len)
{
	unsigned char start = block * EDID_LENGTH;
	int ret, retries = 5;

	/* The core i2c driver will automatically retry the transfer if the
	 * adapter reports EAGAIN. However, we find that bit-banging transfers
	 * are susceptible to errors under a heavily loaded machine and
	 * generate spurious NAKs and timeouts. Retrying the transfer
	 * of the individual block a few times seems to overcome this.
	 */
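	/*
	 * Each attempt is a two-message transfer: write the one-byte block
	 * offset to the EDID slave address, then read len bytes back within
	 * the same transaction.
	 */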
	do {
		struct i2c_msg msgs[] = {
			{
				.addr	= DDC_ADDR,
				.flags	= 0,
				.len	= 1,
				.buf	= &start,
			}, {
				.addr	= DDC_ADDR,
				.flags	= I2C_M_RD,
				.len	= len,
				.buf	= buf,
			}
		};
		ret = i2c_transfer(adapter, msgs, 2);
	} while (ret != 2 && --retries);

	return ret == 2 ? 0 : -1;
}

static bool drm_edid_is_zero(u8 *in_edid, int length)
{
	int i;
	u32 *raw_edid = (u32 *)in_edid;

	for (i = 0; i < length / 4; i++)
		if (*(raw_edid + i) != 0)
			return false;
	return true;
}

static u8 *
drm_do_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
{
	int i, j = 0, valid_extensions = 0;
	u8 *block, *new;

	if ((block = kmalloc(EDID_LENGTH, GFP_KERNEL)) == NULL)
		return NULL;

	/* base block fetch */
	for (i = 0; i < 4; i++) {
		if (drm_do_probe_ddc_edid(adapter, block, 0, EDID_LENGTH))
			goto out;
		if (drm_edid_block_valid(block))
			break;
		if (i == 0 && drm_edid_is_zero(block, EDID_LENGTH)) {
			connector->null_edid_counter++;
			goto carp;
		}
	}
	if (i == 4)
		goto carp;

	/* if there's no extensions, we're done */
	if (block[0x7e] == 0)
		return block;

	new = krealloc(block, (block[0x7e] + 1) * EDID_LENGTH, GFP_KERNEL);
	if (!new)
		goto out;
	block = new;

	for (j = 1; j <= block[0x7e]; j++) {
		for (i = 0; i < 4; i++) {
			if (drm_do_probe_ddc_edid(adapter,
				  block + (valid_extensions + 1) * EDID_LENGTH,
				  j, EDID_LENGTH))
				goto out;
			if (drm_edid_block_valid(block + (valid_extensions + 1) * EDID_LENGTH)) {
				valid_extensions++;
				break;
			}
		}
		if (i == 4)
			dev_warn(connector->dev->dev,
				 "%s: Ignoring invalid EDID block %d.\n",
				 drm_get_connector_name(connector), j);
	}

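	/*
	 * Byte 0x7e of the base block holds the extension count and byte 0x7f
	 * its checksum; both are patched up below if invalid extensions were
	 * dropped above.
	 */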
	if (valid_extensions != block[0x7e]) {
		block[EDID_LENGTH-1] += block[0x7e] - valid_extensions;
		block[0x7e] = valid_extensions;
		new = krealloc(block, (valid_extensions + 1) * EDID_LENGTH, GFP_KERNEL);
		if (!new)
			goto out;
		block = new;
	}

	return block;

carp:
	dev_warn(connector->dev->dev, "%s: EDID block %d invalid.\n",
		 drm_get_connector_name(connector), j);

out:
	kfree(block);
	return NULL;
}

/**
 * Probe DDC presence.
 *
 * \param adapter : i2c device adaptor
 * \return 1 on success
 */
static bool
drm_probe_ddc(struct i2c_adapter *adapter)
{
	unsigned char out;

	return (drm_do_probe_ddc_edid(adapter, &out, 0, 1) == 0);
}

/**
 * drm_get_edid - get EDID data, if available
 * @connector: connector we're probing
 * @adapter: i2c adapter to use for DDC
 *
 * Poke the given i2c channel to grab EDID data if possible.  If found,
 * attach it to the connector.
 *
 * Return edid data or NULL if we couldn't find any.
 */
struct edid *drm_get_edid(struct drm_connector *connector,
			  struct i2c_adapter *adapter)
{
	struct edid *edid = NULL;

	if (drm_probe_ddc(adapter))
		edid = (struct edid *)drm_do_get_edid(connector, adapter);

	connector->display_info.raw_edid = (char *)edid;

	return edid;
}
EXPORT_SYMBOL(drm_get_edid);

/*** EDID parsing ***/

/**
 * edid_vendor - match a string against EDID's obfuscated vendor field
 * @edid: EDID to match
 * @vendor: vendor string
 *
 * Returns true if @vendor is in @edid, false otherwise
 */
static bool edid_vendor(struct edid *edid, char *vendor)
{
	char edid_vendor[3];

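	/*
	 * The three-letter PNP vendor ID is packed into two bytes, five bits
	 * per letter with 'A' stored as 1; adding '@' (0x40) turns each 5-bit
	 * value back into an uppercase ASCII letter.
	 */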
	edid_vendor[0] = ((edid->mfg_id[0] & 0x7c) >> 2) + '@';
	edid_vendor[1] = (((edid->mfg_id[0] & 0x3) << 3) |
			  ((edid->mfg_id[1] & 0xe0) >> 5)) + '@';
	edid_vendor[2] = (edid->mfg_id[1] & 0x1f) + '@';

	return !strncmp(edid_vendor, vendor, 3);
}

/**
 * edid_get_quirks - return quirk flags for a given EDID
 * @edid: EDID to process
 *
 * This tells subsequent routines what fixes they need to apply.
 */
static u32 edid_get_quirks(struct edid *edid)
{
	struct edid_quirk *quirk;
	int i;

	for (i = 0; i < ARRAY_SIZE(edid_quirk_list); i++) {
		quirk = &edid_quirk_list[i];

		if (edid_vendor(edid, quirk->vendor) &&
		    (EDID_PRODUCT_ID(edid) == quirk->product_id))
			return quirk->quirks;
	}

	return 0;
}

#define MODE_SIZE(m) ((m)->hdisplay * (m)->vdisplay)
#define MODE_REFRESH_DIFF(m,r) (abs((m)->vrefresh - target_refresh))

/**
 * edid_fixup_preferred - set preferred modes based on quirk list
 * @connector: has mode list to fix up
 * @quirks: quirks list
 *
 * Walk the mode list for @connector, clearing the preferred status
 * on existing modes and setting it anew for the right mode ala @quirks.
 */
static void edid_fixup_preferred(struct drm_connector *connector,
				 u32 quirks)
{
	struct drm_display_mode *t, *cur_mode, *preferred_mode;
	int target_refresh = 0;

	if (list_empty(&connector->probed_modes))
		return;

	if (quirks & EDID_QUIRK_PREFER_LARGE_60)
		target_refresh = 60;
	if (quirks & EDID_QUIRK_PREFER_LARGE_75)
		target_refresh = 75;

	preferred_mode = list_first_entry(&connector->probed_modes,
					  struct drm_display_mode, head);

	list_for_each_entry_safe(cur_mode, t, &connector->probed_modes, head) {
		cur_mode->type &= ~DRM_MODE_TYPE_PREFERRED;

		if (cur_mode == preferred_mode)
			continue;

		/* Largest mode is preferred */
		if (MODE_SIZE(cur_mode) > MODE_SIZE(preferred_mode))
			preferred_mode = cur_mode;

		/* At a given size, try to get closest to target refresh */
		if ((MODE_SIZE(cur_mode) == MODE_SIZE(preferred_mode)) &&
		    MODE_REFRESH_DIFF(cur_mode, target_refresh) <
		    MODE_REFRESH_DIFF(preferred_mode, target_refresh)) {
			preferred_mode = cur_mode;
		}
	}

	preferred_mode->type |= DRM_MODE_TYPE_PREFERRED;
}

struct drm_display_mode *drm_mode_find_dmt(struct drm_device *dev,
					   int hsize, int vsize, int fresh)
{
	struct drm_display_mode *mode = NULL;
	int i;

	for (i = 0; i < drm_num_dmt_modes; i++) {
		const struct drm_display_mode *ptr = &drm_dmt_modes[i];
		if (hsize == ptr->hdisplay &&
		    vsize == ptr->vdisplay &&
		    fresh == drm_mode_vrefresh(ptr)) {
			/* get the expected default mode */
			mode = drm_mode_duplicate(dev, ptr);
			break;
		}
	}
	return mode;
}
EXPORT_SYMBOL(drm_mode_find_dmt);

typedef void detailed_cb(struct detailed_timing *timing, void *closure);

static void
cea_for_each_detailed_block(u8 *ext, detailed_cb *cb, void *closure)
{
	int i, n = 0;
	u8 rev = ext[0x01], d = ext[0x02];
	u8 *det_base = ext + d;

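	/*
	 * Byte 2 of a CEA extension is the offset of the first 18-byte
	 * detailed timing descriptor; everything from there to the end of the
	 * block is descriptor space.
	 */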
	switch (rev) {
	case 0:
		/* can't happen */
		return;
	case 1:
		/* have to infer how many blocks we have, check pixel clock */
		for (i = 0; i < 6; i++)
			if (det_base[18*i] || det_base[18*i+1])
				n++;
		break;
	default:
		/* explicit count */
		n = min(ext[0x03] & 0x0f, 6);
		break;
	}

	for (i = 0; i < n; i++)
		cb((struct detailed_timing *)(det_base + 18 * i), closure);
}

static void
vtb_for_each_detailed_block(u8 *ext, detailed_cb *cb, void *closure)
{
	unsigned int i, n = min((int)ext[0x02], 6);
	u8 *det_base = ext + 5;

	if (ext[0x01] != 1)
		return; /* unknown version */

	for (i = 0; i < n; i++)
		cb((struct detailed_timing *)(det_base + 18 * i), closure);
}

static void
drm_for_each_detailed_block(u8 *raw_edid, detailed_cb *cb, void *closure)
{
	int i;
	struct edid *edid = (struct edid *)raw_edid;

	if (edid == NULL)
		return;

	for (i = 0; i < EDID_DETAILED_TIMINGS; i++)
		cb(&(edid->detailed_timings[i]), closure);

	for (i = 1; i <= raw_edid[0x7e]; i++) {
		u8 *ext = raw_edid + (i * EDID_LENGTH);
		switch (*ext) {
		case CEA_EXT:
			cea_for_each_detailed_block(ext, cb, closure);
			break;
		case VTB_EXT:
			vtb_for_each_detailed_block(ext, cb, closure);
			break;
		default:
			break;
		}
	}
}

static void
is_rb(struct detailed_timing *t, void *data)
{
	u8 *r = (u8 *)t;
	if (r[3] == EDID_DETAIL_MONITOR_RANGE)
		if (r[15] & 0x10)
			*(bool *)data = true;
}

/* EDID 1.4 defines this explicitly. For EDID 1.3, we guess, badly. */
static bool
drm_monitor_supports_rb(struct edid *edid)
{
	if (edid->revision >= 4) {
		bool ret = false;
		drm_for_each_detailed_block((u8 *)edid, is_rb, &ret);
		return ret;
	}

	return ((edid->input & DRM_EDID_INPUT_DIGITAL) != 0);
}

static void
find_gtf2(struct detailed_timing *t, void *data)
{
	u8 *r = (u8 *)t;
	if (r[3] == EDID_DETAIL_MONITOR_RANGE && r[10] == 0x02)
		*(u8 **)data = r;
}

/* Secondary GTF curve kicks in above some break frequency */
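/*
 * The secondary curve parameters live in bytes 12-17 of the range descriptor:
 * start break frequency (in 2 kHz units), C*2, M (LSB first), K and J*2,
 * which is what the helpers below pull out.
 */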
static int
drm_gtf2_hbreak(struct edid *edid)
{
	u8 *r = NULL;
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
	return r ? (r[12] * 2) : 0;
}

static int
drm_gtf2_2c(struct edid *edid)
{
	u8 *r = NULL;
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
	return r ? r[13] : 0;
}

static int
drm_gtf2_m(struct edid *edid)
{
	u8 *r = NULL;
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
	return r ? (r[15] << 8) + r[14] : 0;
}

static int
drm_gtf2_k(struct edid *edid)
{
	u8 *r = NULL;
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
	return r ? r[16] : 0;
}

static int
drm_gtf2_2j(struct edid *edid)
{
	u8 *r = NULL;
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
	return r ? r[17] : 0;
}

/**
 * standard_timing_level - get std. timing level(CVT/GTF/DMT)
 * @edid: EDID block to scan
 */
static int standard_timing_level(struct edid *edid)
{
	if (edid->revision >= 2) {
		if (edid->revision >= 4 && (edid->features & DRM_EDID_FEATURE_DEFAULT_GTF))
			return LEVEL_CVT;
		if (drm_gtf2_hbreak(edid))
			return LEVEL_GTF2;
		return LEVEL_GTF;
	}
	return LEVEL_DMT;
}

/*
 * 0 is reserved.  The spec says 0x01 fill for unused timings.  Some old
 * monitors fill with ascii space (0x20) instead.
 */
static int
bad_std_timing(u8 a, u8 b)
{
	return (a == 0x00 && b == 0x00) ||
	       (a == 0x01 && b == 0x01) ||
	       (a == 0x20 && b == 0x20);
}

/**
 * drm_mode_std - convert standard mode info (width, height, refresh) into mode
 * @connector: connector we're probing, for its probed mode list
 * @edid: EDID block being parsed
 * @t: standard timing params
 * @revision: EDID revision, which changes how the aspect field is read
 *
 * Take the standard timing params (in this case width, aspect, and refresh)
 * and convert them into a real mode using CVT/GTF/DMT.
 */
static struct drm_display_mode *
drm_mode_std(struct drm_connector *connector, struct edid *edid,
	     struct std_timing *t, int revision)
{
	struct drm_device *dev = connector->dev;
	struct drm_display_mode *m, *mode = NULL;
	int hsize, vsize;
	int vrefresh_rate;
	unsigned aspect_ratio = (t->vfreq_aspect & EDID_TIMING_ASPECT_MASK)
		>> EDID_TIMING_ASPECT_SHIFT;
	unsigned vfreq = (t->vfreq_aspect & EDID_TIMING_VFREQ_MASK)
		>> EDID_TIMING_VFREQ_SHIFT;
	int timing_level = standard_timing_level(edid);

	if (bad_std_timing(t->hsize, t->vfreq_aspect))
		return NULL;

	/* According to the EDID spec, the hdisplay = hsize * 8 + 248 */
	hsize = t->hsize * 8 + 248;
	/* vrefresh_rate = vfreq + 60 */
	vrefresh_rate = vfreq + 60;
	/* the vdisplay is calculated based on the aspect ratio */
	if (aspect_ratio == 0) {
		if (revision < 3)
			vsize = hsize;
		else
			vsize = (hsize * 10) / 16;
	} else if (aspect_ratio == 1)
		vsize = (hsize * 3) / 4;
	else if (aspect_ratio == 2)
		vsize = (hsize * 4) / 5;
	else
		vsize = (hsize * 9) / 16;

	/* HDTV hack, part 1 */
	if (vrefresh_rate == 60 &&
	    ((hsize == 1360 && vsize == 765) ||
	     (hsize == 1368 && vsize == 769))) {
		hsize = 1366;
		vsize = 768;
	}

	/*
	 * If this connector already has a mode for this size and refresh
	 * rate (because it came from detailed or CVT info), use that
	 * instead.  This way we don't have to guess at interlace or
	 * reduced blanking.
	 */
	list_for_each_entry(m, &connector->probed_modes, head)
		if (m->hdisplay == hsize && m->vdisplay == vsize &&
		    drm_mode_vrefresh(m) == vrefresh_rate)
			return NULL;

	/* HDTV hack, part 2 */
	if (hsize == 1366 && vsize == 768 && vrefresh_rate == 60) {
		mode = drm_cvt_mode(dev, 1366, 768, vrefresh_rate, 0, 0,
				    false);
		if (!mode)
			return NULL;
		mode->hdisplay = 1366;
		mode->hsync_start = mode->hsync_start - 1;
		mode->hsync_end = mode->hsync_end - 1;
		return mode;
	}

	/* check whether it can be found in default mode table */
	mode = drm_mode_find_dmt(dev, hsize, vsize, vrefresh_rate);
	if (mode)
		return mode;

	switch (timing_level) {
	case LEVEL_DMT:
		break;
	case LEVEL_GTF:
		mode = drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
		break;
	case LEVEL_GTF2:
		/*
		 * This is potentially wrong if there's ever a monitor with
		 * more than one ranges section, each claiming a different
		 * secondary GTF curve.  Please don't do that.
		 */
		mode = drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
		if (drm_mode_hsync(mode) > drm_gtf2_hbreak(edid)) {
			kfree(mode);
			mode = drm_gtf_mode_complex(dev, hsize, vsize,
						    vrefresh_rate, 0, 0,
						    drm_gtf2_m(edid),
						    drm_gtf2_2c(edid),
						    drm_gtf2_k(edid),
						    drm_gtf2_2j(edid));
		}
		break;
	case LEVEL_CVT:
		mode = drm_cvt_mode(dev, hsize, vsize, vrefresh_rate, 0, 0,
				    false);
		break;
	}
	return mode;
}

/*
 * EDID is delightfully ambiguous about how interlaced modes are to be
 * encoded.  Our internal representation is of frame height, but some
 * HDTV detailed timings are encoded as field height.
 *
 * The format list here is from CEA, in frame size.  Technically we
 * should be checking refresh rate too.  Whatever.
 */
static void
drm_mode_do_interlace_quirk(struct drm_display_mode *mode,
			    struct detailed_pixel_timing *pt)
{
	int i;
	static const struct {
		int w, h;
	} cea_interlaced[] = {
		{ 1920, 1080 },
		{  720,  480 },
		{ 1440,  480 },
		{ 2880,  480 },
		{  720,  576 },
		{ 1440,  576 },
		{ 2880,  576 },
	};

	if (!(pt->misc & DRM_EDID_PT_INTERLACED))
		return;

	for (i = 0; i < ARRAY_SIZE(cea_interlaced); i++) {
		if ((mode->hdisplay == cea_interlaced[i].w) &&
		    (mode->vdisplay == cea_interlaced[i].h / 2)) {
			mode->vdisplay *= 2;
			mode->vsync_start *= 2;
			mode->vsync_end *= 2;
			mode->vtotal *= 2;
			mode->vtotal |= 1;
		}
	}

	mode->flags |= DRM_MODE_FLAG_INTERLACE;
}

/**
 * drm_mode_detailed - create a new mode from an EDID detailed timing section
 * @dev: DRM device (needed to create new mode)
 * @edid: EDID block
 * @timing: EDID detailed timing info
 * @quirks: quirks to apply
 *
 * An EDID detailed timing block contains enough info for us to create and
 * return a new struct drm_display_mode.
 */
static struct drm_display_mode *drm_mode_detailed(struct drm_device *dev,
						  struct edid *edid,
						  struct detailed_timing *timing,
						  u32 quirks)
{
	struct drm_display_mode *mode;
	struct detailed_pixel_timing *pt = &timing->data.pixel_data;
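	/*
	 * The 18-byte detailed timing descriptor stores each geometry field as
	 * a low byte plus packed high bits; the shifts and masks below
	 * reassemble the full-width values.
	 */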
	unsigned hactive = (pt->hactive_hblank_hi & 0xf0) << 4 | pt->hactive_lo;
	unsigned vactive = (pt->vactive_vblank_hi & 0xf0) << 4 | pt->vactive_lo;
	unsigned hblank = (pt->hactive_hblank_hi & 0xf) << 8 | pt->hblank_lo;
	unsigned vblank = (pt->vactive_vblank_hi & 0xf) << 8 | pt->vblank_lo;
	unsigned hsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc0) << 2 | pt->hsync_offset_lo;
	unsigned hsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x30) << 4 | pt->hsync_pulse_width_lo;
	unsigned vsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc) >> 2 | pt->vsync_offset_pulse_width_lo >> 4;
	unsigned vsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x3) << 4 | (pt->vsync_offset_pulse_width_lo & 0xf);

	/* ignore tiny modes */
	if (hactive < 64 || vactive < 64)
		return NULL;

	if (pt->misc & DRM_EDID_PT_STEREO) {
		printk(KERN_WARNING "stereo mode not supported\n");
		return NULL;
	}
	if (!(pt->misc & DRM_EDID_PT_SEPARATE_SYNC)) {
		printk(KERN_WARNING "composite sync not supported\n");
	}

	/* it is incorrect if hsync/vsync width is zero */
	if (!hsync_pulse_width || !vsync_pulse_width) {
		DRM_DEBUG_KMS("Incorrect Detailed timing. "
				"Wrong Hsync/Vsync pulse width\n");
		return NULL;
	}
	mode = drm_mode_create(dev);
	if (!mode)
		return NULL;

	mode->type = DRM_MODE_TYPE_DRIVER;

	if (quirks & EDID_QUIRK_135_CLOCK_TOO_HIGH)
		timing->pixel_clock = cpu_to_le16(1088);

	mode->clock = le16_to_cpu(timing->pixel_clock) * 10;

	mode->hdisplay = hactive;
	mode->hsync_start = mode->hdisplay + hsync_offset;
	mode->hsync_end = mode->hsync_start + hsync_pulse_width;
	mode->htotal = mode->hdisplay + hblank;

	mode->vdisplay = vactive;
	mode->vsync_start = mode->vdisplay + vsync_offset;
	mode->vsync_end = mode->vsync_start + vsync_pulse_width;
	mode->vtotal = mode->vdisplay + vblank;

	/* Some EDIDs have bogus h/vtotal values */
	if (mode->hsync_end > mode->htotal)
		mode->htotal = mode->hsync_end + 1;
	if (mode->vsync_end > mode->vtotal)
		mode->vtotal = mode->vsync_end + 1;

	drm_mode_do_interlace_quirk(mode, pt);

	drm_mode_set_name(mode);

	if (quirks & EDID_QUIRK_DETAILED_SYNC_PP) {
		pt->misc |= DRM_EDID_PT_HSYNC_POSITIVE | DRM_EDID_PT_VSYNC_POSITIVE;
	}

	mode->flags |= (pt->misc & DRM_EDID_PT_HSYNC_POSITIVE) ?
		DRM_MODE_FLAG_PHSYNC : DRM_MODE_FLAG_NHSYNC;
	mode->flags |= (pt->misc & DRM_EDID_PT_VSYNC_POSITIVE) ?
		DRM_MODE_FLAG_PVSYNC : DRM_MODE_FLAG_NVSYNC;

	mode->width_mm = pt->width_mm_lo | (pt->width_height_mm_hi & 0xf0) << 4;
	mode->height_mm = pt->height_mm_lo | (pt->width_height_mm_hi & 0xf) << 8;

	if (quirks & EDID_QUIRK_DETAILED_IN_CM) {
		mode->width_mm *= 10;
		mode->height_mm *= 10;
	}

	if (quirks & EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE) {
		mode->width_mm = edid->width_cm * 10;
		mode->height_mm = edid->height_cm * 10;
	}

	return mode;
}

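/*
 * CVT reduced-blanking modes use a fixed 160-pixel horizontal blank, a sync
 * pulse that ends 80 pixels into the blank, a 32-pixel sync width and a
 * 3-line vertical front porch; that signature is what mode_is_rb() looks for.
 */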
static bool
mode_is_rb(const struct drm_display_mode *mode)
{
	return (mode->htotal - mode->hdisplay == 160) &&
	       (mode->hsync_end - mode->hdisplay == 80) &&
	       (mode->hsync_end - mode->hsync_start == 32) &&
	       (mode->vsync_start - mode->vdisplay == 3);
}

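/*
 * Range-limits descriptor layout, as indexed below: byte 4 holds the EDID 1.4
 * offset flags, bytes 5/6 the vertical rate min/max in Hz, bytes 7/8 the
 * horizontal rate min/max in kHz, and byte 9 the maximum pixel clock in
 * 10 MHz units.
 */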
static bool
mode_in_hsync_range(const struct drm_display_mode *mode,
		    struct edid *edid, u8 *t)
{
	int hsync, hmin, hmax;

	hmin = t[7];
	if (edid->revision >= 4)
		hmin += ((t[4] & 0x04) ? 255 : 0);
	hmax = t[8];
	if (edid->revision >= 4)
		hmax += ((t[4] & 0x08) ? 255 : 0);
	hsync = drm_mode_hsync(mode);

	return (hsync <= hmax && hsync >= hmin);
}

static bool
mode_in_vsync_range(const struct drm_display_mode *mode,
		    struct edid *edid, u8 *t)
{
	int vsync, vmin, vmax;

	vmin = t[5];
	if (edid->revision >= 4)
		vmin += ((t[4] & 0x01) ? 255 : 0);
	vmax = t[6];
	if (edid->revision >= 4)
		vmax += ((t[4] & 0x02) ? 255 : 0);
	vsync = drm_mode_vrefresh(mode);

	return (vsync <= vmax && vsync >= vmin);
}

static u32
range_pixel_clock(struct edid *edid, u8 *t)
{
	/* unspecified */
	if (t[9] == 0 || t[9] == 255)
		return 0;

	/* 1.4 with CVT support gives us real precision, yay */
	if (edid->revision >= 4 && t[10] == 0x04)
		return (t[9] * 10000) - ((t[12] >> 2) * 250);

	/* 1.3 is pathetic, so fuzz up a bit */
	return t[9] * 10000 + 5001;
}

static bool
mode_in_range(const struct drm_display_mode *mode, struct edid *edid,
	      struct detailed_timing *timing)
{
	u32 max_clock;
	u8 *t = (u8 *)timing;

	if (!mode_in_hsync_range(mode, edid, t))
		return false;

	if (!mode_in_vsync_range(mode, edid, t))
		return false;

	if ((max_clock = range_pixel_clock(edid, t)))
		if (mode->clock > max_clock)
			return false;

	/* 1.4 max horizontal check */
	if (edid->revision >= 4 && t[10] == 0x04)
		if (t[13] && mode->hdisplay > 8 * (t[13] + (256 * (t[12]&0x3))))
			return false;

	if (mode_is_rb(mode) && !drm_monitor_supports_rb(edid))
		return false;

	return true;
}

/*
 * XXX If drm_dmt_modes ever regrows the CVT-R modes (and it will) this will
 * need to account for them.
 */
static int
drm_gtf_modes_for_range(struct drm_connector *connector, struct edid *edid,
			struct detailed_timing *timing)
{
	int i, modes = 0;
	struct drm_display_mode *newmode;
	struct drm_device *dev = connector->dev;

	for (i = 0; i < drm_num_dmt_modes; i++) {
		if (mode_in_range(drm_dmt_modes + i, edid, timing)) {
			newmode = drm_mode_duplicate(dev, &drm_dmt_modes[i]);
			if (newmode) {
				drm_mode_probed_add(connector, newmode);
				modes++;
			}
		}
	}

	return modes;
}

static void
do_inferred_modes(struct detailed_timing *timing, void *c)
{
	struct detailed_mode_closure *closure = c;
	struct detailed_non_pixel *data = &timing->data.other_data;
	int gtf = (closure->edid->features & DRM_EDID_FEATURE_DEFAULT_GTF);

	if (gtf && data->type == EDID_DETAIL_MONITOR_RANGE)
		closure->modes += drm_gtf_modes_for_range(closure->connector,
							  closure->edid,
							  timing);
}

static int
add_inferred_modes(struct drm_connector *connector, struct edid *edid)
{
	struct detailed_mode_closure closure = {
		connector, edid, 0, 0, 0
	};

	if (version_greater(edid, 1, 0))
		drm_for_each_detailed_block((u8 *)edid, do_inferred_modes,
					    &closure);

	return closure.modes;
}

static int
drm_est3_modes(struct drm_connector *connector, struct detailed_timing *timing)
{
	int i, j, m, modes = 0;
	struct drm_display_mode *mode;
	u8 *est = ((u8 *)timing) + 5;

	for (i = 0; i < 6; i++) {
		for (j = 7; j > 0; j--) {
			m = (i * 8) + (7 - j);
			if (m >= ARRAY_SIZE(est3_modes))
				break;
			if (est[i] & (1 << j)) {
				mode = drm_mode_find_dmt(connector->dev,
							 est3_modes[m].w,
							 est3_modes[m].h,
							 est3_modes[m].r
							 /*, est3_modes[m].rb */);
				if (mode) {
					drm_mode_probed_add(connector, mode);
					modes++;
				}
			}
		}
	}

	return modes;
}

static void
do_established_modes(struct detailed_timing *timing, void *c)
{
	struct detailed_mode_closure *closure = c;
	struct detailed_non_pixel *data = &timing->data.other_data;

	if (data->type == EDID_DETAIL_EST_TIMINGS)
		closure->modes += drm_est3_modes(closure->connector, timing);
}

/**
 * add_established_modes - get est. modes from EDID and add them
 * @edid: EDID block to scan
 *
 * Each EDID block contains a bitmap of the supported "established modes" list
 * (defined above).  Tease them out and add them to the global modes list.
 */
static int
add_established_modes(struct drm_connector *connector, struct edid *edid)
{
	struct drm_device *dev = connector->dev;
	unsigned long est_bits = edid->established_timings.t1 |
		(edid->established_timings.t2 << 8) |
		((edid->established_timings.mfg_rsvd & 0x80) << 9);
	int i, modes = 0;
	struct detailed_mode_closure closure = {
		connector, edid, 0, 0, 0
	};

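	/*
	 * est_bits collects the three established-timing bitmap bytes into a
	 * single word; each set bit selects one entry of edid_est_modes[].
	 */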
	for (i = 0; i <= EDID_EST_TIMINGS; i++) {
		if (est_bits & (1<<i)) {
			struct drm_display_mode *newmode;
			newmode = drm_mode_duplicate(dev, &edid_est_modes[i]);
			if (newmode) {
				drm_mode_probed_add(connector, newmode);
				modes++;
			}
		}
	}

	if (version_greater(edid, 1, 0))
		drm_for_each_detailed_block((u8 *)edid,
					    do_established_modes, &closure);

	return modes + closure.modes;
}

static void
do_standard_modes(struct detailed_timing *timing, void *c)
{
	struct detailed_mode_closure *closure = c;
	struct detailed_non_pixel *data = &timing->data.other_data;
	struct drm_connector *connector = closure->connector;
	struct edid *edid = closure->edid;

	if (data->type == EDID_DETAIL_STD_MODES) {
		int i;
		for (i = 0; i < 6; i++) {
			struct std_timing *std;
			struct drm_display_mode *newmode;

			std = &data->data.timings[i];
			newmode = drm_mode_std(connector, edid, std,
					       edid->revision);
			if (newmode) {
				drm_mode_probed_add(connector, newmode);
				closure->modes++;
			}
		}
	}
}

/**
 * add_standard_modes - get std. modes from EDID and add them
 * @connector: connector to add the modes to
 * @edid: EDID block to scan
 *
 * Standard modes can be calculated using the appropriate standard (DMT,
 * GTF or CVT). Grab them from @edid and add them to the list.
 */
static int
add_standard_modes(struct drm_connector *connector, struct edid *edid)
{
	int i, modes = 0;
	struct detailed_mode_closure closure = {
		connector, edid, 0, 0, 0
	};

	for (i = 0; i < EDID_STD_TIMINGS; i++) {
		struct drm_display_mode *newmode;

		newmode = drm_mode_std(connector, edid,
				       &edid->standard_timings[i],
				       edid->revision);
		if (newmode) {
			drm_mode_probed_add(connector, newmode);
			modes++;
		}
	}

	if (version_greater(edid, 1, 0))
		drm_for_each_detailed_block((u8 *)edid, do_standard_modes,
					    &closure);

	/* XXX should also look for standard codes in VTB blocks */

	return modes + closure.modes;
}

static int drm_cvt_modes(struct drm_connector *connector,
			 struct detailed_timing *timing)
{
	int i, j, modes = 0;
	struct drm_display_mode *newmode;
	struct drm_device *dev = connector->dev;
	struct cvt_timing *cvt;
	const int rates[] = { 60, 85, 75, 60, 50 };
	const u8 empty[3] = { 0, 0, 0 };

	for (i = 0; i < 4; i++) {
		int uninitialized_var(width), height;
		cvt = &(timing->data.other_data.data.cvt[i]);

		if (!memcmp(cvt->code, empty, 3))
			continue;

		height = (cvt->code[0] + ((cvt->code[1] & 0xf0) << 4) + 1) * 2;
		switch (cvt->code[1] & 0x0c) {
		case 0x00:
			width = height * 4 / 3;
			break;
		case 0x04:
			width = height * 16 / 9;
			break;
		case 0x08:
			width = height * 16 / 10;
			break;
		case 0x0c:
			width = height * 15 / 9;
			break;
		}

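		/*
		 * Byte 3 of a CVT code is a refresh-rate bitmap: bit 0 is
		 * 60 Hz with reduced blanking (skipped here), bits 1-4 select
		 * 85, 75, 60 and 50 Hz respectively.
		 */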
		for (j = 1; j < 5; j++) {
			if (cvt->code[2] & (1 << j)) {
				newmode = drm_cvt_mode(dev, width, height,
						       rates[j], j == 0,
						       false, false);
				if (newmode) {
					drm_mode_probed_add(connector, newmode);
					modes++;
				}
			}
		}
	}

	return modes;
}

static void
do_cvt_mode(struct detailed_timing *timing, void *c)
{
	struct detailed_mode_closure *closure = c;
	struct detailed_non_pixel *data = &timing->data.other_data;

	if (data->type == EDID_DETAIL_CVT_3BYTE)
		closure->modes += drm_cvt_modes(closure->connector, timing);
}

static int
add_cvt_modes(struct drm_connector *connector, struct edid *edid)
{
	struct detailed_mode_closure closure = {
		connector, edid, 0, 0, 0
	};

	if (version_greater(edid, 1, 2))
		drm_for_each_detailed_block((u8 *)edid, do_cvt_mode, &closure);

	/* XXX should also look for CVT codes in VTB blocks */

	return closure.modes;
}

static void
do_detailed_mode(struct detailed_timing *timing, void *c)
{
	struct detailed_mode_closure *closure = c;
	struct drm_display_mode *newmode;

	if (timing->pixel_clock) {
		newmode = drm_mode_detailed(closure->connector->dev,
					    closure->edid, timing,
					    closure->quirks);
		if (!newmode)
			return;

		if (closure->preferred)
			newmode->type |= DRM_MODE_TYPE_PREFERRED;

		drm_mode_probed_add(closure->connector, newmode);
		closure->modes++;
		closure->preferred = 0;
	}
}

/*
 * add_detailed_modes - Add modes from detailed timings
 * @connector: attached connector
 * @edid: EDID block to scan
 * @quirks: quirks to apply
 */
static int
add_detailed_modes(struct drm_connector *connector, struct edid *edid,
		   u32 quirks)
{
	struct detailed_mode_closure closure = {
		connector,
		edid,
		1,
		quirks,
		0
	};

	if (closure.preferred && !version_greater(edid, 1, 3))
		closure.preferred =
		    (edid->features & DRM_EDID_FEATURE_PREFERRED_TIMING);

	drm_for_each_detailed_block((u8 *)edid, do_detailed_mode, &closure);

	return closure.modes;
}

#define HDMI_IDENTIFIER 0x000C03
#define AUDIO_BLOCK	0x01
#define VENDOR_BLOCK	0x03
#define EDID_BASIC_AUDIO	(1 << 6)

/**
 * Search EDID for CEA extension block.
 */
u8 *drm_find_cea_extension(struct edid *edid)
{
	u8 *edid_ext = NULL;
	int i;

	/* No EDID or EDID extensions */
	if (edid == NULL || edid->extensions == 0)
		return NULL;

	/* Find CEA extension */
	for (i = 0; i < edid->extensions; i++) {
		edid_ext = (u8 *)edid + EDID_LENGTH * (i + 1);
		if (edid_ext[0] == CEA_EXT)
			break;
	}

	if (i == edid->extensions)
		return NULL;

	return edid_ext;
}
EXPORT_SYMBOL(drm_find_cea_extension);

/**
 * drm_detect_hdmi_monitor - detect whether monitor is hdmi.
 * @edid: monitor EDID information
 *
 * Parse the CEA extension according to CEA-861-B.
 * Return true if HDMI, false if not or unknown.
 */
bool drm_detect_hdmi_monitor(struct edid *edid)
{
	u8 *edid_ext;
	int i, hdmi_id;
	int start_offset, end_offset;
	bool is_hdmi = false;

	edid_ext = drm_find_cea_extension(edid);
	if (!edid_ext)
		goto end;

	/* Data block offset in CEA extension block */
	start_offset = 4;
	end_offset = edid_ext[2];

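	/*
	 * Each CEA data block starts with a header byte: the top three bits
	 * are the block tag, the low five bits the payload length.  The HDMI
	 * vendor-specific block carries the IEEE OUI 0x000C03, stored
	 * little-endian in its first three payload bytes.
	 */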
	/*
	 * Because HDMI identifier is in Vendor Specific Block,
	 * search it from all data blocks of CEA extension.
	 */
	for (i = start_offset; i < end_offset;
		/* Increased by data block len */
		i += ((edid_ext[i] & 0x1f) + 1)) {
		/* Find vendor specific block */
		if ((edid_ext[i] >> 5) == VENDOR_BLOCK) {
			hdmi_id = edid_ext[i + 1] | (edid_ext[i + 2] << 8) |
				  edid_ext[i + 3] << 16;
			/* Find HDMI identifier */
			if (hdmi_id == HDMI_IDENTIFIER)
				is_hdmi = true;
			break;
		}
	}

end:
	return is_hdmi;
}
EXPORT_SYMBOL(drm_detect_hdmi_monitor);

/**
 * drm_detect_monitor_audio - check monitor audio capability
 * @edid: monitor EDID information
 *
 * Monitor should have CEA extension block.
 * If monitor has 'basic audio', but no CEA audio blocks, it's 'basic
 * audio' only. If there is any audio extension block and supported
 * audio format, assume at least 'basic audio' support, even if 'basic
 * audio' is not defined in EDID.
 */
bool drm_detect_monitor_audio(struct edid *edid)
{
	u8 *edid_ext;
	int i, j;
	bool has_audio = false;
	int start_offset, end_offset;

	edid_ext = drm_find_cea_extension(edid);
	if (!edid_ext)
		goto end;

	has_audio = ((edid_ext[3] & EDID_BASIC_AUDIO) != 0);

	if (has_audio) {
		DRM_DEBUG_KMS("Monitor has basic audio support\n");
		goto end;
	}

	/* Data block offset in CEA extension block */
	start_offset = 4;
	end_offset = edid_ext[2];

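	/*
	 * Audio data blocks consist of 3-byte short audio descriptors; the
	 * format code sits in bits 6:3 of each descriptor's first byte.
	 */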
	for (i = start_offset; i < end_offset;
			i += ((edid_ext[i] & 0x1f) + 1)) {
		if ((edid_ext[i] >> 5) == AUDIO_BLOCK) {
			has_audio = true;
			for (j = 1; j < (edid_ext[i] & 0x1f); j += 3)
				DRM_DEBUG_KMS("CEA audio format %d\n",
					      (edid_ext[i + j] >> 3) & 0xf);
			goto end;
		}
	}
end:
	return has_audio;
}
EXPORT_SYMBOL(drm_detect_monitor_audio);

/**
 * drm_add_display_info - pull display info out if present
 * @edid: EDID data
 * @info: display info (attached to connector)
 *
 * Grab any available display info and stuff it into the drm_display_info
 * structure that's part of the connector.  Useful for tracking bpp and
 * color spaces.
 */
static void drm_add_display_info(struct edid *edid,
				 struct drm_display_info *info)
{
	info->width_mm = edid->width_cm * 10;
	info->height_mm = edid->height_cm * 10;

	/* driver figures it out in this case */
	info->bpc = 0;
	info->color_formats = 0;

	/* Only defined for 1.4 with digital displays */
	if (edid->revision < 4)
		return;

	if (!(edid->input & DRM_EDID_INPUT_DIGITAL))
		return;

	switch (edid->input & DRM_EDID_DIGITAL_DEPTH_MASK) {
	case DRM_EDID_DIGITAL_DEPTH_6:
		info->bpc = 6;
		break;
	case DRM_EDID_DIGITAL_DEPTH_8:
		info->bpc = 8;
		break;
	case DRM_EDID_DIGITAL_DEPTH_10:
		info->bpc = 10;
		break;
	case DRM_EDID_DIGITAL_DEPTH_12:
		info->bpc = 12;
		break;
	case DRM_EDID_DIGITAL_DEPTH_14:
		info->bpc = 14;
		break;
	case DRM_EDID_DIGITAL_DEPTH_16:
		info->bpc = 16;
		break;
	case DRM_EDID_DIGITAL_DEPTH_UNDEF:
	default:
		info->bpc = 0;
		break;
	}

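	/*
	 * RGB 4:4:4 is always supported on digital inputs; the feature flags
	 * advertise optional YCbCr 4:4:4 and 4:2:2 support on top of it.
	 */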
	info->color_formats = DRM_COLOR_FORMAT_RGB444;
	if (edid->features & DRM_EDID_FEATURE_RGB_YCRCB444)
		info->color_formats |= DRM_COLOR_FORMAT_YCRCB444;
	if (edid->features & DRM_EDID_FEATURE_RGB_YCRCB422)
		info->color_formats |= DRM_COLOR_FORMAT_YCRCB422;
}

/**
 * drm_add_edid_modes - add modes from EDID data, if available
 * @connector: connector we're probing
 * @edid: edid data
 *
 * Add the specified modes to the connector's mode list.
 *
 * Return number of modes added or 0 if we couldn't find any.
 */
int drm_add_edid_modes(struct drm_connector *connector, struct edid *edid)
{
	int num_modes = 0;
	u32 quirks;

	if (edid == NULL) {
		return 0;
	}
	if (!drm_edid_is_valid(edid)) {
		dev_warn(connector->dev->dev, "%s: EDID invalid.\n",
			 drm_get_connector_name(connector));
		return 0;
	}

	quirks = edid_get_quirks(edid);

	/*
	 * EDID spec says modes should be preferred in this order:
	 * - preferred detailed mode
	 * - other detailed modes from base block
	 * - detailed modes from extension blocks
	 * - CVT 3-byte code modes
	 * - standard timing codes
	 * - established timing codes
	 * - modes inferred from GTF or CVT range information
	 *
	 * We get this pretty much right.
	 *
	 * XXX order for additional mode types in extension blocks?
	 */
	num_modes += add_detailed_modes(connector, edid, quirks);
	num_modes += add_cvt_modes(connector, edid);
	num_modes += add_standard_modes(connector, edid);
	num_modes += add_established_modes(connector, edid);
	num_modes += add_inferred_modes(connector, edid);

	if (quirks & (EDID_QUIRK_PREFER_LARGE_60 | EDID_QUIRK_PREFER_LARGE_75))
		edid_fixup_preferred(connector, quirks);

	drm_add_display_info(edid, &connector->display_info);

	return num_modes;
}
EXPORT_SYMBOL(drm_add_edid_modes);

/**
 * drm_add_modes_noedid - add modes for the connectors without EDID
 * @connector: connector we're probing
 * @hdisplay: the horizontal display limit
 * @vdisplay: the vertical display limit
 *
 * Add the specified modes to the connector's mode list. Only when the
 * hdisplay/vdisplay is not beyond the given limit, it will be added.
 *
 * Return number of modes added or 0 if we couldn't find any.
 */
int drm_add_modes_noedid(struct drm_connector *connector,
			 int hdisplay, int vdisplay)
{
	int i, count, num_modes = 0;
	struct drm_display_mode *mode;
	struct drm_device *dev = connector->dev;

	count = sizeof(drm_dmt_modes) / sizeof(struct drm_display_mode);
	if (hdisplay < 0)
		hdisplay = 0;
	if (vdisplay < 0)
		vdisplay = 0;

	for (i = 0; i < count; i++) {
		const struct drm_display_mode *ptr = &drm_dmt_modes[i];
		if (hdisplay && vdisplay) {
			/*
			 * Only when two are valid, they will be used to check
			 * whether the mode should be added to the mode list of
			 * the connector.
			 */
			if (ptr->hdisplay > hdisplay ||
			    ptr->vdisplay > vdisplay)
				continue;
		}
		if (drm_mode_vrefresh(ptr) > 61)
			continue;
		mode = drm_mode_duplicate(dev, ptr);
		if (mode) {
			drm_mode_probed_add(connector, mode);
			num_modes++;
		}
	}
	return num_modes;
}
EXPORT_SYMBOL(drm_add_modes_noedid);