30 #include <linux/kernel.h>
31 #include <linux/slab.h>
32 #include <linux/i2c.h>
33 #include <linux/module.h>
/*
 * True when the EDID's (version, revision) pair is strictly newer than
 * (maj, min).  Note a same-version, same-revision EDID compares false.
 */
#define version_greater(edid, maj, min) \
	(((edid)->version > (maj)) || \
	 ((edid)->version == (maj) && (edid)->revision > (min)))
/* Fixed descriptor counts defined by the EDID 1.x base-block layout */
#define EDID_EST_TIMINGS 16	/* established timing bitmap bits */
#define EDID_STD_TIMINGS 8	/* standard timing 2-byte codes */
#define EDID_DETAILED_TIMINGS 4	/* 18-byte detailed timing descriptors */
/*
 * Per-monitor quirk flags, matched against edid_quirk_list by vendor/product.
 * Each flag adjusts how this driver interprets a known-broken EDID.
 */
/* First detailed mode is wrong; prefer the largest 60 Hz mode instead */
#define EDID_QUIRK_PREFER_LARGE_60 (1 << 0)
/* Reported 135 MHz pixel clock is too high; needs adjustment */
#define EDID_QUIRK_135_CLOCK_TOO_HIGH (1 << 1)
/* Prefer the largest mode at 75 Hz */
#define EDID_QUIRK_PREFER_LARGE_75 (1 << 2)
/* Detailed timing physical size is given in cm rather than mm */
#define EDID_QUIRK_DETAILED_IN_CM (1 << 3)
/* Detailed timing size fields are bogus; use the EDID maximum image size */
#define EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE (1 << 4)
/* Monitor forgot to set the "first detailed mode is preferred" bit */
#define EDID_QUIRK_FIRST_DETAILED_PREFERRED (1 << 5)
/* Use +hsync +vsync polarity for detailed modes */
#define EDID_QUIRK_DETAILED_SYNC_PP (1 << 6)
/* Force reduced-blanking timings for detailed modes */
#define EDID_QUIRK_FORCE_REDUCED_BLANKING (1 << 7)
85 static struct edid_quirk {
89 } edid_quirk_list[] = {
135 static const u8 edid_header[] = {
136 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00
147 for (i = 0; i <
sizeof(edid_header); i++)
148 if (raw_edid[i] == edid_header[i])
158 "Minimum number of valid EDID header bytes (0-8, default 6)");
168 struct edid *
edid = (
struct edid *)raw_edid;
170 if (edid_fixup > 8 || edid_fixup < 0)
176 else if (score >= edid_fixup) {
177 DRM_DEBUG(
"Fixing EDID header, your hardware may be failing\n");
178 memcpy(raw_edid, edid_header,
sizeof(edid_header));
187 if (print_bad_edid) {
188 DRM_ERROR(
"EDID checksum is invalid, remainder is %d\n", csum);
192 if (raw_edid[0] != 0x02)
197 switch (raw_edid[0]) {
200 DRM_ERROR(
"EDID has major version %d, instead of 1\n", edid->
version);
205 DRM_DEBUG(
"EDID minor > 4, assuming backward compatibility\n");
215 if (raw_edid && print_bad_edid) {
218 raw_edid, EDID_LENGTH,
false);
246 #define DDC_SEGMENT_ADDR 0x30
262 unsigned char segment = block >> 1;
263 unsigned char xfers = segment ? 3 : 2;
299 DRM_DEBUG_KMS(
"drm: skipping non-existent adapter %s\n",
303 }
while (ret != xfers && --retries);
305 return ret == xfers ? 0 : -1;
308 static bool drm_edid_is_zero(
u8 *in_edid,
int length)
311 u32 *raw_edid = (
u32 *)in_edid;
313 for (i = 0; i < length / 4; i++)
314 if (*(raw_edid + i) != 0)
322 int i,
j = 0, valid_extensions = 0;
330 for (i = 0; i < 4; i++) {
331 if (drm_do_probe_ddc_edid(adapter, block, 0,
EDID_LENGTH))
335 if (i == 0 && drm_edid_is_zero(block,
EDID_LENGTH)) {
344 if (block[0x7e] == 0)
352 for (j = 1; j <= block[0x7e]; j++) {
353 for (i = 0; i < 4; i++) {
354 if (drm_do_probe_ddc_edid(adapter,
365 "%s: Ignoring invalid EDID block %d.\n",
369 if (valid_extensions != block[0x7e]) {
370 block[
EDID_LENGTH-1] += block[0x7e] - valid_extensions;
371 block[0x7e] = valid_extensions;
381 if (print_bad_edid) {
382 dev_warn(connector->
dev->dev,
"%s: EDID block %d invalid.\n",
403 return (drm_do_probe_ddc_edid(adapter, &out, 0, 1) == 0);
423 edid = (
struct edid *)drm_do_get_edid(connector, adapter);
442 edid_vendor[0] = ((edid->
mfg_id[0] & 0x7c) >> 2) +
'@';
443 edid_vendor[1] = (((edid->
mfg_id[0] & 0x3) << 3) |
444 ((edid->
mfg_id[1] & 0xe0) >> 5)) +
'@';
445 edid_vendor[2] = (edid->
mfg_id[1] & 0x1f) +
'@';
447 return !
strncmp(edid_vendor, vendor, 3);
458 struct edid_quirk *quirk;
461 for (i = 0; i <
ARRAY_SIZE(edid_quirk_list); i++) {
462 quirk = &edid_quirk_list[
i];
464 if (edid_vendor(edid, quirk->vendor) &&
466 return quirk->quirks;
/* Area of a mode in pixels; used to pick the "largest" candidate mode. */
#define MODE_SIZE(m) ((m)->hdisplay * (m)->vdisplay)
/*
 * Absolute distance between a mode's refresh rate and a target rate.
 * Fix: expand to the macro's own (r) argument.  The previous expansion
 * ignored r and silently captured a 'target_refresh' local from the
 * call site's scope, so the macro only worked (by accident) inside
 * edid_fixup_preferred() and would miscompile or miscompute anywhere
 * else.
 */
#define MODE_REFRESH_DIFF(m, r) (abs((m)->vrefresh - (r)))
483 static void edid_fixup_preferred(
struct drm_connector *connector,
487 int target_refresh = 0;
503 if (cur_mode == preferred_mode)
508 preferred_mode = cur_mode;
514 preferred_mode = cur_mode;
542 int hsize,
int vsize,
int fresh,
547 for (i = 0; i < drm_num_dmt_modes; i++) {
555 if (rb != mode_is_rb(ptr))
572 u8 *det_base = ext +
d;
575 for (i = 0; i <
n; i++)
580 vtb_for_each_detailed_block(
u8 *ext,
detailed_cb *cb,
void *closure)
582 unsigned int i, n =
min((
int)ext[0x02], 6);
583 u8 *det_base = ext + 5;
588 for (i = 0; i <
n; i++)
593 drm_for_each_detailed_block(
u8 *raw_edid,
detailed_cb *cb,
void *closure)
596 struct edid *edid = (
struct edid *)raw_edid;
604 for (i = 1; i <= raw_edid[0x7e]; i++) {
608 cea_for_each_detailed_block(ext, cb, closure);
611 vtb_for_each_detailed_block(ext, cb, closure);
625 *(
bool *)data =
true;
630 drm_monitor_supports_rb(
struct edid *edid)
634 drm_for_each_detailed_block((
u8 *)edid, is_rb, &ret);
651 drm_gtf2_hbreak(
struct edid *edid)
654 drm_for_each_detailed_block((
u8 *)edid, find_gtf2, &r);
655 return r ? (r[12] * 2) : 0;
659 drm_gtf2_2c(
struct edid *edid)
662 drm_for_each_detailed_block((
u8 *)edid, find_gtf2, &r);
663 return r ? r[13] : 0;
667 drm_gtf2_m(
struct edid *edid)
670 drm_for_each_detailed_block((
u8 *)edid, find_gtf2, &r);
671 return r ? (r[15] << 8) + r[14] : 0;
675 drm_gtf2_k(
struct edid *edid)
678 drm_for_each_detailed_block((
u8 *)edid, find_gtf2, &r);
679 return r ? r[16] : 0;
683 drm_gtf2_2j(
struct edid *edid)
686 drm_for_each_detailed_block((
u8 *)edid, find_gtf2, &r);
687 return r ? r[17] : 0;
694 static int standard_timing_level(
struct edid *edid)
699 if (drm_gtf2_hbreak(edid))
711 bad_std_timing(
u8 a,
u8 b)
713 return (a == 0x00 && b == 0x00) ||
714 (a == 0x01 && b == 0x01) ||
715 (a == 0x20 && b == 0x20);
727 drm_mode_std(
struct drm_connector *connector,
struct edid *edid,
738 int timing_level = standard_timing_level(edid);
744 hsize = t->
hsize * 8 + 248;
746 vrefresh_rate = vfreq + 60;
748 if (aspect_ratio == 0) {
752 vsize = (hsize * 10) / 16;
753 }
else if (aspect_ratio == 1)
754 vsize = (hsize * 3) / 4;
755 else if (aspect_ratio == 2)
756 vsize = (hsize * 4) / 5;
758 vsize = (hsize * 9) / 16;
761 if (vrefresh_rate == 60 &&
762 ((hsize == 1360 && vsize == 765) ||
763 (hsize == 1368 && vsize == 769))) {
780 if (hsize == 1366 && vsize == 768 && vrefresh_rate == 60) {
781 mode =
drm_cvt_mode(dev, 1366, 768, vrefresh_rate, 0, 0,
790 if (drm_monitor_supports_rb(edid)) {
801 switch (timing_level) {
805 mode =
drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
813 mode =
drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
827 mode =
drm_cvt_mode(dev, hsize, vsize, vrefresh_rate, 0, 0,
847 static const struct {
849 } cea_interlaced[] = {
862 for (i = 0; i <
ARRAY_SIZE(cea_interlaced); i++) {
863 if ((mode->
hdisplay == cea_interlaced[i].w) &&
864 (mode->
vdisplay == cea_interlaced[
i].h / 2)) {
903 if (hactive < 64 || vactive < 64)
916 DRM_DEBUG_KMS(
"Incorrect Detailed timing. "
917 "Wrong Hsync/Vsync pulse width\n");
922 mode =
drm_cvt_mode(dev, hactive, vactive, 60,
true,
false,
false);
954 drm_mode_do_interlace_quirk(mode, pt);
987 struct edid *edid,
u8 *t)
989 int hsync, hmin, hmax;
993 hmin += ((t[4] & 0x04) ? 255 : 0);
996 hmax += ((t[4] & 0x08) ? 255 : 0);
999 return (hsync <= hmax && hsync >= hmin);
1004 struct edid *edid,
u8 *t)
1006 int vsync, vmin, vmax;
1010 vmin += ((t[4] & 0x01) ? 255 : 0);
1013 vmax += ((t[4] & 0x02) ? 255 : 0);
1016 return (vsync <= vmax && vsync >= vmin);
1020 range_pixel_clock(
struct edid *edid,
u8 *t)
1023 if (t[9] == 0 || t[9] == 255)
1027 if (edid->
revision >= 4 && t[10] == 0x04)
1028 return (t[9] * 10000) - ((t[12] >> 2) * 250);
1031 return t[9] * 10000 + 5001;
1039 u8 *t = (
u8 *)timing;
1041 if (!mode_in_hsync_range(mode, edid, t))
1044 if (!mode_in_vsync_range(mode, edid, t))
1047 if ((max_clock = range_pixel_clock(edid, t)))
1048 if (mode->
clock > max_clock)
1052 if (edid->
revision >= 4 && t[10] == 0x04)
1053 if (t[13] && mode->
hdisplay > 8 * (t[13] + (256 * (t[12]&0x3))))
1056 if (mode_is_rb(mode) && !drm_monitor_supports_rb(edid))
1062 static bool valid_inferred_mode(
const struct drm_connector *connector,
1081 drm_dmt_modes_for_range(
struct drm_connector *connector,
struct edid *edid,
1088 for (i = 0; i < drm_num_dmt_modes; i++) {
1089 if (mode_in_range(drm_dmt_modes + i, edid, timing) &&
1090 valid_inferred_mode(connector, drm_dmt_modes + i)) {
1116 drm_gtf_modes_for_range(
struct drm_connector *connector,
struct edid *edid,
1123 for (i = 0; i < num_extra_modes; i++) {
1124 const struct minimode *m = &extra_modes[
i];
1129 fixup_mode_1366x768(newmode);
1130 if (!mode_in_range(newmode, edid, timing) ||
1131 !valid_inferred_mode(connector, newmode)) {
1144 drm_cvt_modes_for_range(
struct drm_connector *connector,
struct edid *edid,
1150 bool rb = drm_monitor_supports_rb(edid);
1152 for (i = 0; i < num_extra_modes; i++) {
1153 const struct minimode *m = &extra_modes[
i];
1158 fixup_mode_1366x768(newmode);
1159 if (!mode_in_range(newmode, edid, timing) ||
1160 !valid_inferred_mode(connector, newmode)) {
1189 switch (range->
flags) {
1211 add_inferred_modes(
struct drm_connector *connector,
struct edid *edid)
1218 drm_for_each_detailed_block((
u8 *)edid, do_inferred_modes,
1221 return closure.
modes;
1227 int i,
j,
m, modes = 0;
1229 u8 *est = ((
u8 *)timing) + 5;
1231 for (i = 0; i < 6; i++) {
1232 for (j = 7; j > 0; j--) {
1233 m = (i * 8) + (7 - j);
1236 if (est[i] & (1 << j)) {
1271 add_established_modes(
struct drm_connector *connector,
struct edid *edid)
1283 if (est_bits & (1<<i)) {
1294 drm_for_each_detailed_block((
u8 *)edid,
1295 do_established_modes, &closure);
1297 return modes + closure.
modes;
1306 struct edid *edid = closure->
edid;
1310 for (i = 0; i < 6; i++) {
1315 newmode = drm_mode_std(connector, edid, std,
1333 add_standard_modes(
struct drm_connector *connector,
struct edid *edid)
1343 newmode = drm_mode_std(connector, edid,
1353 drm_for_each_detailed_block((
u8 *)edid, do_standard_modes,
1358 return modes + closure.
modes;
1364 int i,
j, modes = 0;
1368 const int rates[] = { 60, 85, 75, 60, 50 };
1369 const u8 empty[3] = { 0, 0, 0 };
1371 for (i = 0; i < 4; i++) {
1378 height = (cvt->
code[0] + ((cvt->
code[1] & 0xf0) << 4) + 1) * 2;
1379 switch (cvt->
code[1] & 0x0c) {
1381 width = height * 4 / 3;
1384 width = height * 16 / 9;
1387 width = height * 16 / 10;
1390 width = height * 15 / 9;
1394 for (j = 1; j < 5; j++) {
1395 if (cvt->
code[2] & (1 << j)) {
1421 add_cvt_modes(
struct drm_connector *connector,
struct edid *edid)
1428 drm_for_each_detailed_block((
u8 *)edid, do_cvt_mode, &closure);
1432 return closure.
modes;
1442 newmode = drm_mode_detailed(closure->
connector->dev,
1443 closure->
edid, timing,
1464 add_detailed_modes(
struct drm_connector *connector,
struct edid *edid,
1479 drm_for_each_detailed_block((
u8 *)edid, do_detailed_mode, &closure);
1481 return closure.
modes;
/* IEEE OUI registered to HDMI Licensing, found in the vendor-specific
 * data block (matched little-endian in cea_db_is_hdmi_vsdb below). */
#define HDMI_IDENTIFIER 0x000C03
/* CEA-861 data block tag codes (upper 3 bits of the block header byte) */
#define AUDIO_BLOCK 0x01
#define VIDEO_BLOCK 0x02
#define VENDOR_BLOCK 0x03
#define SPEAKER_BLOCK 0x04
/* Capability bits from byte 3 of the CEA extension block */
#define EDID_BASIC_AUDIO (1 << 6)
#define EDID_CEA_YCRCB444 (1 << 5)
#define EDID_CEA_YCRCB422 (1 << 4)
1526 for (mode = db; mode < db + len; mode++) {
1527 cea_mode = (*mode & 127) - 1;
1528 if (cea_mode < drm_num_cea_modes) {
1531 &edid_cea_modes[cea_mode]);
1543 cea_db_payload_len(
const u8 *db)
1545 return db[0] & 0x1f;
1549 cea_db_tag(
const u8 *db)
1555 cea_revision(
const u8 *cea)
1561 cea_db_offsets(
const u8 *cea,
int *
start,
int *
end)
1568 if (*end < 4 || *end > 127)
/*
 * Walk the data blocks of a CEA extension: (i) indexes the header byte of
 * each block; every block is 1 header byte plus cea_db_payload_len()
 * payload bytes.  Iteration stops before any block whose payload would
 * run past (end), so a corrupt length byte cannot walk out of bounds.
 */
#define for_each_cea_db(cea, i, start, end) \
	for ((i) = (start); (i) < (end) && (i) + cea_db_payload_len(&(cea)[(i)]) < (end); (i) += cea_db_payload_len(&(cea)[(i)]) + 1)
1577 add_cea_modes(
struct drm_connector *connector,
struct edid *edid)
1583 if (cea && cea_revision(cea) >= 3) {
1586 if (cea_db_offsets(cea, &start, &end))
1591 dbl = cea_db_payload_len(db);
1594 modes += do_cea_modes (connector, db+1, dbl);
1604 u8 len = cea_db_payload_len(db);
1607 connector->
eld[5] |= (db[6] >> 7) << 1;
1625 DRM_LOG_KMS(
"HDMI: DVI dual %d, "
1626 "max TMDS clock %d, "
1627 "latency present %d %d, "
1628 "video latency %d %d, "
1629 "audio latency %d %d\n",
1647 static bool cea_db_is_hdmi_vsdb(
const u8 *db)
1654 if (cea_db_payload_len(db) < 5)
1657 hdmi_id = db[1] | (db[2] << 8) | (db[3] << 16);
1687 DRM_DEBUG_KMS(
"ELD: no CEA Extension found\n");
1693 for (mnl = 0; name && mnl < 13; mnl++) {
1694 if (name[mnl] == 0x0a)
1696 eld[20 + mnl] = name[mnl];
1698 eld[4] = (cea[1] << 5) | mnl;
1699 DRM_DEBUG_KMS(
"ELD monitor %s\n", eld + 20);
1703 eld[16] = edid->
mfg_id[0];
1704 eld[17] = edid->
mfg_id[1];
1708 if (cea_revision(cea) >= 3) {
1711 if (cea_db_offsets(cea, &start, &end)) {
1718 dbl = cea_db_payload_len(db);
1720 switch (cea_db_tag(db)) {
1723 sad_count = dbl / 3;
1725 memcpy(eld + 20 + mnl, &db[1], dbl);
1734 if (cea_db_is_hdmi_vsdb(db))
1735 parse_hdmi_vsdb(connector, db);
1742 eld[5] |= sad_count << 4;
1743 eld[2] = (20 + mnl + sad_count * 3 + 3) / 4;
1745 DRM_DEBUG_KMS(
"ELD size %d, SAD count %d\n", (
int)eld[2], sad_count);
1771 if (a == 255 || v == 255)
1779 a =
min(2 * (a - 1), 500);
1781 v =
min(2 * (v - 1), 500);
1783 return max(v - a, 0);
1802 if (connector->
encoder == encoder && connector->
eld[0])
1826 if (cea_db_offsets(edid_ext, &start_offset, &end_offset))
1834 if (cea_db_is_hdmi_vsdb(&edid_ext[i]))
1856 bool has_audio =
false;
1866 DRM_DEBUG_KMS(
"Monitor has basic audio support\n");
1870 if (cea_db_offsets(edid_ext, &start_offset, &end_offset))
1876 for (j = 1; j < cea_db_payload_len(&edid_ext[i]) + 1; j += 3)
1877 DRM_DEBUG_KMS(
"CEA audio format %d\n",
1878 (edid_ext[i + j] >> 3) & 0xf);
1896 static void drm_add_display_info(
struct edid *edid,
1981 dev_warn(connector->
dev->dev,
"%s: EDID invalid.\n",
1986 quirks = edid_get_quirks(edid);
2002 num_modes += add_detailed_modes(connector, edid, quirks);
2003 num_modes += add_cvt_modes(connector, edid);
2004 num_modes += add_standard_modes(connector, edid);
2005 num_modes += add_established_modes(connector, edid);
2006 num_modes += add_inferred_modes(connector, edid);
2007 num_modes += add_cea_modes(connector, edid);
2010 edid_fixup_preferred(connector, quirks);
2030 int hdisplay,
int vdisplay)
2032 int i,
count, num_modes = 0;
2042 for (i = 0; i <
count; i++) {
2044 if (hdisplay && vdisplay) {