/* EVO (display engine) channel indices.  The hardware exposes a single
 * master (core) channel plus one flip (base), overlay, overlay-immediate
 * and cursor channel per CRTC; 'c' is the CRTC index. */
#define EVO_MASTER (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))
/* Byte offsets into the shared synchronisation buffer: each channel 'c'
 * owns a 0x100-byte region, 'o' is the offset within that region.
 * MAST_NTFY is the master-channel notifier; SEM0/SEM1 are the two
 * per-CRTC page-flip semaphores. */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY EVO_SYNC( 0, 0x00)
#define EVO_FLIP_SEM0(c) EVO_SYNC((c), 0x00)
#define EVO_FLIP_SEM1(c) EVO_SYNC((c), 0x10)
98 nv_mask(device, 0x610700 + (
id * 0x10), 0x00000001, 0x00000001);
99 nv_wr32(device, 0x610704 + (
id * 0x10), data);
100 nv_mask(device, 0x610704 + (
id * 0x10), 0x80000ffc, 0x80000000 | mthd);
101 if (!
nv_wait(device, 0x610704 + (
id * 0x10), 0x80000000, 0x00000000))
103 nv_mask(device, 0x610700 + (
id * 0x10), 0x00000001, 0x00000000);
113 u32 put = nv_rd32(device, 0x640000 + (
id * 0x1000)) / 4;
116 disp->
evo[
id].ptr[
put] = 0x20000000;
118 nv_wr32(device, 0x640000 + (
id * 0x1000), 0x00000000);
119 if (!
nv_wait(device, 0x640004 + (
id * 0x1000), ~0, 0x00000000)) {
120 NV_ERROR(drm,
"evo %d dma stalled\n",
id);
136 nv_wr32(device, 0x640000 + (
id * 0x1000), (push - disp->
evo[
id].ptr) << 2);
/* Emit an EVO method header (data-word count 's' in bits 18+, method
 * address 'm' in the low bits) or a raw data word into the push buffer,
 * advancing the push pointer 'p'.  NOTE: 'p' is incremented by the
 * expansion — do not pass an expression with side effects for it. */
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d) *((p)++) = (d)
154 nv_wr32(device, 0x610494 + (ch * 0x0010), (disp->
evo[ch].handle >> 8) | 3);
155 nv_wr32(device, 0x610498 + (ch * 0x0010), 0x00010000);
156 nv_wr32(device, 0x61049c + (ch * 0x0010), 0x00000001);
157 nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000010);
158 nv_wr32(device, 0x640000 + (ch * 0x1000), 0x00000000);
159 nv_wr32(device, 0x610490 + (ch * 0x0010), 0x00000013 | flags);
160 if (!
nv_wait(device, 0x610490 + (ch * 0x0010), 0x80000000, 0x00000000)) {
161 NV_ERROR(drm,
"PDISP: ch%d 0x%08x\n", ch,
162 nv_rd32(device, 0x610490 + (ch * 0x0010)));
166 nv_mask(device, 0x610090, (1 << ch), (1 << ch));
167 nv_mask(device, 0x6100a0, (1 << ch), (1 << ch));
176 if (!(nv_rd32(device, 0x610490 + (ch * 0x0010)) & 0x00000010))
179 nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000000);
180 nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000003, 0x00000000);
181 nv_wait(device, 0x610490 + (ch * 0x0010), 0x80000000, 0x00000000);
182 nv_mask(device, 0x610090, (1 << ch), 0x00000000);
183 nv_mask(device, 0x6100a0, (1 << ch), 0x00000000);
190 nv_wr32(device, 0x640000 + (ch * 0x1000) + mthd, data);
199 nv_wr32(device, 0x610490 + (ch * 0x0010), 0x00000001);
200 if (!
nv_wait(device, 0x610490 + (ch * 0x0010), 0x00010000, 0x00010000)) {
201 NV_ERROR(drm,
"PDISP: ch%d 0x%08x\n", ch,
202 nv_rd32(device, 0x610490 + (ch * 0x0010)));
206 nv_mask(device, 0x610090, (1 << ch), (1 << ch));
207 nv_mask(device, 0x6100a0, (1 << ch), (1 << ch));
216 if (!(nv_rd32(device, 0x610490 + (ch * 0x0010)) & 0x00000001))
219 nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000010);
220 nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000001, 0x00000000);
221 nv_wait(device, 0x610490 + (ch * 0x0010), 0x00010000, 0x00000000);
222 nv_mask(device, 0x610090, (1 << ch), 0x00000000);
223 nv_mask(device, 0x6100a0, (1 << ch), 0x00000000);
227 evo_sync_wait(
void *data)
237 u32 *push = evo_wait(dev, ch, 8);
245 evo_kick(push, dev, ch);
270 push = evo_wait(crtc->
dev, evo->
idx, 8);
280 evo_kick(push, crtc->
dev, evo->
idx);
297 if (swap_interval == 0)
298 swap_interval |= 0x100;
300 push = evo_wait(crtc->
dev, evo->
idx, 128);
306 ret = RING_SPACE(chan, 10);
312 offset += evo->
sem.offset;
327 0xf00d0000 | evo->
sem.value);
336 if (!(swap_interval & 0x00000100)) {
361 evo_kick(push, crtc->
dev, evo->
idx);
363 evo->
sem.offset ^= 0x10;
382 connector = &nv_connector->
base;
397 if (nv_device(drm->
device)->card_type < NV_E0)
398 mthd = 0x0490 + (nv_crtc->
index * 0x0300);
400 mthd = 0x04a0 + (nv_crtc->
index * 0x0300);
417 nvd0_crtc_set_scale(
struct nouveau_crtc *nv_crtc,
bool update)
449 nv_connector->
edid &&
453 u32 aspect = (oY << 19) / oX;
457 if (bY) oY -= (bY * 2);
458 else oY = ((oX * aspect) + (aspect / 2)) >> 19;
460 oX -= (oX >> 4) + 32;
461 if (bY) oY -= (bY * 2);
462 else oY = ((oX * aspect) + (aspect / 2)) >> 19;
477 oX = ((oY * aspect) + (aspect / 2)) >> 19;
480 oY = ((oX * aspect) + (aspect / 2)) >> 19;
509 int x,
int y,
bool update)
532 nv_crtc->
fb.tile_flags = nvfb->
r_dma;
537 nvd0_crtc_cursor_show(
struct nouveau_crtc *nv_crtc,
bool show,
bool update)
565 nvd0_crtc_dpms(
struct drm_crtc *crtc,
int mode)
570 nvd0_crtc_prepare(
struct drm_crtc *crtc)
588 nvd0_crtc_cursor_show(nv_crtc,
false,
false);
592 nvd0_crtc_commit(
struct drm_crtc *crtc)
613 nvd0_crtc_cursor_show(nv_crtc, nv_crtc->
cursor.visible,
true);
651 u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
652 u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
653 u32 vblan2e = 0, vblan2s = 1;
660 hblanke = hsynce + hbackp;
662 hblanks = mode->
htotal - hfrontp - 1;
664 vactive = mode->
vtotal * vscan / ilace;
667 vblanke = vsynce + vbackp;
669 vblanks = vactive - vfrontp - 1;
671 vblan2e = vactive + vsynce + vbackp;
672 vblan2s = vblan2e + (mode->
vdisplay * vscan / ilace);
673 vactive = (vactive * 2) + 1;
676 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
684 evo_data(push, (vactive << 16) | hactive);
685 evo_data(push, ( vsynce << 16) | hsynce);
686 evo_data(push, (vblanke << 16) | hblanke);
687 evo_data(push, (vblanks << 16) | hblanks);
688 evo_data(push, (vblan2e << 16) | vblan2s);
702 nvd0_crtc_set_dither(nv_crtc,
false);
703 nvd0_crtc_set_scale(nv_crtc,
false);
704 nvd0_crtc_set_image(nv_crtc, crtc->
fb, x, y,
false);
709 nvd0_crtc_mode_set_base(
struct drm_crtc *crtc,
int x,
int y,
721 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
726 nvd0_crtc_set_image(nv_crtc, crtc->
fb, x, y,
true);
732 nvd0_crtc_mode_set_base_atomic(
struct drm_crtc *crtc,
738 nvd0_crtc_set_image(nv_crtc, fb, x, y,
true);
743 nvd0_crtc_lut_load(
struct drm_crtc *crtc)
746 void __iomem *
lut = nvbo_kmap_obj_iovirtual(nv_crtc->
lut.nvbo);
749 for (i = 0; i < 256; i++) {
750 writew(0x6000 + (nv_crtc->
lut.r[i] >> 2), lut + (i * 0x20) + 0);
751 writew(0x6000 + (nv_crtc->
lut.g[i] >> 2), lut + (i * 0x20) + 2);
752 writew(0x6000 + (nv_crtc->
lut.b[i] >> 2), lut + (i * 0x20) + 4);
757 nvd0_crtc_cursor_set(
struct drm_crtc *crtc,
struct drm_file *file_priv,
762 struct drm_gem_object *
gem;
764 bool visible = (handle != 0);
768 if (width != 64 || height != 64)
774 nvbo = nouveau_gem_object(gem);
778 for (i = 0; i < 64 * 64; i++) {
785 drm_gem_object_unreference_unlocked(gem);
788 if (visible != nv_crtc->
cursor.visible) {
789 nvd0_crtc_cursor_show(nv_crtc, visible,
true);
790 nv_crtc->
cursor.visible = visible;
797 nvd0_crtc_cursor_move(
struct drm_crtc *crtc,
int x,
int y)
802 evo_piow(crtc->
dev, ch, 0x0084, (y << 16) | (x & 0xffff));
803 evo_piow(crtc->
dev, ch, 0x0080, 0x00000000);
815 for (i = start; i <
end; i++) {
816 nv_crtc->
lut.r[
i] = r[
i];
817 nv_crtc->
lut.g[
i] = g[
i];
818 nv_crtc->
lut.b[
i] = b[
i];
821 nvd0_crtc_lut_load(crtc);
825 nvd0_crtc_destroy(
struct drm_crtc *crtc)
831 nouveau_bo_ref(
NULL, &nv_crtc->
lut.nvbo);
837 .dpms = nvd0_crtc_dpms,
838 .prepare = nvd0_crtc_prepare,
839 .commit = nvd0_crtc_commit,
840 .mode_fixup = nvd0_crtc_mode_fixup,
841 .mode_set = nvd0_crtc_mode_set,
842 .mode_set_base = nvd0_crtc_mode_set_base,
843 .mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
844 .load_lut = nvd0_crtc_lut_load,
848 .cursor_set = nvd0_crtc_cursor_set,
849 .cursor_move = nvd0_crtc_cursor_move,
850 .gamma_set = nvd0_crtc_gamma_set,
852 .destroy = nvd0_crtc_destroy,
857 nvd0_cursor_set_pos(
struct nouveau_crtc *nv_crtc,
int x,
int y)
873 nv_crtc = kzalloc(
sizeof(*nv_crtc),
GFP_KERNEL);
879 nv_crtc->
set_scale = nvd0_crtc_set_scale;
880 nv_crtc->
cursor.set_offset = nvd0_cursor_set_offset;
881 nv_crtc->
cursor.set_pos = nvd0_cursor_set_pos;
882 for (i = 0; i < 256; i++) {
883 nv_crtc->
lut.r[
i] = i << 8;
884 nv_crtc->
lut.g[
i] = i << 8;
885 nv_crtc->
lut.b[
i] = i << 8;
888 crtc = &nv_crtc->
base;
890 drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
907 0, 0x0000,
NULL, &nv_crtc->
lut.nvbo);
913 nouveau_bo_ref(
NULL, &nv_crtc->
lut.nvbo);
919 nvd0_crtc_lut_load(crtc);
923 nvd0_crtc_destroy(crtc);
931 nvd0_dac_dpms(
struct drm_encoder *encoder,
int mode)
936 int or = nv_encoder->
or;
939 dpms_ctrl = 0x80000000;
941 dpms_ctrl |= 0x00000001;
943 dpms_ctrl |= 0x00000004;
945 nv_wait(device, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
946 nv_mask(device, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
947 nv_wait(device, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
961 int id = adjusted_mode->
base.id;
963 adjusted_mode->
base.id =
id;
989 magic = 0x31ec6000 | (nv_crtc->
index << 25);
1000 evo_mthd(push, 0x0180 + (nv_encoder->
or * 0x020), 2);
1016 if (nv_encoder->
crtc) {
1017 nvd0_crtc_prepare(nv_encoder->
crtc);
1021 evo_mthd(push, 0x0180 + (nv_encoder->
or * 0x20), 1);
1039 int or = nv_encoder->
or;
1042 nv_wr32(device, 0x61a00c + (or * 0x800), 0x00100000);
1044 nv_wr32(device, 0x61a00c + (or * 0x800), 0x80000000);
1046 load = nv_rd32(device, 0x61a00c + (or * 0x800));
1047 if ((load & 0x38000000) == 0x38000000)
1050 nv_wr32(device, 0x61a00c + (or * 0x800), 0x00000000);
1062 .dpms = nvd0_dac_dpms,
1063 .mode_fixup = nvd0_dac_mode_fixup,
1064 .prepare = nvd0_dac_disconnect,
1065 .commit = nvd0_dac_commit,
1066 .mode_set = nvd0_dac_mode_set,
1067 .disable = nvd0_dac_disconnect,
1068 .get_crtc = nvd0_display_crtc_get,
1069 .detect = nvd0_dac_detect
1073 .destroy = nvd0_dac_destroy,
1083 nv_encoder = kzalloc(
sizeof(*nv_encoder),
GFP_KERNEL);
1086 nv_encoder->
dcb = dcbe;
1087 nv_encoder->
or =
ffs(dcbe->
or) - 1;
1089 encoder = to_drm_encoder(nv_encoder);
1093 drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
1109 int i, or = nv_encoder->
or * 0x30;
1115 nv_mask(device, 0x10ec10 + or, 0x80000003, 0x80000001);
1118 if (nv_connector->
base.eld[0]) {
1119 u8 *eld = nv_connector->
base.eld;
1121 for (i = 0; i < eld[2] * 4; i++)
1122 nv_wr32(device, 0x10ec00 + or, (i << 8) | eld[i]);
1123 for (i = eld[2] * 4; i < 0x60; i++)
1124 nv_wr32(device, 0x10ec00 + or, (i << 8) | 0x00);
1126 nv_mask(device, 0x10ec10 + or, 0x80000002, 0x80000002);
1131 nvd0_audio_disconnect(
struct drm_encoder *encoder)
1136 int or = nv_encoder->
or * 0x30;
1138 nv_mask(device, 0x10ec10 + or, 0x80000003, 0x80000000);
1161 max_ac_packet -= rekey;
1162 max_ac_packet -= 18;
1163 max_ac_packet /= 32;
1166 nv_mask(device, 0x616714 + head, 0x00000001, 0x00000000);
1167 nv_wr32(device, 0x61671c + head, 0x000d0282);
1168 nv_wr32(device, 0x616720 + head, 0x0000006f);
1169 nv_wr32(device, 0x616724 + head, 0x00000000);
1170 nv_wr32(device, 0x616728 + head, 0x00000000);
1171 nv_wr32(device, 0x61672c + head, 0x00000000);
1172 nv_mask(device, 0x616714 + head, 0x00000001, 0x00000001);
1175 nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000000);
1176 nv_wr32(device, 0x6167ac + head, 0x00000010);
1177 nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000001);
1180 nv_mask(device, 0x616798 + head, 0x401f007f, 0x40000000 | rekey |
1181 max_ac_packet << 16);
1184 nv_mask(device, 0x616548 + head, 0x00000070, 0x00000000);
1186 nvd0_audio_mode_set(encoder, mode);
1196 int head = nv_crtc->
index * 0x800;
1198 nvd0_audio_disconnect(encoder);
1200 nv_mask(device, 0x616798 + head, 0x40000000, 0x00000000);
1201 nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000000);
1202 nv_mask(device, 0x616714 + head, 0x00000001, 0x00000000);
1211 static const u8 nvd0[] = { 16, 8, 0, 24 };
1220 const u32 loff = (or * 0x800) + (
link * 0x80);
1221 nv_mask(device, 0x61c110 + loff, 0x0f0f0f0f, 0x01010101 * pattern);
1226 u8 lane,
u8 swing,
u8 preem)
1231 const u32 loff = (or * 0x800) + (
link * 0x80);
1232 u32 shift = nvd0_sor_dp_lane_map(dev, dcb, lane);
1233 u32 mask = 0x000000ff << shift;
1237 case 0: preem += 0;
break;
1238 case 1: preem += 4;
break;
1239 case 2: preem += 7;
break;
1240 case 3: preem += 9;
break;
1245 if (table[0] == 0x30) {
1246 config = entry + table[4];
1247 config += table[5] * preem;
1249 if (table[0] == 0x40) {
1250 config = table + table[1];
1251 config += table[2] * table[3];
1252 config += table[6] * preem;
1257 NV_ERROR(drm,
"PDISP: unsupported DP table for chipset\n");
1261 nv_mask(device, 0x61c118 + loff, mask, config[1] << shift);
1262 nv_mask(device, 0x61c120 + loff, mask, config[2] << shift);
1263 nv_mask(device, 0x61c130 + loff, 0x0000ff00, config[3] << 8);
1264 nv_mask(device, 0x61c13c + loff, 0x00000000, 0x00000000);
1269 int link_nr,
u32 link_bw,
bool enhframe)
1273 const u32 loff = (or * 0x800) + (
link * 0x80);
1274 const u32 soff = (or * 0x800);
1275 u32 dpctrl = nv_rd32(device, 0x61c10c + loff) & ~0x001f4000;
1276 u32 clksor = nv_rd32(device, 0x612300 + soff) & ~0x007c0000;
1285 if (table[0] == 0x30) entry =
ROMPTR(dev, entry[10]);
1286 else if (table[0] == 0x40) entry =
ROMPTR(dev, entry[9]);
1290 if (entry[0] >= link_bw)
1295 nouveau_bios_run_init_table(dev, script, dcb, crtc);
1298 clksor |= link_bw << 18;
1299 dpctrl |= ((1 << link_nr) - 1) << 16;
1301 dpctrl |= 0x00004000;
1303 for (i = 0; i < link_nr; i++)
1304 lane_mask |= 1 << (nvd0_sor_dp_lane_map(dev, dcb, i) >> 3);
1306 nv_wr32(device, 0x612300 + soff, clksor);
1307 nv_wr32(device, 0x61c10c + loff, dpctrl);
1308 nv_mask(device, 0x61c130 + loff, 0x0000000f, lane_mask);
1313 u32 *link_nr,
u32 *link_bw)
1317 const u32 loff = (or * 0x800) + (
link * 0x80);
1318 const u32 soff = (or * 0x800);
1319 u32 dpctrl = nv_rd32(device, 0x61c10c + loff) & 0x000f0000;
1320 u32 clksor = nv_rd32(device, 0x612300 + soff);
1322 if (dpctrl > 0x00030000) *link_nr = 4;
1323 else if (dpctrl > 0x00010000) *link_nr = 2;
1326 *link_bw = (clksor & 0x007c0000) >> 18;
1337 u32 link_nr, link_bw;
1340 nvd0_sor_dp_link_get(dev, dcb, &link_nr, &link_bw);
1344 do_div(ratio, link_nr * link_bw);
1346 value = (symbol -
ratio) * TU;
1352 value |= 0x08000000;
1354 nv_wr32(device, 0x616610 + (crtc * 0x800), value);
1358 nvd0_sor_dpms(
struct drm_encoder *encoder,
int mode)
1364 int or = nv_encoder->
or;
1375 if (nv_partner != nv_encoder &&
1376 nv_partner->
dcb->or == nv_encoder->
dcb->or) {
1384 dpms_ctrl |= 0x80000000;
1386 nv_wait(device, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
1387 nv_mask(device, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
1388 nv_wait(device, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
1389 nv_wait(device, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
1393 .link_set = nvd0_sor_dp_link_set,
1394 .train_set = nvd0_sor_dp_train_set,
1395 .train_adj = nvd0_sor_dp_train_adj
1413 int id = adjusted_mode->
base.id;
1415 adjusted_mode->
base.id =
id;
1429 if (nv_encoder->
crtc) {
1430 nvd0_crtc_prepare(nv_encoder->
crtc);
1434 evo_mthd(push, 0x0200 + (nv_encoder->
or * 0x20), 1);
1441 nvd0_hdmi_disconnect(encoder);
1451 nvd0_sor_disconnect(encoder);
1471 u32 mode_ctrl = (1 << nv_crtc->
index);
1477 syncs |= 0x00000008;
1479 syncs |= 0x00000010;
1481 magic = 0x31ec6000 | (nv_crtc->
index << 25);
1483 magic |= 0x00000001;
1486 switch (nv_encoder->
dcb->type) {
1488 if (nv_encoder->
dcb->sorconf.link & 1) {
1489 if (mode->
clock < 165000)
1490 mode_ctrl |= 0x00000100;
1492 mode_ctrl |= 0x00000500;
1494 mode_ctrl |= 0x00000200;
1497 or_config = (mode_ctrl & 0x00000f00) >> 8;
1498 if (mode->
clock >= 165000)
1499 or_config |= 0x0100;
1501 nvd0_hdmi_mode_set(encoder, mode);
1504 or_config = (mode_ctrl & 0x00000f00) >> 8;
1506 if (bios->
fp.dual_link)
1507 or_config |= 0x0100;
1508 if (bios->
fp.if_is_24bit)
1509 or_config |= 0x0200;
1512 if (((
u8 *)nv_connector->
edid)[121] == 2)
1513 or_config |= 0x0100;
1515 if (mode->
clock >= bios->
fp.duallink_transition_clk) {
1516 or_config |= 0x0100;
1519 if (or_config & 0x0100) {
1520 if (bios->
fp.strapless_is_24bit & 2)
1521 or_config |= 0x0200;
1523 if (bios->
fp.strapless_is_24bit & 1)
1524 or_config |= 0x0200;
1527 if (nv_connector->
base.display_info.bpc == 8)
1528 or_config |= 0x0200;
1533 if (nv_connector->
base.display_info.bpc == 6) {
1534 nv_encoder->
dp.datarate = mode->
clock * 18 / 8;
1535 syncs |= 0x00000002 << 6;
1537 nv_encoder->
dp.datarate = mode->
clock * 24 / 8;
1538 syncs |= 0x00000005 << 6;
1541 if (nv_encoder->
dcb->sorconf.link & 1)
1542 mode_ctrl |= 0x00000800;
1544 mode_ctrl |= 0x00000900;
1546 or_config = (mode_ctrl & 0x00000f00) >> 8;
1556 nvd0_sor_dp_calc_tu(dev, nv_encoder->
dcb, nv_crtc->
index,
1557 nv_encoder->
dp.datarate);
1565 evo_mthd(push, 0x0200 + (nv_encoder->
or * 0x020), 2);
1582 .dpms = nvd0_sor_dpms,
1583 .mode_fixup = nvd0_sor_mode_fixup,
1584 .prepare = nvd0_sor_prepare,
1585 .commit = nvd0_sor_commit,
1586 .mode_set = nvd0_sor_mode_set,
1587 .disable = nvd0_sor_disconnect,
1588 .get_crtc = nvd0_display_crtc_get,
1592 .destroy = nvd0_sor_destroy,
1602 nv_encoder = kzalloc(
sizeof(*nv_encoder),
GFP_KERNEL);
1605 nv_encoder->
dcb = dcbe;
1606 nv_encoder->
or =
ffs(dcbe->
or) - 1;
1609 encoder = to_drm_encoder(nv_encoder);
1613 drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
1632 switch (mc & 0x00000f00) {
1640 NV_ERROR(drm,
"PDISP: unknown SOR mc 0x%08x\n", mc);
1647 for (i = 0; i < drm->
vbios.dcb.entries; i++) {
1649 if (dcb->
type == type && (dcb->
or & (1 << or)) &&
1650 (link < 0 || link == !(dcb->
sorconf.link & 1)))
1654 NV_ERROR(drm,
"PDISP: DCB for %d/0x%08x not found\n",
id, mc);
1665 for (i = 0; mask && i < 8; i++) {
1666 u32 mcc = nv_rd32(device, 0x640180 + (i * 0x20));
1667 if (!(mcc & (1 << crtc)))
1670 dcb = lookup_dcb(dev, i, mcc);
1677 nv_wr32(device, 0x6101d4, 0x00000000);
1678 nv_wr32(device, 0x6109d4, 0x00000000);
1679 nv_wr32(device, 0x6101d0, 0x80000000);
1691 for (i = 0; mask && i < 8; i++) {
1692 u32 mcc = nv_rd32(device, 0x640180 + (i * 0x20));
1693 if (!(mcc & (1 << crtc)))
1696 dcb = lookup_dcb(dev, i, mcc);
1703 pclk = nv_rd32(device, 0x660450 + (crtc * 0x300)) / 1000;
1704 NV_DEBUG(drm,
"PDISP: crtc %d pclk %d mask 0x%08x\n",
1706 if (pclk && (mask & 0x00010000)) {
1710 for (i = 0; mask && i < 8; i++) {
1711 u32 mcp = nv_rd32(device, 0x660180 + (i * 0x20));
1712 u32 cfg = nv_rd32(device, 0x660184 + (i * 0x20));
1713 if (!(mcp & (1 << crtc)))
1716 dcb = lookup_dcb(dev, i, mcp);
1719 or =
ffs(dcb->
or) - 1;
1723 nv_wr32(device, 0x612200 + (crtc * 0x800), 0x00000000);
1724 switch (dcb->
type) {
1726 nv_wr32(device, 0x612280 + (or * 0x800), 0x00000000);
1731 if (cfg & 0x00000100)
1736 nv_mask(device, 0x612300 + (or * 0x800), 0x00000707, tmp);
1745 nv_wr32(device, 0x6101d4, 0x00000000);
1746 nv_wr32(device, 0x6109d4, 0x00000000);
1747 nv_wr32(device, 0x6101d0, 0x80000000);
1757 pclk = nv_rd32(device, 0x660450 + (crtc * 0x300)) / 1000;
1759 for (i = 0; mask && i < 8; i++) {
1760 u32 mcp = nv_rd32(device, 0x660180 + (i * 0x20));
1761 u32 cfg = nv_rd32(device, 0x660184 + (i * 0x20));
1762 if (!(mcp & (1 << crtc)))
1765 dcb = lookup_dcb(dev, i, mcp);
1772 nv_wr32(device, 0x6101d4, 0x00000000);
1773 nv_wr32(device, 0x6109d4, 0x00000000);
1774 nv_wr32(device, 0x6101d0, 0x80000000);
1778 nvd0_display_bh(
unsigned long data)
1784 u32 mask = 0, crtc = ~0;
1787 if (
drm_debug & (DRM_UT_DRIVER | DRM_UT_KMS)) {
1789 NV_INFO(drm,
" STAT: 0x%08x 0x%08x 0x%08x\n",
1790 nv_rd32(device, 0x6101d0),
1791 nv_rd32(device, 0x6101d4), nv_rd32(device, 0x6109d4));
1792 for (i = 0; i < 8; i++) {
1793 NV_INFO(drm,
" %s%d: 0x%08x 0x%08x\n",
1794 i < 4 ?
"DAC" :
"SOR", i,
1795 nv_rd32(device, 0x640180 + (i * 0x20)),
1796 nv_rd32(device, 0x660180 + (i * 0x20)));
1801 mask = nv_rd32(device, 0x6101d4 + (crtc * 0x800));
1803 if (disp->
modeset & 0x00000001)
1804 nvd0_display_unk1_handler(dev, crtc, mask);
1805 if (disp->
modeset & 0x00000002)
1806 nvd0_display_unk2_handler(dev, crtc, mask);
1807 if (disp->
modeset & 0x00000004)
1808 nvd0_display_unk4_handler(dev, crtc, mask);
1817 u32 intr = nv_rd32(device, 0x610088);
1819 if (intr & 0x00000001) {
1820 u32 stat = nv_rd32(device, 0x61008c);
1821 nv_wr32(device, 0x61008c, stat);
1822 intr &= ~0x00000001;
1825 if (intr & 0x00000002) {
1826 u32 stat = nv_rd32(device, 0x61009c);
1827 int chid =
ffs(stat) - 1;
1829 u32 mthd = nv_rd32(device, 0x6101f0 + (chid * 12));
1830 u32 data = nv_rd32(device, 0x6101f4 + (chid * 12));
1831 u32 unkn = nv_rd32(device, 0x6101f8 + (chid * 12));
1833 NV_INFO(drm,
"EvoCh: chid %d mthd 0x%04x data 0x%08x "
1835 chid, (mthd & 0x0000ffc), data, mthd, unkn);
1836 nv_wr32(device, 0x61009c, (1 << chid));
1837 nv_wr32(device, 0x6101f0 + (chid * 12), 0x90000000);
1840 intr &= ~0x00000002;
1843 if (intr & 0x00100000) {
1844 u32 stat = nv_rd32(device, 0x6100ac);
1846 if (stat & 0x00000007) {
1848 tasklet_schedule(&disp->
tasklet);
1850 nv_wr32(device, 0x6100ac, (stat & 0x00000007));
1851 stat &= ~0x00000007;
1855 NV_INFO(drm,
"PDISP: unknown intr24 0x%08x\n", stat);
1856 nv_wr32(device, 0x6100ac, stat);
1859 intr &= ~0x00100000;
1862 intr &= ~0x0f000000;
1864 NV_INFO(drm,
"PDISP: unknown intr 0x%08x\n", intr);
1876 for (i = 1; i >= 0; i--) {
1896 if (nv_rd32(device, 0x6100ac) & 0x00000100) {
1897 nv_wr32(device, 0x6100ac, 0x00000100);
1898 nv_mask(device, 0x6194e8, 0x00000001, 0x00000000);
1899 if (!
nv_wait(device, 0x6194e8, 0x00000002, 0x00000000)) {
1900 NV_ERROR(drm,
"PDISP: 0x6194e8 0x%08x\n",
1901 nv_rd32(device, 0x6194e8));
1909 for (i = 0; i < 3; i++) {
1910 u32 dac = nv_rd32(device, 0x61a000 + (i * 0x800));
1911 nv_wr32(device, 0x6101c0 + (i * 0x800), dac);
1914 for (i = 0; i < 4; i++) {
1915 u32 sor = nv_rd32(device, 0x61c000 + (i * 0x800));
1916 nv_wr32(device, 0x6301c4 + (i * 0x800), sor);
1919 for (i = 0; i < dev->mode_config.num_crtc; i++) {
1920 u32 crtc0 = nv_rd32(device, 0x616104 + (i * 0x800));
1921 u32 crtc1 = nv_rd32(device, 0x616108 + (i * 0x800));
1922 u32 crtc2 = nv_rd32(device, 0x61610c + (i * 0x800));
1923 nv_wr32(device, 0x6101b4 + (i * 0x800), crtc0);
1924 nv_wr32(device, 0x6101b8 + (i * 0x800), crtc1);
1925 nv_wr32(device, 0x6101bc + (i * 0x800), crtc2);
1929 nv_wr32(device, 0x610010, (disp->
mem->addr >> 8) | 9);
1930 nv_mask(device, 0x6100b0, 0x00000307, 0x00000307);
1938 for (i = 0; i < dev->mode_config.num_crtc; i++) {
1939 if ((ret = evo_init_dma(dev,
EVO_FLIP(i))) ||
1940 (ret = evo_init_dma(dev,
EVO_OVLY(i))) ||
1941 (ret = evo_init_pio(dev,
EVO_OIMM(i))) ||
1942 (ret = evo_init_pio(dev,
EVO_CURS(i))))
1979 nouveau_gpuobj_ref(
NULL, &disp->
mem);
2011 crtcs = nv_rd32(device, 0x022448);
2012 for (i = 0; i < crtcs; i++) {
2013 ret = nvd0_crtc_create(dev, i);
2019 for (i = 0, dcbe = &dcb->
entry[0]; i < dcb->
entries; i++, dcbe++) {
2021 if (IS_ERR(connector))
2025 NV_WARN(drm,
"skipping off-chip encoder %d/%d\n",
2030 switch (dcbe->
type) {
2034 nvd0_sor_create(connector, dcbe);
2037 nvd0_dac_create(connector, dcbe);
2040 NV_WARN(drm,
"skipping unsupported encoder %d/%d\n",
2051 NV_WARN(drm,
"%s has no encoders, removing\n",
2053 connector->
funcs->destroy(connector);
2083 u32 dmao = 0x1000 + (i * 0x100);
2084 u32 hash = 0x0000 + (i * 0x040);
2094 nv_wo32(disp->
mem, dmao + 0x00, 0x00000049);
2095 nv_wo32(disp->
mem, dmao + 0x04, (offset + 0x0000) >> 8);
2096 nv_wo32(disp->
mem, dmao + 0x08, (offset + 0x0fff) >> 8);
2097 nv_wo32(disp->
mem, dmao + 0x0c, 0x00000000);
2098 nv_wo32(disp->
mem, dmao + 0x10, 0x00000000);
2099 nv_wo32(disp->
mem, dmao + 0x14, 0x00000000);
2101 nv_wo32(disp->
mem, hash + 0x04, 0x00000001 | (i << 27) |
2102 ((dmao + 0x00) << 9));
2104 nv_wo32(disp->
mem, dmao + 0x20, 0x00000049);
2105 nv_wo32(disp->
mem, dmao + 0x24, 0x00000000);
2106 nv_wo32(disp->
mem, dmao + 0x28, (pfb->
ram.size - 1) >> 8);
2107 nv_wo32(disp->
mem, dmao + 0x2c, 0x00000000);
2108 nv_wo32(disp->
mem, dmao + 0x30, 0x00000000);
2109 nv_wo32(disp->
mem, dmao + 0x34, 0x00000000);
2111 nv_wo32(disp->
mem, hash + 0x0c, 0x00000001 | (i << 27) |
2112 ((dmao + 0x20) << 9));
2114 nv_wo32(disp->
mem, dmao + 0x40, 0x00000009);
2115 nv_wo32(disp->
mem, dmao + 0x44, 0x00000000);
2116 nv_wo32(disp->
mem, dmao + 0x48, (pfb->
ram.size - 1) >> 8);
2117 nv_wo32(disp->
mem, dmao + 0x4c, 0x00000000);
2118 nv_wo32(disp->
mem, dmao + 0x50, 0x00000000);
2119 nv_wo32(disp->
mem, dmao + 0x54, 0x00000000);
2121 nv_wo32(disp->
mem, hash + 0x14, 0x00000001 | (i << 27) |
2122 ((dmao + 0x40) << 9));
2124 nv_wo32(disp->
mem, dmao + 0x60, 0x0fe00009);
2125 nv_wo32(disp->
mem, dmao + 0x64, 0x00000000);
2126 nv_wo32(disp->
mem, dmao + 0x68, (pfb->
ram.size - 1) >> 8);
2127 nv_wo32(disp->
mem, dmao + 0x6c, 0x00000000);
2128 nv_wo32(disp->
mem, dmao + 0x70, 0x00000000);
2129 nv_wo32(disp->
mem, dmao + 0x74, 0x00000000);
2131 nv_wo32(disp->
mem, hash + 0x1c, 0x00000001 | (i << 27) |
2132 ((dmao + 0x60) << 9));