        void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
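        /*
         * Each of the 256 LUT entries occupies 8 bytes in the buffer object,
         * with 16-bit red, green and blue words at offsets 0, 2 and 4.  The
         * stored 16-bit gamma values are shifted down by two bits to the
         * 14-bit range used by the hardware.
         */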
        for (i = 0; i < 256; i++) {
                writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
                writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
                writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
        }

        if (nv_crtc->lut.depth == 30) {
                writew(nv_crtc->lut.r[i - 1] >> 2, lut + 8*i + 0);
                writew(nv_crtc->lut.g[i - 1] >> 2, lut + 8*i + 2);
                writew(nv_crtc->lut.b[i - 1] >> 2, lut + 8*i + 4);
        }
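        /*
         * For a 30-bit framebuffer the hardware fetches one additional LUT
         * entry, so the block above replicates the last colour into slot 256
         * (i is 256 when the loop exits).
         */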
        NV_DEBUG(drm, "%s\n", blanked ? "blanked" : "unblanked");
                nv_crtc->cursor.hide(nv_crtc, false);

                ret = RING_SPACE(evo, nv_device(drm->device)->chipset != 0x50 ? 7 : 5);
                if (ret) {
                        NV_ERROR(drm, "no space while blanking crtc\n");
                        return ret;
                }
                if (nv_device(drm->device)->chipset != 0x50) {
                if (nv_crtc->cursor.visible)
                        nv_crtc->cursor.show(nv_crtc, false);
                else
                        nv_crtc->cursor.hide(nv_crtc, false);
                ret = RING_SPACE(evo, nv_device(drm->device)->chipset != 0x50 ? 10 : 8);
                if (ret) {
                        NV_ERROR(drm, "no space while unblanking crtc\n");
                        return ret;
                }
                if (nv_device(drm->device)->chipset != 0x50) {
                if (nv_device(drm->device)->chipset != 0x50)
                        if (nv_crtc->fb.tile_flags == 0x7a00 ||
                            nv_crtc->fb.tile_flags == 0xfe00)
                        if (nv_crtc->fb.tile_flags == 0x7000)
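                        /*
                         * The elided branches select the EVO framebuffer DMA
                         * object that matches the buffer's memory type
                         * (tile_flags), so scanout interprets the surface
                         * correctly.
                         */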
        nv_crtc->fb.blanked = blanked;
        connector = &nv_connector->base;
        ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
        NV_DEBUG(drm, "vibrance = %i, hue = %i\n",
                 nv_crtc->color_vibrance, nv_crtc->vibrant_hue);
        ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
        if (ret) {
                NV_ERROR(drm, "no space while setting color vibrance\n");
                return ret;
        }
        hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;
        OUT_RING (evo, (hue << 20) | (vib << 8));
        struct drm_crtc *crtc = to_drm_crtc(nv_crtc);
                if (connector->encoder->crtc == crtc)
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
        int scaling_mode, ret;
                NV_ERROR(drm, "no native mode, forcing panel scaling\n");
            nv_connector->edid &&
                u32 aspect = (oY << 19) / oX;
                        if (bY) oY -= (bY * 2);
                        else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;

                        oX -= (oX >> 4) + 32;
                        if (bY) oY -= (bY * 2);
                        else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
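                        /*
                         * "aspect" is oY/oX in 19-bit fixed point.  When no
                         * explicit vertical border is given, the new height
                         * is derived from the shrunken width, with
                         * (aspect / 2) providing round-to-nearest before the
                         * shift.  The oX adjustment above applies a default
                         * underscan of oX/16 + 32 pixels.
                         */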
        switch (scaling_mode) {
                        oX = ((oY * aspect) + (aspect / 2)) >> 19;
                        oY = ((oX * aspect) + (aspect / 2)) >> 19;
        ret = RING_SPACE(evo, 5);
nv50_crtc_destroy(struct drm_crtc *crtc)
        nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
        struct drm_gem_object *gem;
        if (!buffer_handle) {
                nv_crtc->cursor.hide(nv_crtc, true);
        if (width != 64 || height != 64)
        cursor = nouveau_gem_object(gem);
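        /*
         * The loop below walks all 64x64 cursor pixels; its body (not shown
         * here) copies them into the CRTC's own cursor buffer object, one
         * 32-bit word per iteration.
         */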
        for (i = 0; i < 64 * 64; i++)
        nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
        nv_crtc->cursor.show(nv_crtc, true);
        drm_gem_object_unreference_unlocked(gem);
        nv_crtc->cursor.set_pos(nv_crtc, x, y);
        int end = (start + size > 256) ? 256 : start + size, i;
        for (i = start; i < end; i++) {
                nv_crtc->lut.r[i] = r[i];
                nv_crtc->lut.g[i] = g[i];
                nv_crtc->lut.b[i] = b[i];
        }
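        /*
         * If no framebuffer is bound yet the LUT depth is unknown, so it is
         * cleared below and the table is uploaded later by the next mode
         * set; otherwise nv50_crtc_lut_load() pushes it immediately.
         */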
        if (!nv_crtc->base.fb) {
                nv_crtc->lut.depth = 0;
        nv50_crtc_lut_load(crtc);
nv50_crtc_save(struct drm_crtc *crtc)

nv50_crtc_restore(struct drm_crtc *crtc)
        .save = nv50_crtc_save,
        .restore = nv50_crtc_restore,
        .gamma_set = nv50_crtc_gamma_set,
        .destroy = nv50_crtc_destroy,
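        /* Entries in the CRTC's drm_crtc_funcs table: .gamma_set and
         * .destroy are invoked by the DRM core, while .save/.restore appear
         * to be the legacy hooks for saving and restoring CRTC state. */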
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)

nv50_crtc_prepare(struct drm_crtc *crtc)

nv50_crtc_commit(struct drm_crtc *crtc)
nv50_crtc_do_mode_set_base(struct drm_crtc *crtc,
                           struct drm_framebuffer *passed_fb,
                           int x, int y, bool atomic)
        if (!atomic && !crtc->fb) {
        nv_crtc->fb.offset = fb->nvbo->bo.offset;
        if (!nv_crtc->fb.blanked && nv_device(drm->device)->chipset != 0x50) {
                ret = RING_SPACE(evo, 2);
        ret = RING_SPACE(evo, 12);
        if (nv_crtc->lut.depth != fb->base.depth) {
                nv_crtc->lut.depth = fb->base.depth;
                nv50_crtc_lut_load(crtc);
        }
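        /*
         * When the framebuffer depth changes, the cached LUT depth is
         * updated and the gamma table re-uploaded so the LUT format
         * (including the 30-bit special case) matches the new surface.
         */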
        u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
        u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
        u32 vblan2e = 0, vblan2s = 1;
        hblanke = hsynce + hbackp;
        hblanks = mode->htotal - hfrontp - 1;

        vactive = mode->vtotal * vscan / ilace;
        vblanke = vsynce + vbackp;
        vblanks = vactive - vfrontp - 1;

        vblan2e = vactive + vsynce + vbackp;
        vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
        vactive = (vactive * 2) + 1;
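        /*
         * hblanke/vblanke mark the end and hblanks/vblanks the start of the
         * blanking windows.  The vblan2* values describe the second field of
         * an interlaced frame; they keep their 0/1 defaults for progressive
         * modes, and vactive is doubled (plus one) only when ilace indicates
         * an interlaced mode.
         */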
        ret = RING_SPACE(evo, 18);
        BEGIN_NV04(evo, 0, 0x0804 + head, 2);
        OUT_RING (evo, (ilace == 2) ? 2 : 0);
        BEGIN_NV04(evo, 0, 0x0810 + head, 6);
        OUT_RING (evo, (vactive << 16) | hactive);
        OUT_RING (evo, ( vsynce << 16) | hsynce);
        OUT_RING (evo, (vblanke << 16) | hblanke);
        OUT_RING (evo, (vblanks << 16) | hblanks);
        OUT_RING (evo, (vblan2e << 16) | vblan2s);
        BEGIN_NV04(evo, 0, 0x082c + head, 1);
        BEGIN_NV04(evo, 0, 0x0900 + head, 1);
        BEGIN_NV04(evo, 0, 0x08c8 + head, 1);
        BEGIN_NV04(evo, 0, 0x08d4 + head, 1);
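        /*
         * Each BEGIN_NV04 selects a per-head EVO method (head offsets the
         * method into this CRTC's register block) and declares how many data
         * words follow; the timing words pack the vertical value in the high
         * halfword and the horizontal value in the low one.
         */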
        return nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
                        struct drm_framebuffer *old_fb)

        ret = nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
                               struct drm_framebuffer *fb, int x, int y,
                               enum mode_set_atomic state)

        ret = nv50_crtc_do_mode_set_base(crtc, fb, x, y, true);
        .dpms = nv50_crtc_dpms,
        .prepare = nv50_crtc_prepare,
        .commit = nv50_crtc_commit,
        .mode_fixup = nv50_crtc_mode_fixup,
        .mode_set = nv50_crtc_mode_set,
        .mode_set_base = nv50_crtc_mode_set_base,
        .mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
        .load_lut = nv50_crtc_lut_load,
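        /* This drm_crtc_helper_funcs table (nv50_crtc_helper_funcs) is
         * attached to each CRTC by the drm_crtc_helper_add() call in the
         * create path below. */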
        nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
        nv_crtc->set_scale = nv50_crtc_set_scale;
        nv_crtc->lut.depth = 0;
        for (i = 0; i < 256; i++) {
                nv_crtc->lut.r[i] = i << 8;
                nv_crtc->lut.g[i] = i << 8;
                nv_crtc->lut.b[i] = i << 8;
        }
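        /*
         * Initialise the gamma table to a linear identity ramp: i << 8
         * widens the 8-bit index into the 16-bit range stored in lut.r/g/b.
         */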
        drm_crtc_helper_add(&nv_crtc->base, &nv50_crtc_helper_funcs);
                             0, 0x0000, NULL, &nv_crtc->lut.nvbo);
        nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
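        /* Passing NULL to nouveau_bo_ref() drops the reference to the LUT
         * buffer object allocated above, cleaning up if setting it up
         * failed. */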
        nv50_crtc_destroy(&nv_crtc->base);