Linux Kernel  3.7.1
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Groups Pages
nvd0_display.c
Go to the documentation of this file.
1 /*
2  * Copyright 2011 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Ben Skeggs
23  */
24 
25 #include <linux/dma-mapping.h>
26 
27 #include <drm/drmP.h>
28 #include <drm/drm_crtc_helper.h>
29 
30 #include "nouveau_drm.h"
31 #include "nouveau_dma.h"
32 #include "nouveau_gem.h"
33 #include "nouveau_connector.h"
34 #include "nouveau_encoder.h"
35 #include "nouveau_crtc.h"
36 #include "nouveau_fence.h"
37 #include "nv50_display.h"
38 
39 #include <core/gpuobj.h>
40 
41 #include <subdev/timer.h>
42 #include <subdev/bar.h>
43 #include <subdev/fb.h>
44 
/* Number of EVO channels driven in DMA mode. */
45 #define EVO_DMA_NR 9
46 
/* EVO channel index assignments: one core/master channel, then blocks of
 * per-head channels (FLIP/OVLY/OIMM used with the head index, CURS is the
 * PIO cursor channel -- see nvd0_crtc_cursor_move()). */
47 #define EVO_MASTER (0x00)
48 #define EVO_FLIP(c) (0x01 + (c))
49 #define EVO_OVLY(c) (0x05 + (c))
50 #define EVO_OIMM(c) (0x09 + (c))
51 #define EVO_CURS(c) (0x0d + (c))
52 
53 /* offsets in shared sync bo of various structures */
/* Each channel owns a 0x100-byte slot; word 0 of slot 0 is the master
 * notifier polled by evo_sync(), the flip slots hold two semaphore words
 * toggled by nvd0_display_flip_next(). */
54 #define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
55 #define EVO_MAST_NTFY EVO_SYNC( 0, 0x00)
56 #define EVO_FLIP_SEM0(c) EVO_SYNC((c), 0x00)
57 #define EVO_FLIP_SEM1(c) EVO_SYNC((c), 0x10)
58 
/* Per-EVO-channel software state.
 * NOTE(review): several members were lost when this listing was extracted
 * (doxygen lines 61, 64-65 missing) -- later code also dereferences
 * evo[ch].handle (evo_init_dma) and evo->sem.offset / evo->sem.value
 * (page-flip path), which must be declared here; confirm against the
 * original source. */
59 struct evo {
60  int idx;
62  u32 *ptr;
63  struct {
66  } sem;
67 };
68 
/* Top-level software state for the nvd0 display engine.
 * "sync" is the shared notifier/semaphore buffer, "evo" the per-channel
 * state.  NOTE(review): members were lost in extraction (doxygen lines
 * 70 and 73-75 missing); confirm against the original source. */
69 struct nvd0_display {
71  struct nouveau_bo *sync;
72  struct evo evo[9];
73 
76 };
77 
78 static struct nvd0_display *
80 {
81  return nouveau_display(dev)->priv;
82 }
83 
84 static struct drm_crtc *
85 nvd0_display_crtc_get(struct drm_encoder *encoder)
86 {
87  return nouveau_encoder(encoder)->crtc;
88 }
89 
90 /******************************************************************************
91  * EVO channel helpers
92  *****************************************************************************/
93 static inline int
94 evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
95 {
96  struct nouveau_device *device = nouveau_dev(dev);
97  int ret = 0;
98  nv_mask(device, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
99  nv_wr32(device, 0x610704 + (id * 0x10), data);
100  nv_mask(device, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
101  if (!nv_wait(device, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
102  ret = -EBUSY;
103  nv_mask(device, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
104  return ret;
105 }
106 
107 static u32 *
108 evo_wait(struct drm_device *dev, int id, int nr)
109 {
110  struct nouveau_device *device = nouveau_dev(dev);
111  struct nouveau_drm *drm = nouveau_drm(dev);
112  struct nvd0_display *disp = nvd0_display(dev);
113  u32 put = nv_rd32(device, 0x640000 + (id * 0x1000)) / 4;
114 
115  if (put + nr >= (PAGE_SIZE / 4)) {
116  disp->evo[id].ptr[put] = 0x20000000;
117 
118  nv_wr32(device, 0x640000 + (id * 0x1000), 0x00000000);
119  if (!nv_wait(device, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
120  NV_ERROR(drm, "evo %d dma stalled\n", id);
121  return NULL;
122  }
123 
124  put = 0;
125  }
126 
127  return disp->evo[id].ptr + put;
128 }
129 
130 static void
131 evo_kick(u32 *push, struct drm_device *dev, int id)
132 {
133  struct nouveau_device *device = nouveau_dev(dev);
134  struct nvd0_display *disp = nvd0_display(dev);
135 
136  nv_wr32(device, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
137 }
138 
/* Emit an EVO method header into the push buffer: word count in bits
 * 18+, method offset in the low bits. */
139 #define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
/* Emit one data word for the preceding method. */
140 #define evo_data(p,d) *((p)++) = (d)
141 
142 static int
143 evo_init_dma(struct drm_device *dev, int ch)
144 {
145  struct nouveau_device *device = nouveau_dev(dev);
146  struct nouveau_drm *drm = nouveau_drm(dev);
147  struct nvd0_display *disp = nvd0_display(dev);
148  u32 flags;
149 
150  flags = 0x00000000;
151  if (ch == EVO_MASTER)
152  flags |= 0x01000000;
153 
154  nv_wr32(device, 0x610494 + (ch * 0x0010), (disp->evo[ch].handle >> 8) | 3);
155  nv_wr32(device, 0x610498 + (ch * 0x0010), 0x00010000);
156  nv_wr32(device, 0x61049c + (ch * 0x0010), 0x00000001);
157  nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000010);
158  nv_wr32(device, 0x640000 + (ch * 0x1000), 0x00000000);
159  nv_wr32(device, 0x610490 + (ch * 0x0010), 0x00000013 | flags);
160  if (!nv_wait(device, 0x610490 + (ch * 0x0010), 0x80000000, 0x00000000)) {
161  NV_ERROR(drm, "PDISP: ch%d 0x%08x\n", ch,
162  nv_rd32(device, 0x610490 + (ch * 0x0010)));
163  return -EBUSY;
164  }
165 
166  nv_mask(device, 0x610090, (1 << ch), (1 << ch));
167  nv_mask(device, 0x6100a0, (1 << ch), (1 << ch));
168  return 0;
169 }
170 
171 static void
172 evo_fini_dma(struct drm_device *dev, int ch)
173 {
174  struct nouveau_device *device = nouveau_dev(dev);
175 
176  if (!(nv_rd32(device, 0x610490 + (ch * 0x0010)) & 0x00000010))
177  return;
178 
179  nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000000);
180  nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000003, 0x00000000);
181  nv_wait(device, 0x610490 + (ch * 0x0010), 0x80000000, 0x00000000);
182  nv_mask(device, 0x610090, (1 << ch), 0x00000000);
183  nv_mask(device, 0x6100a0, (1 << ch), 0x00000000);
184 }
185 
186 static inline void
187 evo_piow(struct drm_device *dev, int ch, u16 mthd, u32 data)
188 {
189  struct nouveau_device *device = nouveau_dev(dev);
190  nv_wr32(device, 0x640000 + (ch * 0x1000) + mthd, data);
191 }
192 
193 static int
194 evo_init_pio(struct drm_device *dev, int ch)
195 {
196  struct nouveau_device *device = nouveau_dev(dev);
197  struct nouveau_drm *drm = nouveau_drm(dev);
198 
199  nv_wr32(device, 0x610490 + (ch * 0x0010), 0x00000001);
200  if (!nv_wait(device, 0x610490 + (ch * 0x0010), 0x00010000, 0x00010000)) {
201  NV_ERROR(drm, "PDISP: ch%d 0x%08x\n", ch,
202  nv_rd32(device, 0x610490 + (ch * 0x0010)));
203  return -EBUSY;
204  }
205 
206  nv_mask(device, 0x610090, (1 << ch), (1 << ch));
207  nv_mask(device, 0x6100a0, (1 << ch), (1 << ch));
208  return 0;
209 }
210 
211 static void
212 evo_fini_pio(struct drm_device *dev, int ch)
213 {
214  struct nouveau_device *device = nouveau_dev(dev);
215 
216  if (!(nv_rd32(device, 0x610490 + (ch * 0x0010)) & 0x00000001))
217  return;
218 
219  nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000010);
220  nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000001, 0x00000000);
221  nv_wait(device, 0x610490 + (ch * 0x0010), 0x00010000, 0x00000000);
222  nv_mask(device, 0x610090, (1 << ch), 0x00000000);
223  nv_mask(device, 0x6100a0, (1 << ch), 0x00000000);
224 }
225 
226 static bool
227 evo_sync_wait(void *data)
228 {
229  return nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000;
230 }
231 
232 static int
233 evo_sync(struct drm_device *dev, int ch)
234 {
235  struct nouveau_device *device = nouveau_dev(dev);
236  struct nvd0_display *disp = nvd0_display(dev);
237  u32 *push = evo_wait(dev, ch, 8);
238  if (push) {
239  nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
240  evo_mthd(push, 0x0084, 1);
241  evo_data(push, 0x80000000 | EVO_MAST_NTFY);
242  evo_mthd(push, 0x0080, 2);
243  evo_data(push, 0x00000000);
244  evo_data(push, 0x00000000);
245  evo_kick(push, dev, ch);
246  if (nv_wait_cb(device, evo_sync_wait, disp->sync))
247  return 0;
248  }
249 
250  return -EBUSY;
251 }
252 
253 /******************************************************************************
254  * Page flipping channel
255  *****************************************************************************/
/* Return the display engine's shared sync/semaphore buffer object.
 * NOTE(review): the declarator line (doxygen line 257) was lost in
 * extraction; the body implies at least a "struct drm_device *dev"
 * parameter -- restore the exact signature from the original source. */
256 struct nouveau_bo *
258 {
259  return nvd0_display(dev)->sync;
260 }
261 
262 void
264 {
265  struct nvd0_display *disp = nvd0_display(crtc->dev);
266  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
267  struct evo *evo = &disp->evo[EVO_FLIP(nv_crtc->index)];
268  u32 *push;
269 
270  push = evo_wait(crtc->dev, evo->idx, 8);
271  if (push) {
272  evo_mthd(push, 0x0084, 1);
273  evo_data(push, 0x00000000);
274  evo_mthd(push, 0x0094, 1);
275  evo_data(push, 0x00000000);
276  evo_mthd(push, 0x00c0, 1);
277  evo_data(push, 0x00000000);
278  evo_mthd(push, 0x0080, 1);
279  evo_data(push, 0x00000000);
280  evo_kick(push, crtc->dev, evo->idx);
281  }
282 }
283 
284 int
286  struct nouveau_channel *chan, u32 swap_interval)
287 {
288  struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
289  struct nvd0_display *disp = nvd0_display(crtc->dev);
290  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
291  struct evo *evo = &disp->evo[EVO_FLIP(nv_crtc->index)];
292  u64 offset;
293  u32 *push;
294  int ret;
295 
296  swap_interval <<= 4;
297  if (swap_interval == 0)
298  swap_interval |= 0x100;
299 
300  push = evo_wait(crtc->dev, evo->idx, 128);
301  if (unlikely(push == NULL))
302  return -EBUSY;
303 
304  /* synchronise with the rendering channel, if necessary */
305  if (likely(chan)) {
306  ret = RING_SPACE(chan, 10);
307  if (ret)
308  return ret;
309 
310 
311  offset = nvc0_fence_crtc(chan, nv_crtc->index);
312  offset += evo->sem.offset;
313 
314  BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
315  OUT_RING (chan, upper_32_bits(offset));
316  OUT_RING (chan, lower_32_bits(offset));
317  OUT_RING (chan, 0xf00d0000 | evo->sem.value);
318  OUT_RING (chan, 0x1002);
319  BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
320  OUT_RING (chan, upper_32_bits(offset));
321  OUT_RING (chan, lower_32_bits(offset ^ 0x10));
322  OUT_RING (chan, 0x74b1e000);
323  OUT_RING (chan, 0x1001);
324  FIRE_RING (chan);
325  } else {
326  nouveau_bo_wr32(disp->sync, evo->sem.offset / 4,
327  0xf00d0000 | evo->sem.value);
328  evo_sync(crtc->dev, EVO_MASTER);
329  }
330 
331  /* queue the flip */
332  evo_mthd(push, 0x0100, 1);
333  evo_data(push, 0xfffe0000);
334  evo_mthd(push, 0x0084, 1);
335  evo_data(push, swap_interval);
336  if (!(swap_interval & 0x00000100)) {
337  evo_mthd(push, 0x00e0, 1);
338  evo_data(push, 0x40000000);
339  }
340  evo_mthd(push, 0x0088, 4);
341  evo_data(push, evo->sem.offset);
342  evo_data(push, 0xf00d0000 | evo->sem.value);
343  evo_data(push, 0x74b1e000);
344  evo_data(push, NvEvoSync);
345  evo_mthd(push, 0x00a0, 2);
346  evo_data(push, 0x00000000);
347  evo_data(push, 0x00000000);
348  evo_mthd(push, 0x00c0, 1);
349  evo_data(push, nv_fb->r_dma);
350  evo_mthd(push, 0x0110, 2);
351  evo_data(push, 0x00000000);
352  evo_data(push, 0x00000000);
353  evo_mthd(push, 0x0400, 5);
354  evo_data(push, nv_fb->nvbo->bo.offset >> 8);
355  evo_data(push, 0);
356  evo_data(push, (fb->height << 16) | fb->width);
357  evo_data(push, nv_fb->r_pitch);
358  evo_data(push, nv_fb->r_format);
359  evo_mthd(push, 0x0080, 1);
360  evo_data(push, 0x00000000);
361  evo_kick(push, crtc->dev, evo->idx);
362 
363  evo->sem.offset ^= 0x10;
364  evo->sem.value++;
365  return 0;
366 }
367 
368 /******************************************************************************
369  * CRTC
370  *****************************************************************************/
/* Program the dithering mode/depth for @nv_crtc's head, optionally
 * committing immediately when @update is true.  Always returns 0. */
371 static int
372 nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
373 {
374  struct nouveau_drm *drm = nouveau_drm(nv_crtc->base.dev);
375  struct drm_device *dev = nv_crtc->base.dev;
376  struct nouveau_connector *nv_connector;
377  struct drm_connector *connector;
378  u32 *push, mode = 0x00;
379  u32 mthd;
380 
381  nv_connector = nouveau_crtc_connector_get(nv_crtc);
382  connector = &nv_connector->base;
383  if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
384  if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
/* NOTE(review): the body of the if above (doxygen line 385, presumably
 * a "mode = ..." assignment enabling dithering) was lost in extraction
 * -- restore it from the original source. */
386  } else {
387  mode = nv_connector->dithering_mode;
388  }
389 
390  if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
391  if (connector->display_info.bpc >= 8)
392  mode |= DITHERING_DEPTH_8BPC;
393  } else {
394  mode |= nv_connector->dithering_depth;
395  }
396 
/* dither-control method offset differs between pre-NVE0 and NVE0+ */
397  if (nv_device(drm->device)->card_type < NV_E0)
398  mthd = 0x0490 + (nv_crtc->index * 0x0300);
399  else
400  mthd = 0x04a0 + (nv_crtc->index * 0x0300);
401 
402  push = evo_wait(dev, EVO_MASTER, 4);
403  if (push) {
404  evo_mthd(push, mthd, 1);
405  evo_data(push, mode);
406  if (update) {
407  evo_mthd(push, 0x0080, 1);
408  evo_data(push, 0x00000000);
409  }
410  evo_kick(push, dev, EVO_MASTER);
411  }
412 
413  return 0;
414 }
415 
416 static int
417 nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
418 {
419  struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
420  struct drm_device *dev = nv_crtc->base.dev;
421  struct drm_crtc *crtc = &nv_crtc->base;
422  struct nouveau_connector *nv_connector;
423  int mode = DRM_MODE_SCALE_NONE;
424  u32 oX, oY, *push;
425 
426  /* start off at the resolution we programmed the crtc for, this
427  * effectively handles NONE/FULL scaling
428  */
429  nv_connector = nouveau_crtc_connector_get(nv_crtc);
430  if (nv_connector && nv_connector->native_mode)
431  mode = nv_connector->scaling_mode;
432 
433  if (mode != DRM_MODE_SCALE_NONE)
434  omode = nv_connector->native_mode;
435  else
436  omode = umode;
437 
438  oX = omode->hdisplay;
439  oY = omode->vdisplay;
440  if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
441  oY *= 2;
442 
443  /* add overscan compensation if necessary, will keep the aspect
444  * ratio the same as the backend mode unless overridden by the
445  * user setting both hborder and vborder properties.
446  */
447  if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
448  (nv_connector->underscan == UNDERSCAN_AUTO &&
449  nv_connector->edid &&
450  drm_detect_hdmi_monitor(nv_connector->edid)))) {
451  u32 bX = nv_connector->underscan_hborder;
452  u32 bY = nv_connector->underscan_vborder;
453  u32 aspect = (oY << 19) / oX;
454 
455  if (bX) {
456  oX -= (bX * 2);
457  if (bY) oY -= (bY * 2);
458  else oY = ((oX * aspect) + (aspect / 2)) >> 19;
459  } else {
460  oX -= (oX >> 4) + 32;
461  if (bY) oY -= (bY * 2);
462  else oY = ((oX * aspect) + (aspect / 2)) >> 19;
463  }
464  }
465 
466  /* handle CENTER/ASPECT scaling, taking into account the areas
467  * removed already for overscan compensation
468  */
469  switch (mode) {
471  oX = min((u32)umode->hdisplay, oX);
472  oY = min((u32)umode->vdisplay, oY);
473  /* fall-through */
475  if (oY < oX) {
476  u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
477  oX = ((oY * aspect) + (aspect / 2)) >> 19;
478  } else {
479  u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
480  oY = ((oX * aspect) + (aspect / 2)) >> 19;
481  }
482  break;
483  default:
484  break;
485  }
486 
487  push = evo_wait(dev, EVO_MASTER, 8);
488  if (push) {
489  evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
490  evo_data(push, (oY << 16) | oX);
491  evo_data(push, (oY << 16) | oX);
492  evo_data(push, (oY << 16) | oX);
493  evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
494  evo_data(push, 0x00000000);
495  evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
496  evo_data(push, (umode->vdisplay << 16) | umode->hdisplay);
497  evo_kick(push, dev, EVO_MASTER);
498  if (update) {
500  nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
501  }
502  }
503 
504  return 0;
505 }
506 
507 static int
508 nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
509  int x, int y, bool update)
510 {
511  struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
512  u32 *push;
513 
514  push = evo_wait(fb->dev, EVO_MASTER, 16);
515  if (push) {
516  evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
517  evo_data(push, nvfb->nvbo->bo.offset >> 8);
518  evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
519  evo_data(push, (fb->height << 16) | fb->width);
520  evo_data(push, nvfb->r_pitch);
521  evo_data(push, nvfb->r_format);
522  evo_data(push, nvfb->r_dma);
523  evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
524  evo_data(push, (y << 16) | x);
525  if (update) {
526  evo_mthd(push, 0x0080, 1);
527  evo_data(push, 0x00000000);
528  }
529  evo_kick(push, fb->dev, EVO_MASTER);
530  }
531 
532  nv_crtc->fb.tile_flags = nvfb->r_dma;
533  return 0;
534 }
535 
536 static void
537 nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
538 {
539  struct drm_device *dev = nv_crtc->base.dev;
540  u32 *push = evo_wait(dev, EVO_MASTER, 16);
541  if (push) {
542  if (show) {
543  evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
544  evo_data(push, 0x85000000);
545  evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
546  evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
547  evo_data(push, NvEvoVRAM);
548  } else {
549  evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
550  evo_data(push, 0x05000000);
551  evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
552  evo_data(push, 0x00000000);
553  }
554 
555  if (update) {
556  evo_mthd(push, 0x0080, 1);
557  evo_data(push, 0x00000000);
558  }
559 
560  evo_kick(push, dev, EVO_MASTER);
561  }
562 }
563 
/* CRTC-level DPMS is a no-op on nvd0; power management is handled by
 * the encoder dpms callbacks. */
564 static void
565 nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
566 {
567 }
568 
/* Quiesce @crtc ahead of a modeset: detach the framebuffer DMA object,
 * blank the head, and hide the cursor (without an immediate commit). */
569 static void
570 nvd0_crtc_prepare(struct drm_crtc *crtc)
571 {
572  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
573  u32 *push;
574 
/* NOTE(review): doxygen line 575 was lost in extraction -- by analogy
 * with the other modeset paths it likely stopped the flip channel here;
 * confirm against the original source. */
576 
577  push = evo_wait(crtc->dev, EVO_MASTER, 2);
578  if (push) {
579  evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
580  evo_data(push, 0x00000000);
581  evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
582  evo_data(push, 0x03000000);
583  evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
584  evo_data(push, 0x00000000);
585  evo_kick(push, crtc->dev, EVO_MASTER);
586  }
587 
588  nvd0_crtc_cursor_show(nv_crtc, false, false);
589 }
590 
591 static void
592 nvd0_crtc_commit(struct drm_crtc *crtc)
593 {
594  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
595  u32 *push;
596 
597  push = evo_wait(crtc->dev, EVO_MASTER, 32);
598  if (push) {
599  evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
600  evo_data(push, nv_crtc->fb.tile_flags);
601  evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
602  evo_data(push, 0x83000000);
603  evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
604  evo_data(push, 0x00000000);
605  evo_data(push, 0x00000000);
606  evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
607  evo_data(push, NvEvoVRAM);
608  evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
609  evo_data(push, 0xffffff00);
610  evo_kick(push, crtc->dev, EVO_MASTER);
611  }
612 
613  nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
614  nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
615 }
616 
/* No CRTC-level fixups are required; accept the adjusted mode as-is. */
617 static bool
618 nvd0_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
619  struct drm_display_mode *adjusted_mode)
620 {
621  return true;
622 }
623 
624 static int
625 nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
626 {
627  struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
628  int ret;
629 
630  ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
631  if (ret)
632  return ret;
633 
634  if (old_fb) {
635  nvfb = nouveau_framebuffer(old_fb);
636  nouveau_bo_unpin(nvfb->nvbo);
637  }
638 
639  return 0;
640 }
641 
/* drm_crtc_helper mode_set: derive and program the head's raster
 * timings from @mode, pin the new framebuffer, then (re)apply dither,
 * scale and image state without committing (commit() kicks it all). */
642 static int
643 nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
644  struct drm_display_mode *mode, int x, int y,
645  struct drm_framebuffer *old_fb)
646 {
647  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
648  struct nouveau_connector *nv_connector;
649  u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
650  u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
651  u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
652  u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
653  u32 vblan2e = 0, vblan2s = 1;
654  u32 *push;
655  int ret;
656 
/* horizontal timing parameters, in pixels */
657  hactive = mode->htotal;
658  hsynce = mode->hsync_end - mode->hsync_start - 1;
659  hbackp = mode->htotal - mode->hsync_end;
660  hblanke = hsynce + hbackp;
661  hfrontp = mode->hsync_start - mode->hdisplay;
662  hblanks = mode->htotal - hfrontp - 1;
663 
/* vertical timings, scaled by doublescan/interlace factors */
664  vactive = mode->vtotal * vscan / ilace;
665  vsynce = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
666  vbackp = (mode->vtotal - mode->vsync_end) * vscan / ilace;
667  vblanke = vsynce + vbackp;
668  vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
669  vblanks = vactive - vfrontp - 1;
/* interlaced modes get a second blanking window */
670  if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
671  vblan2e = vactive + vsynce + vbackp;
672  vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
673  vactive = (vactive * 2) + 1;
674  }
675 
676  ret = nvd0_crtc_swap_fbs(crtc, old_fb);
677  if (ret)
678  return ret;
679 
680  push = evo_wait(crtc->dev, EVO_MASTER, 64);
681  if (push) {
682  evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
683  evo_data(push, 0x00000000);
684  evo_data(push, (vactive << 16) | hactive);
685  evo_data(push, ( vsynce << 16) | hsynce);
686  evo_data(push, (vblanke << 16) | hblanke);
687  evo_data(push, (vblanks << 16) | hblanks);
688  evo_data(push, (vblan2e << 16) | vblan2s);
689  evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
690  evo_data(push, 0x00000000); /* ??? */
691  evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
692  evo_data(push, mode->clock * 1000);
693  evo_data(push, 0x00200000); /* ??? */
694  evo_data(push, mode->clock * 1000);
695  evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
696  evo_data(push, 0x00000311);
697  evo_data(push, 0x00000100);
698  evo_kick(push, crtc->dev, EVO_MASTER);
699  }
700 
701  nv_connector = nouveau_crtc_connector_get(nv_crtc);
702  nvd0_crtc_set_dither(nv_crtc, false);
703  nvd0_crtc_set_scale(nv_crtc, false);
704  nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
705  return 0;
706 }
707 
/* drm_crtc_helper mode_set_base: swap the pinned framebuffers and
 * update scanout for the new @x/@y pan offset, committing immediately
 * and restarting page flipping. */
708 static int
709 nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
710  struct drm_framebuffer *old_fb)
711 {
712  struct nouveau_drm *drm = nouveau_drm(crtc->dev);
713  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
714  int ret;
715 
716  if (!crtc->fb) {
717  NV_DEBUG(drm, "No FB bound\n");
718  return 0;
719  }
720 
721  ret = nvd0_crtc_swap_fbs(crtc, old_fb);
722  if (ret)
723  return ret;
724 
/* NOTE(review): doxygen line 725 was lost in extraction (likely a
 * flip-channel stop paired with the flip_next below); confirm against
 * the original source. */
726  nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
727  nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
728  return 0;
729 }
730 
/* drm_crtc_helper mode_set_base_atomic (non-sleeping path, e.g. kernel
 * debugger/panic output): switch scanout to @fb immediately. */
731 static int
732 nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
733  struct drm_framebuffer *fb, int x, int y,
734  enum mode_set_atomic state)
735 {
736  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
/* NOTE(review): doxygen line 737 was lost in extraction -- confirm
 * against the original source. */
738  nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
739  return 0;
740 }
741 
742 static void
743 nvd0_crtc_lut_load(struct drm_crtc *crtc)
744 {
745  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
746  void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
747  int i;
748 
749  for (i = 0; i < 256; i++) {
750  writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
751  writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
752  writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
753  }
754 }
755 
756 static int
757 nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
759 {
760  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
761  struct drm_device *dev = crtc->dev;
762  struct drm_gem_object *gem;
763  struct nouveau_bo *nvbo;
764  bool visible = (handle != 0);
765  int i, ret = 0;
766 
767  if (visible) {
768  if (width != 64 || height != 64)
769  return -EINVAL;
770 
771  gem = drm_gem_object_lookup(dev, file_priv, handle);
772  if (unlikely(!gem))
773  return -ENOENT;
774  nvbo = nouveau_gem_object(gem);
775 
776  ret = nouveau_bo_map(nvbo);
777  if (ret == 0) {
778  for (i = 0; i < 64 * 64; i++) {
779  u32 v = nouveau_bo_rd32(nvbo, i);
780  nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
781  }
782  nouveau_bo_unmap(nvbo);
783  }
784 
785  drm_gem_object_unreference_unlocked(gem);
786  }
787 
788  if (visible != nv_crtc->cursor.visible) {
789  nvd0_crtc_cursor_show(nv_crtc, visible, true);
790  nv_crtc->cursor.visible = visible;
791  }
792 
793  return ret;
794 }
795 
796 static int
797 nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
798 {
799  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
800  int ch = EVO_CURS(nv_crtc->index);
801 
802  evo_piow(crtc->dev, ch, 0x0084, (y << 16) | (x & 0xffff));
803  evo_piow(crtc->dev, ch, 0x0080, 0x00000000);
804  return 0;
805 }
806 
807 static void
808 nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
810 {
811  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
812  u32 end = max(start + size, (u32)256);
813  u32 i;
814 
815  for (i = start; i < end; i++) {
816  nv_crtc->lut.r[i] = r[i];
817  nv_crtc->lut.g[i] = g[i];
818  nv_crtc->lut.b[i] = b[i];
819  }
820 
821  nvd0_crtc_lut_load(crtc);
822 }
823 
824 static void
825 nvd0_crtc_destroy(struct drm_crtc *crtc)
826 {
827  struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
828  nouveau_bo_unmap(nv_crtc->cursor.nvbo);
829  nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
830  nouveau_bo_unmap(nv_crtc->lut.nvbo);
831  nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
832  drm_crtc_cleanup(crtc);
833  kfree(crtc);
834 }
835 
/* drm_crtc helper vtable for nvd0 CRTCs. */
836 static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
837  .dpms = nvd0_crtc_dpms,
838  .prepare = nvd0_crtc_prepare,
839  .commit = nvd0_crtc_commit,
840  .mode_fixup = nvd0_crtc_mode_fixup,
841  .mode_set = nvd0_crtc_mode_set,
842  .mode_set_base = nvd0_crtc_mode_set_base,
843  .mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
844  .load_lut = nvd0_crtc_lut_load,
845 };
846 
/* Core drm_crtc vtable for nvd0 CRTCs; page flips use the shared
 * nouveau implementation. */
847 static const struct drm_crtc_funcs nvd0_crtc_func = {
848  .cursor_set = nvd0_crtc_cursor_set,
849  .cursor_move = nvd0_crtc_cursor_move,
850  .gamma_set = nvd0_crtc_gamma_set,
851  .set_config = drm_crtc_helper_set_config,
852  .destroy = nvd0_crtc_destroy,
853  .page_flip = nouveau_crtc_page_flip,
854 };
855 
/* No-op hook: cursor position on nvd0 is programmed through
 * nvd0_crtc_cursor_move() instead. */
856 static void
857 nvd0_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
858 {
859 }
860 
/* No-op hook: the cursor buffer offset is programmed in
 * nvd0_crtc_cursor_show() rather than via this callback. */
861 static void
862 nvd0_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
863 {
864 }
865 
/* Allocate and register CRTC @index: install the nvd0 callbacks, seed
 * an identity gamma ramp, and create VRAM-pinned, mapped cursor
 * (64x64x4) and LUT (8KiB) buffer objects.  On any failure the
 * partially-constructed CRTC is torn down via nvd0_crtc_destroy().
 * Returns 0 on success or a negative errno. */
866 static int
867 nvd0_crtc_create(struct drm_device *dev, int index)
868 {
869  struct nouveau_crtc *nv_crtc;
870  struct drm_crtc *crtc;
871  int ret, i;
872 
873  nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
874  if (!nv_crtc)
875  return -ENOMEM;
876 
877  nv_crtc->index = index;
878  nv_crtc->set_dither = nvd0_crtc_set_dither;
879  nv_crtc->set_scale = nvd0_crtc_set_scale;
880  nv_crtc->cursor.set_offset = nvd0_cursor_set_offset;
881  nv_crtc->cursor.set_pos = nvd0_cursor_set_pos;
/* identity ramp as the initial LUT contents */
882  for (i = 0; i < 256; i++) {
883  nv_crtc->lut.r[i] = i << 8;
884  nv_crtc->lut.g[i] = i << 8;
885  nv_crtc->lut.b[i] = i << 8;
886  }
887 
888  crtc = &nv_crtc->base;
889  drm_crtc_init(dev, crtc, &nvd0_crtc_func);
890  drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
891  drm_mode_crtc_set_gamma_size(crtc, 256);
892 
893  ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
894  0, 0x0000, NULL, &nv_crtc->cursor.nvbo);
895  if (!ret) {
896  ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
897  if (!ret)
898  ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
/* NOTE(review): if the map fails after a successful pin, the buffer is
 * released without an unpin (likewise for the LUT bo below) -- this
 * looks like a pin leak on the error path; confirm and fix upstream. */
899  if (ret)
900  nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
901  }
902 
903  if (ret)
904  goto out;
905 
906  ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
907  0, 0x0000, NULL, &nv_crtc->lut.nvbo);
908  if (!ret) {
909  ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
910  if (!ret)
911  ret = nouveau_bo_map(nv_crtc->lut.nvbo);
912  if (ret)
913  nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
914  }
915 
916  if (ret)
917  goto out;
918 
919  nvd0_crtc_lut_load(crtc);
920 
921 out:
922  if (ret)
923  nvd0_crtc_destroy(crtc);
924  return ret;
925 }
926 
927 /******************************************************************************
928  * DAC
929  *****************************************************************************/
930 static void
931 nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
932 {
933  struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
934  struct drm_device *dev = encoder->dev;
935  struct nouveau_device *device = nouveau_dev(dev);
936  int or = nv_encoder->or;
937  u32 dpms_ctrl;
938 
939  dpms_ctrl = 0x80000000;
940  if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
941  dpms_ctrl |= 0x00000001;
942  if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
943  dpms_ctrl |= 0x00000004;
944 
945  nv_wait(device, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
946  nv_mask(device, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
947  nv_wait(device, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
948 }
949 
950 static bool
951 nvd0_dac_mode_fixup(struct drm_encoder *encoder,
952  const struct drm_display_mode *mode,
953  struct drm_display_mode *adjusted_mode)
954 {
955  struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
956  struct nouveau_connector *nv_connector;
957 
958  nv_connector = nouveau_encoder_connector_get(nv_encoder);
959  if (nv_connector && nv_connector->native_mode) {
960  if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
961  int id = adjusted_mode->base.id;
962  *adjusted_mode = *nv_connector->native_mode;
963  adjusted_mode->base.id = id;
964  }
965  }
966 
967  return true;
968 }
969 
/* Nothing to do at commit time; DAC routing is programmed in
 * nvd0_dac_mode_set(). */
970 static void
971 nvd0_dac_commit(struct drm_encoder *encoder)
972 {
973 }
974 
975 static void
976 nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
977  struct drm_display_mode *adjusted_mode)
978 {
979  struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
980  struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
981  u32 syncs, magic, *push;
982 
983  syncs = 0x00000001;
984  if (mode->flags & DRM_MODE_FLAG_NHSYNC)
985  syncs |= 0x00000008;
986  if (mode->flags & DRM_MODE_FLAG_NVSYNC)
987  syncs |= 0x00000010;
988 
989  magic = 0x31ec6000 | (nv_crtc->index << 25);
990  if (mode->flags & DRM_MODE_FLAG_INTERLACE)
991  magic |= 0x00000001;
992 
993  nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);
994 
995  push = evo_wait(encoder->dev, EVO_MASTER, 8);
996  if (push) {
997  evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
998  evo_data(push, syncs);
999  evo_data(push, magic);
1000  evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 2);
1001  evo_data(push, 1 << nv_crtc->index);
1002  evo_data(push, 0x00ff);
1003  evo_kick(push, encoder->dev, EVO_MASTER);
1004  }
1005 
1006  nv_encoder->crtc = encoder->crtc;
1007 }
1008 
1009 static void
1010 nvd0_dac_disconnect(struct drm_encoder *encoder)
1011 {
1012  struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1013  struct drm_device *dev = encoder->dev;
1014  u32 *push;
1015 
1016  if (nv_encoder->crtc) {
1017  nvd0_crtc_prepare(nv_encoder->crtc);
1018 
1019  push = evo_wait(dev, EVO_MASTER, 4);
1020  if (push) {
1021  evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
1022  evo_data(push, 0x00000000);
1023  evo_mthd(push, 0x0080, 1);
1024  evo_data(push, 0x00000000);
1025  evo_kick(push, dev, EVO_MASTER);
1026  }
1027 
1028  nv_encoder->crtc = NULL;
1029  }
1030 }
1031 
1032 static enum drm_connector_status
1033 nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1034 {
1036  struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1037  struct drm_device *dev = encoder->dev;
1038  struct nouveau_device *device = nouveau_dev(dev);
1039  int or = nv_encoder->or;
1040  u32 load;
1041 
1042  nv_wr32(device, 0x61a00c + (or * 0x800), 0x00100000);
1043  udelay(9500);
1044  nv_wr32(device, 0x61a00c + (or * 0x800), 0x80000000);
1045 
1046  load = nv_rd32(device, 0x61a00c + (or * 0x800));
1047  if ((load & 0x38000000) == 0x38000000)
1048  status = connector_status_connected;
1049 
1050  nv_wr32(device, 0x61a00c + (or * 0x800), 0x00000000);
1051  return status;
1052 }
1053 
/* drm_encoder_funcs::destroy - unregister the DRM encoder and free the
 * allocation made in nvd0_dac_create().
 */
static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1060 
/* Helper vtable for DAC encoders.  Note ->prepare and ->disable both
 * point at nvd0_dac_disconnect: the same teardown is used before a
 * modeset as for a full disable.
 */
static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_disconnect,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};
1071 
/* Base vtable for DAC encoders; only destruction is needed here. */
static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};
1075 
1076 static int
1077 nvd0_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1078 {
1079  struct drm_device *dev = connector->dev;
1080  struct nouveau_encoder *nv_encoder;
1081  struct drm_encoder *encoder;
1082 
1083  nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1084  if (!nv_encoder)
1085  return -ENOMEM;
1086  nv_encoder->dcb = dcbe;
1087  nv_encoder->or = ffs(dcbe->or) - 1;
1088 
1089  encoder = to_drm_encoder(nv_encoder);
1090  encoder->possible_crtcs = dcbe->heads;
1091  encoder->possible_clones = 0;
1092  drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
1093  drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
1094 
1095  drm_mode_connector_attach_encoder(connector, encoder);
1096  return 0;
1097 }
1098 
1099 /******************************************************************************
1100  * Audio
1101  *****************************************************************************/
/* Enable audio on an OR and upload the monitor's ELD.  Does nothing if
 * the connector's EDID doesn't advertise audio support.
 *
 * The 0x10ec00 register takes the ELD byte in bits 0-7 and its index
 * in bits 8+; eld[2] holds the ELD baseline size in 4-byte units, and
 * the remainder of the 0x60-byte window is zero-filled.
 */
static void
nvd0_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;
	struct drm_device *dev = encoder->dev;
	struct nouveau_device *device = nouveau_dev(dev);
	int i, or = nv_encoder->or * 0x30;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	/* enable the audio function for this OR */
	nv_mask(device, 0x10ec10 + or, 0x80000003, 0x80000001);

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	if (nv_connector->base.eld[0]) {
		u8 *eld = nv_connector->base.eld;

		/* write ELD bytes, then pad the window with zeroes */
		for (i = 0; i < eld[2] * 4; i++)
			nv_wr32(device, 0x10ec00 + or, (i << 8) | eld[i]);
		for (i = eld[2] * 4; i < 0x60; i++)
			nv_wr32(device, 0x10ec00 + or, (i << 8) | 0x00);

		/* mark the ELD as valid */
		nv_mask(device, 0x10ec10 + or, 0x80000002, 0x80000002);
	}
}
1129 
1130 static void
1131 nvd0_audio_disconnect(struct drm_encoder *encoder)
1132 {
1133  struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1134  struct drm_device *dev = encoder->dev;
1135  struct nouveau_device *device = nouveau_dev(dev);
1136  int or = nv_encoder->or * 0x30;
1137 
1138  nv_mask(device, 0x10ec10 + or, 0x80000003, 0x80000000);
1139 }
1140 
1141 /******************************************************************************
1142  * HDMI
1143  *****************************************************************************/
/* Enable HDMI on a head: program a (fixed-payload) AVI InfoFrame, a
 * second, unidentified InfoFrame, and HDMI_CTRL with the keepout/rekey
 * value and the maximum audio packets that fit in the hblank period.
 * No-op for monitors whose EDID isn't HDMI.
 */
static void
nvd0_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_device *dev = encoder->dev;
	struct nouveau_device *device = nouveau_dev(dev);
	int head = nv_crtc->index * 0x800;
	u32 rekey = 56; /* binary driver, and tegra constant */
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	/* audio packets per hblank: (hblank - rekey - 18) / 32 */
	max_ac_packet = mode->htotal - mode->hdisplay;
	max_ac_packet -= rekey;
	max_ac_packet -= 18; /* constant from tegra */
	max_ac_packet /= 32;

	/* AVI InfoFrame: disable, load payload, re-enable */
	nv_mask(device, 0x616714 + head, 0x00000001, 0x00000000);
	nv_wr32(device, 0x61671c + head, 0x000d0282);
	nv_wr32(device, 0x616720 + head, 0x0000006f);
	nv_wr32(device, 0x616724 + head, 0x00000000);
	nv_wr32(device, 0x616728 + head, 0x00000000);
	nv_wr32(device, 0x61672c + head, 0x00000000);
	nv_mask(device, 0x616714 + head, 0x00000001, 0x00000001);

	/* ??? InfoFrame? */
	nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000000);
	nv_wr32(device, 0x6167ac + head, 0x00000010);
	nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000001);

	/* HDMI_CTRL: enable bit, rekey in bits 0-6, packet count at 16 */
	nv_mask(device, 0x616798 + head, 0x401f007f, 0x40000000 | rekey |
						     max_ac_packet << 16);

	/* NFI, audio doesn't work without it though.. */
	nv_mask(device, 0x616548 + head, 0x00000070, 0x00000000);

	nvd0_audio_mode_set(encoder, mode);
}
1188 
1189 static void
1190 nvd0_hdmi_disconnect(struct drm_encoder *encoder)
1191 {
1192  struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1193  struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1194  struct drm_device *dev = encoder->dev;
1195  struct nouveau_device *device = nouveau_dev(dev);
1196  int head = nv_crtc->index * 0x800;
1197 
1198  nvd0_audio_disconnect(encoder);
1199 
1200  nv_mask(device, 0x616798 + head, 0x40000000, 0x00000000);
1201  nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000000);
1202  nv_mask(device, 0x616714 + head, 0x00000001, 0x00000000);
1203 }
1204 
1205 /******************************************************************************
1206  * SOR
1207  *****************************************************************************/
1208 static inline u32
1209 nvd0_sor_dp_lane_map(struct drm_device *dev, struct dcb_output *dcb, u8 lane)
1210 {
1211  static const u8 nvd0[] = { 16, 8, 0, 24 };
1212  return nvd0[lane];
1213 }
1214 
1215 static void
1216 nvd0_sor_dp_train_set(struct drm_device *dev, struct dcb_output *dcb, u8 pattern)
1217 {
1218  struct nouveau_device *device = nouveau_dev(dev);
1219  const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1220  const u32 loff = (or * 0x800) + (link * 0x80);
1221  nv_mask(device, 0x61c110 + loff, 0x0f0f0f0f, 0x01010101 * pattern);
1222 }
1223 
/* Apply voltage-swing / pre-emphasis drive settings for one DP lane.
 *
 * The (swing, preem) pair is flattened into a single index: each swing
 * level reserves a run of pre-emphasis entries (0/4/7/9 offsets), which
 * then indexes the VBIOS DP table (version 0x30 or 0x40; other versions
 * are unsupported and logged).  The config bytes found there are written
 * into this lane's byte of the drive registers, plus a shared field of
 * 0x61c130.
 */
static void
nvd0_sor_dp_train_adj(struct drm_device *dev, struct dcb_output *dcb,
		      u8 lane, u8 swing, u8 preem)
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
	const u32 loff = (or * 0x800) + (link * 0x80);
	u32 shift = nvd0_sor_dp_lane_map(dev, dcb, lane);
	u32 mask = 0x000000ff << shift;
	u8 *table, *entry, *config = NULL;

	/* flatten (swing, preem) into one table index */
	switch (swing) {
	case 0: preem += 0; break;
	case 1: preem += 4; break;
	case 2: preem += 7; break;
	case 3: preem += 9; break;
	}

	table = nouveau_dp_bios_data(dev, dcb, &entry);
	if (table) {
		if (table[0] == 0x30) {
			/* v0x30: entries start at entry + table[4],
			 * table[5] bytes each */
			config = entry + table[4];
			config += table[5] * preem;
		} else
		if (table[0] == 0x40) {
			/* v0x40: entries follow the header + lane table */
			config = table + table[1];
			config += table[2] * table[3];
			config += table[6] * preem;
		}
	}

	if (!config) {
		NV_ERROR(drm, "PDISP: unsupported DP table for chipset\n");
		return;
	}

	/* config[1]/config[2] drive this lane's byte of 0x61c118/0x61c120;
	 * config[3] goes into bits 8-15 of 0x61c130 (not per-lane) */
	nv_mask(device, 0x61c118 + loff, mask, config[1] << shift);
	nv_mask(device, 0x61c120 + loff, mask, config[2] << shift);
	nv_mask(device, 0x61c130 + loff, 0x0000ff00, config[3] << 8);
	/* zero-mask RMW: rewrites the register unchanged — presumably a
	 * flush/latch of the settings above; NOTE(review): confirm */
	nv_mask(device, 0x61c13c + loff, 0x00000000, 0x00000000);
}
1266 
/* Program the DP link configuration for an OR: link rate (in units of
 * 27000 x 10kHz, i.e. 0.27GHz steps), lane-enable masks, and optional
 * enhanced framing.  Also runs a VBIOS init script for the link setup.
 */
static void
nvd0_sor_dp_link_set(struct drm_device *dev, struct dcb_output *dcb, int crtc,
		     int link_nr, u32 link_bw, bool enhframe)
{
	struct nouveau_device *device = nouveau_dev(dev);
	const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
	const u32 loff = (or * 0x800) + (link * 0x80);
	const u32 soff = (or * 0x800);
	u32 dpctrl = nv_rd32(device, 0x61c10c + loff) & ~0x001f4000;
	u32 clksor = nv_rd32(device, 0x612300 + soff) & ~0x007c0000;
	u32 script = 0x0000, lane_mask = 0;
	u8 *table, *entry;
	int i;

	/* convert to the table's units before comparing entries */
	link_bw /= 27000;

	table = nouveau_dp_bios_data(dev, dcb, &entry);
	if (table) {
		/* per-version pointer to the link-rate entry list */
		if (table[0] == 0x30) entry = ROMPTR(dev, entry[10]);
		else if (table[0] == 0x40) entry = ROMPTR(dev, entry[9]);
		else entry = NULL;

		/* find first entry covering the requested rate.
		 * NOTE(review): the walk only ever exits via the break (entry
		 * never becomes NULL here), and the matched entry isn't used
		 * afterwards — script stays 0x0000; confirm against VBIOS
		 * table layout. */
		while (entry) {
			if (entry[0] >= link_bw)
				break;
			entry += 3;
		}

		nouveau_bios_run_init_table(dev, script, dcb, crtc);
	}

	/* link rate in bits 18-22; lane-count field as a mask of lanes */
	clksor |= link_bw << 18;
	dpctrl |= ((1 << link_nr) - 1) << 16;
	if (enhframe)
		dpctrl |= 0x00004000;

	/* translate logical lanes to the hardware's lane-enable bits */
	for (i = 0; i < link_nr; i++)
		lane_mask |= 1 << (nvd0_sor_dp_lane_map(dev, dcb, i) >> 3);

	nv_wr32(device, 0x612300 + soff, clksor);
	nv_wr32(device, 0x61c10c + loff, dpctrl);
	nv_mask(device, 0x61c130 + loff, 0x0000000f, lane_mask);
}
1310 
1311 static void
1312 nvd0_sor_dp_link_get(struct drm_device *dev, struct dcb_output *dcb,
1313  u32 *link_nr, u32 *link_bw)
1314 {
1315  struct nouveau_device *device = nouveau_dev(dev);
1316  const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1317  const u32 loff = (or * 0x800) + (link * 0x80);
1318  const u32 soff = (or * 0x800);
1319  u32 dpctrl = nv_rd32(device, 0x61c10c + loff) & 0x000f0000;
1320  u32 clksor = nv_rd32(device, 0x612300 + soff);
1321 
1322  if (dpctrl > 0x00030000) *link_nr = 4;
1323  else if (dpctrl > 0x00010000) *link_nr = 2;
1324  else *link_nr = 1;
1325 
1326  *link_bw = (clksor & 0x007c0000) >> 18;
1327  *link_bw *= 27000;
1328 }
1329 
/* Compute and program the DP transfer-unit (TU) fill parameter for a
 * head, derived from the ratio of the stream's data rate to the total
 * link bandwidth (link_nr * link_bw).  All arithmetic is 64-bit with
 * do_div() to avoid overflow at the 'symbol' scaling factor.
 */
static void
nvd0_sor_dp_calc_tu(struct drm_device *dev, struct dcb_output *dcb,
		    u32 crtc, u32 datarate)
{
	struct nouveau_device *device = nouveau_dev(dev);
	const u32 symbol = 100000;	/* fixed-point scale */
	const u32 TU = 64;		/* transfer unit size */
	u32 link_nr, link_bw;
	u64 ratio, value;

	nvd0_sor_dp_link_get(dev, dcb, &link_nr, &link_bw);

	/* ratio = datarate / (link_nr * link_bw), scaled by 'symbol' */
	ratio = datarate;
	ratio *= symbol;
	do_div(ratio, link_nr * link_bw);

	/* value = (1 - ratio) * TU * ratio, descaled twice */
	value = (symbol - ratio) * TU;
	value *= ratio;
	do_div(value, symbol);
	do_div(value, symbol);

	/* +5 bias; bit 27 set — presumably an enable/valid flag,
	 * NOTE(review): confirm */
	value += 5;
	value |= 0x08000000;

	nv_wr32(device, 0x616610 + (crtc * 0x800), value);
}
1356 
/* Power-management for a SOR.  If another TMDS encoder shares the same
 * OR and is still on, the hardware is left alone (only last_dpms is
 * recorded).  Otherwise the OR power bit is toggled with busy-waits
 * around it, and DP outputs additionally go through link training via
 * nouveau_dp_dpms().
 */
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct nouveau_device *device = nouveau_dev(dev);
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	/* bail if a partner encoder on the same OR is still powered */
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	/* bit 0 = power on, bit 31 = trigger/pending */
	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(device, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(device, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(device, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(device, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		struct dp_train_func func = {
			.link_set = nvd0_sor_dp_link_set,
			.train_set = nvd0_sor_dp_train_set,
			.train_adj = nvd0_sor_dp_train_adj
		};

		nouveau_dp_dpms(encoder, mode, nv_encoder->dp.datarate, &func);
	}
}
1401 
1402 static bool
1403 nvd0_sor_mode_fixup(struct drm_encoder *encoder,
1404  const struct drm_display_mode *mode,
1405  struct drm_display_mode *adjusted_mode)
1406 {
1407  struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1408  struct nouveau_connector *nv_connector;
1409 
1410  nv_connector = nouveau_encoder_connector_get(nv_encoder);
1411  if (nv_connector && nv_connector->native_mode) {
1412  if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1413  int id = adjusted_mode->base.id;
1414  *adjusted_mode = *nv_connector->native_mode;
1415  adjusted_mode->base.id = id;
1416  }
1417  }
1418 
1419  return true;
1420 }
1421 
1422 static void
1423 nvd0_sor_disconnect(struct drm_encoder *encoder)
1424 {
1425  struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1426  struct drm_device *dev = encoder->dev;
1427  u32 *push;
1428 
1429  if (nv_encoder->crtc) {
1430  nvd0_crtc_prepare(nv_encoder->crtc);
1431 
1432  push = evo_wait(dev, EVO_MASTER, 4);
1433  if (push) {
1434  evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
1435  evo_data(push, 0x00000000);
1436  evo_mthd(push, 0x0080, 1);
1437  evo_data(push, 0x00000000);
1438  evo_kick(push, dev, EVO_MASTER);
1439  }
1440 
1441  nvd0_hdmi_disconnect(encoder);
1442 
1443  nv_encoder->crtc = NULL;
1444  nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1445  }
1446 }
1447 
/* Pre-modeset hook: fully disconnect the SOR, and for DP outputs also
 * synchronise the EVO master channel before continuing.
 */
static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
	nvd0_sor_disconnect(encoder);
	if (nouveau_encoder(encoder)->dcb->type == DCB_OUTPUT_DP)
		evo_sync(encoder->dev, EVO_MASTER);
}
1455 
/* drm_encoder_helper_funcs::commit for SORs.  Intentionally empty: all
 * programming happens in nvd0_sor_mode_set().
 */
static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}
1460 
/* Program a SOR for the given mode.  Builds the head's sync/magic
 * words, then derives the protocol field of mode_ctrl (bits 8-11) and
 * the OR configuration word per output type:
 *  - TMDS: single vs dual link by link id and pixel clock (165MHz
 *    single-link limit), plus HDMI setup;
 *  - LVDS: dual-link and 24-bit flags from VBIOS straps, SPWG EDID
 *    byte 121, or the dual-link transition clock;
 *  - DP: 6/8bpc data rate and sync extras, link id in the protocol.
 * Finally powers the OR up, programs the DP TU if needed, and pushes
 * the config through the EVO master channel.
 */
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 syncs, magic, *push;
	u32 or_config;

	/* bits 3/4 request negative h/v sync polarity */
	syncs = 0x00000001;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		syncs |= 0x00000008;
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		syncs |= 0x00000010;

	/* head index in bits 25+; bit 0 flags an interlaced mode */
	magic = 0x31ec6000 | (nv_crtc->index << 25);
	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		magic |= 0x00000001;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			/* link A: single link below 165MHz, else dual */
			if (mode->clock < 165000)
				mode_ctrl |= 0x00000100;
			else
				mode_ctrl |= 0x00000500;
		} else {
			mode_ctrl |= 0x00000200;
		}

		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (mode->clock >= 165000)
			or_config |= 0x0100;

		nvd0_hdmi_mode_set(encoder, mode);
		break;
	case DCB_OUTPUT_LVDS:
		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (bios->fp_no_ddc) {
			/* strap-only panel: trust the VBIOS flags */
			if (bios->fp.dual_link)
				or_config |= 0x0100;
			if (bios->fp.if_is_24bit)
				or_config |= 0x0200;
		} else {
			/* SPWG panels encode dual-link in EDID byte 121 */
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					or_config |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				or_config |= 0x0100;
			}

			/* 24-bit strap differs for single vs dual link */
			if (or_config & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					or_config |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					or_config |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				or_config |= 0x0200;

		}
		break;
	case DCB_OUTPUT_DP:
		/* datarate = clock * bytes-per-pixel; extra sync bits
		 * select 6bpc vs 8bpc */
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			syncs |= 0x00000002 << 6;
		} else {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			syncs |= 0x00000005 << 6;
		}

		if (nv_encoder->dcb->sorconf.link & 1)
			mode_ctrl |= 0x00000800;
		else
			mode_ctrl |= 0x00000900;

		or_config = (mode_ctrl & 0x00000f00) >> 8;
		break;
	default:
		/* encoder types are filtered at creation time */
		BUG_ON(1);
		break;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		nvd0_sor_dp_calc_tu(dev, nv_encoder->dcb, nv_crtc->index,
				    nv_encoder->dp.datarate);
	}

	push = evo_wait(dev, EVO_MASTER, 8);
	if (push) {
		/* per-head sync configuration */
		evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
		evo_data(push, syncs);
		evo_data(push, magic);
		/* SOR mode control + OR configuration */
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x020), 2);
		evo_data(push, mode_ctrl);
		evo_data(push, or_config);
		evo_kick(push, dev, EVO_MASTER);
	}

	nv_encoder->crtc = encoder->crtc;
}
1573 
/* drm_encoder_funcs::destroy - unregister the DRM encoder and free the
 * allocation made in nvd0_sor_create().
 */
static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1580 
/* Helper vtable for SOR (TMDS/LVDS/DP) encoders.  No ->detect hook
 * here (unlike the DAC vtable) — digital outputs are detected via
 * DDC/AUX at the connector level instead.
 */
static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};
1590 
/* Base vtable for SOR encoders; only destruction is needed here. */
static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};
1594 
1595 static int
1596 nvd0_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1597 {
1598  struct drm_device *dev = connector->dev;
1599  struct nouveau_encoder *nv_encoder;
1600  struct drm_encoder *encoder;
1601 
1602  nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1603  if (!nv_encoder)
1604  return -ENOMEM;
1605  nv_encoder->dcb = dcbe;
1606  nv_encoder->or = ffs(dcbe->or) - 1;
1607  nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1608 
1609  encoder = to_drm_encoder(nv_encoder);
1610  encoder->possible_crtcs = dcbe->heads;
1611  encoder->possible_clones = 0;
1612  drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
1613  drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
1614 
1615  drm_mode_connector_attach_encoder(connector, encoder);
1616  return 0;
1617 }
1618 
1619 /******************************************************************************
1620  * IRQ
1621  *****************************************************************************/
/* Translate an OR id (@id: 0-3 = DAC, 4-7 = SOR) plus its MODE_CTRL
 * value (@mc) into the matching VBIOS DCB table entry.
 *
 * For SORs, bits 8-11 of @mc encode the protocol, which determines the
 * output type and the sublink (A/B); DACs match on type and OR bit
 * alone (link stays -1 and is ignored).  Returns NULL (with an error
 * logged) for unknown protocols or when no DCB entry matches.
 */
static struct dcb_output *
lookup_dcb(struct drm_device *dev, int id, u32 mc)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	int type, or, i, link = -1;

	if (id < 4) {
		type = DCB_OUTPUT_ANALOG;
		or = id;
	} else {
		/* protocol field -> output type + sublink */
		switch (mc & 0x00000f00) {
		case 0x00000000: link = 0; type = DCB_OUTPUT_LVDS; break;
		case 0x00000100: link = 0; type = DCB_OUTPUT_TMDS; break;
		case 0x00000200: link = 1; type = DCB_OUTPUT_TMDS; break;
		case 0x00000500: link = 0; type = DCB_OUTPUT_TMDS; break;
		case 0x00000800: link = 0; type = DCB_OUTPUT_DP; break;
		case 0x00000900: link = 1; type = DCB_OUTPUT_DP; break;
		default:
			NV_ERROR(drm, "PDISP: unknown SOR mc 0x%08x\n", mc);
			return NULL;
		}

		or = id - 4;
	}

	/* scan the DCB for a matching type/OR/link entry */
	for (i = 0; i < drm->vbios.dcb.entries; i++) {
		struct dcb_output *dcb = &drm->vbios.dcb.entry[i];
		if (dcb->type == type && (dcb->or & (1 << or)) &&
		    (link < 0 || link == !(dcb->sorconf.link & 1)))
			return dcb;
	}

	NV_ERROR(drm, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
	return NULL;
}
1657 
1658 static void
1659 nvd0_display_unk1_handler(struct drm_device *dev, u32 crtc, u32 mask)
1660 {
1661  struct nouveau_device *device = nouveau_dev(dev);
1662  struct dcb_output *dcb;
1663  int i;
1664 
1665  for (i = 0; mask && i < 8; i++) {
1666  u32 mcc = nv_rd32(device, 0x640180 + (i * 0x20));
1667  if (!(mcc & (1 << crtc)))
1668  continue;
1669 
1670  dcb = lookup_dcb(dev, i, mcc);
1671  if (!dcb)
1672  continue;
1673 
1674  nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);
1675  }
1676 
1677  nv_wr32(device, 0x6101d4, 0x00000000);
1678  nv_wr32(device, 0x6109d4, 0x00000000);
1679  nv_wr32(device, 0x6101d0, 0x80000000);
1680 }
1681 
/* Second stage of the supervisor/modeset interrupt: run pre-modeset
 * scripts (state -2) for the ORs leaving @crtc, set the head's pixel
 * clock, then for the OR being attached run its config script and
 * program the OR's clock-source routing.  Finishes by acking the
 * status registers and re-arming the supervisor.
 */
static void
nvd0_display_unk2_handler(struct drm_device *dev, u32 crtc, u32 mask)
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_output *dcb;
	u32 or, tmp, pclk;
	int i;

	/* 0x640180: current OR assignments — run exit scripts */
	for (i = 0; mask && i < 8; i++) {
		u32 mcc = nv_rd32(device, 0x640180 + (i * 0x20));
		if (!(mcc & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcc);
		if (!dcb)
			continue;

		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);
	}

	/* pixel clock for this head, in kHz */
	pclk = nv_rd32(device, 0x660450 + (crtc * 0x300)) / 1000;
	NV_DEBUG(drm, "PDISP: crtc %d pclk %d mask 0x%08x\n",
		 crtc, pclk, mask);
	if (pclk && (mask & 0x00010000)) {
		nv50_crtc_set_clock(dev, crtc, pclk);
	}

	/* 0x660180/0x660184: pending OR assignment + config */
	for (i = 0; mask && i < 8; i++) {
		u32 mcp = nv_rd32(device, 0x660180 + (i * 0x20));
		u32 cfg = nv_rd32(device, 0x660184 + (i * 0x20));
		if (!(mcp & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcp);
		if (!dcb)
			continue;
		or = ffs(dcb->or) - 1;

		nouveau_bios_run_display_table(dev, cfg, pclk, dcb, crtc);

		nv_wr32(device, 0x612200 + (crtc * 0x800), 0x00000000);
		switch (dcb->type) {
		case DCB_OUTPUT_ANALOG:
			nv_wr32(device, 0x612280 + (or * 0x800), 0x00000000);
			break;
		case DCB_OUTPUT_TMDS:
		case DCB_OUTPUT_LVDS:
		case DCB_OUTPUT_DP:
			/* cfg bit 8 selects the alternate clock source */
			if (cfg & 0x00000100)
				tmp = 0x00000101;
			else
				tmp = 0x00000000;

			nv_mask(device, 0x612300 + (or * 0x800), 0x00000707, tmp);
			break;
		default:
			break;
		}

		/* only one OR is expected per head */
		break;
	}

	nv_wr32(device, 0x6101d4, 0x00000000);
	nv_wr32(device, 0x6109d4, 0x00000000);
	nv_wr32(device, 0x6101d0, 0x80000000);
}
1749 
1750 static void
1751 nvd0_display_unk4_handler(struct drm_device *dev, u32 crtc, u32 mask)
1752 {
1753  struct nouveau_device *device = nouveau_dev(dev);
1754  struct dcb_output *dcb;
1755  int pclk, i;
1756 
1757  pclk = nv_rd32(device, 0x660450 + (crtc * 0x300)) / 1000;
1758 
1759  for (i = 0; mask && i < 8; i++) {
1760  u32 mcp = nv_rd32(device, 0x660180 + (i * 0x20));
1761  u32 cfg = nv_rd32(device, 0x660184 + (i * 0x20));
1762  if (!(mcp & (1 << crtc)))
1763  continue;
1764 
1765  dcb = lookup_dcb(dev, i, mcp);
1766  if (!dcb)
1767  continue;
1768 
1769  nouveau_bios_run_display_table(dev, cfg, -pclk, dcb, crtc);
1770  }
1771 
1772  nv_wr32(device, 0x6101d4, 0x00000000);
1773  nv_wr32(device, 0x6109d4, 0x00000000);
1774  nv_wr32(device, 0x6101d0, 0x80000000);
1775 }
1776 
/* Tasklet bottom half for the three-stage modeset ("supervisor")
 * interrupt.  Finds the first crtc with pending status bits, then runs
 * whichever of the unk1/unk2/unk4 stages disp->modeset (latched by
 * nvd0_display_intr()) requests.  With DRM debugging enabled, dumps
 * the raw status and per-OR assignment registers first.
 */
static void
nvd0_display_bh(unsigned long data)
{
	struct drm_device *dev = (struct drm_device *)data;
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvd0_display *disp = nvd0_display(dev);
	u32 mask = 0, crtc = ~0;
	int i;

	if (drm_debug & (DRM_UT_DRIVER | DRM_UT_KMS)) {
		NV_INFO(drm, "PDISP: modeset req %d\n", disp->modeset);
		NV_INFO(drm, " STAT: 0x%08x 0x%08x 0x%08x\n",
			nv_rd32(device, 0x6101d0),
			nv_rd32(device, 0x6101d4), nv_rd32(device, 0x6109d4));
		for (i = 0; i < 8; i++) {
			NV_INFO(drm, " %s%d: 0x%08x 0x%08x\n",
				i < 4 ? "DAC" : "SOR", i,
				nv_rd32(device, 0x640180 + (i * 0x20)),
				nv_rd32(device, 0x660180 + (i * 0x20)));
		}
	}

	/* locate the first head with a pending status (crtc wraps from
	 * ~0 to 0 on the first iteration) */
	while (!mask && ++crtc < dev->mode_config.num_crtc)
		mask = nv_rd32(device, 0x6101d4 + (crtc * 0x800));

	if (disp->modeset & 0x00000001)
		nvd0_display_unk1_handler(dev, crtc, mask);
	if (disp->modeset & 0x00000002)
		nvd0_display_unk2_handler(dev, crtc, mask);
	if (disp->modeset & 0x00000004)
		nvd0_display_unk4_handler(dev, crtc, mask);
}
1810 
1811 void
1813 {
1814  struct nvd0_display *disp = nvd0_display(dev);
1815  struct nouveau_device *device = nouveau_dev(dev);
1816  struct nouveau_drm *drm = nouveau_drm(dev);
1817  u32 intr = nv_rd32(device, 0x610088);
1818 
1819  if (intr & 0x00000001) {
1820  u32 stat = nv_rd32(device, 0x61008c);
1821  nv_wr32(device, 0x61008c, stat);
1822  intr &= ~0x00000001;
1823  }
1824 
1825  if (intr & 0x00000002) {
1826  u32 stat = nv_rd32(device, 0x61009c);
1827  int chid = ffs(stat) - 1;
1828  if (chid >= 0) {
1829  u32 mthd = nv_rd32(device, 0x6101f0 + (chid * 12));
1830  u32 data = nv_rd32(device, 0x6101f4 + (chid * 12));
1831  u32 unkn = nv_rd32(device, 0x6101f8 + (chid * 12));
1832 
1833  NV_INFO(drm, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
1834  "0x%08x 0x%08x\n",
1835  chid, (mthd & 0x0000ffc), data, mthd, unkn);
1836  nv_wr32(device, 0x61009c, (1 << chid));
1837  nv_wr32(device, 0x6101f0 + (chid * 12), 0x90000000);
1838  }
1839 
1840  intr &= ~0x00000002;
1841  }
1842 
1843  if (intr & 0x00100000) {
1844  u32 stat = nv_rd32(device, 0x6100ac);
1845 
1846  if (stat & 0x00000007) {
1847  disp->modeset = stat;
1848  tasklet_schedule(&disp->tasklet);
1849 
1850  nv_wr32(device, 0x6100ac, (stat & 0x00000007));
1851  stat &= ~0x00000007;
1852  }
1853 
1854  if (stat) {
1855  NV_INFO(drm, "PDISP: unknown intr24 0x%08x\n", stat);
1856  nv_wr32(device, 0x6100ac, stat);
1857  }
1858 
1859  intr &= ~0x00100000;
1860  }
1861 
1862  intr &= ~0x0f000000; /* vblank, handled in core */
1863  if (intr)
1864  NV_INFO(drm, "PDISP: unknown intr 0x%08x\n", intr);
1865 }
1866 
1867 /******************************************************************************
1868  * Init
1869  *****************************************************************************/
1870 void
1872 {
1873  int i;
1874 
1875  /* fini cursors + overlays + flips */
1876  for (i = 1; i >= 0; i--) {
1877  evo_fini_pio(dev, EVO_CURS(i));
1878  evo_fini_pio(dev, EVO_OIMM(i));
1879  evo_fini_dma(dev, EVO_OVLY(i));
1880  evo_fini_dma(dev, EVO_FLIP(i));
1881  }
1882 
1883  /* fini master */
1884  evo_fini_dma(dev, EVO_MASTER);
1885 }
1886 
1887 int
1889 {
1890  struct nvd0_display *disp = nvd0_display(dev);
1891  struct nouveau_device *device = nouveau_dev(dev);
1892  struct nouveau_drm *drm = nouveau_drm(dev);
1893  int ret, i;
1894  u32 *push;
1895 
1896  if (nv_rd32(device, 0x6100ac) & 0x00000100) {
1897  nv_wr32(device, 0x6100ac, 0x00000100);
1898  nv_mask(device, 0x6194e8, 0x00000001, 0x00000000);
1899  if (!nv_wait(device, 0x6194e8, 0x00000002, 0x00000000)) {
1900  NV_ERROR(drm, "PDISP: 0x6194e8 0x%08x\n",
1901  nv_rd32(device, 0x6194e8));
1902  return -EBUSY;
1903  }
1904  }
1905 
1906  /* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
1907  * work at all unless you do the SOR part below.
1908  */
1909  for (i = 0; i < 3; i++) {
1910  u32 dac = nv_rd32(device, 0x61a000 + (i * 0x800));
1911  nv_wr32(device, 0x6101c0 + (i * 0x800), dac);
1912  }
1913 
1914  for (i = 0; i < 4; i++) {
1915  u32 sor = nv_rd32(device, 0x61c000 + (i * 0x800));
1916  nv_wr32(device, 0x6301c4 + (i * 0x800), sor);
1917  }
1918 
1919  for (i = 0; i < dev->mode_config.num_crtc; i++) {
1920  u32 crtc0 = nv_rd32(device, 0x616104 + (i * 0x800));
1921  u32 crtc1 = nv_rd32(device, 0x616108 + (i * 0x800));
1922  u32 crtc2 = nv_rd32(device, 0x61610c + (i * 0x800));
1923  nv_wr32(device, 0x6101b4 + (i * 0x800), crtc0);
1924  nv_wr32(device, 0x6101b8 + (i * 0x800), crtc1);
1925  nv_wr32(device, 0x6101bc + (i * 0x800), crtc2);
1926  }
1927 
1928  /* point at our hash table / objects, enable interrupts */
1929  nv_wr32(device, 0x610010, (disp->mem->addr >> 8) | 9);
1930  nv_mask(device, 0x6100b0, 0x00000307, 0x00000307);
1931 
1932  /* init master */
1933  ret = evo_init_dma(dev, EVO_MASTER);
1934  if (ret)
1935  goto error;
1936 
1937  /* init flips + overlays + cursors */
1938  for (i = 0; i < dev->mode_config.num_crtc; i++) {
1939  if ((ret = evo_init_dma(dev, EVO_FLIP(i))) ||
1940  (ret = evo_init_dma(dev, EVO_OVLY(i))) ||
1941  (ret = evo_init_pio(dev, EVO_OIMM(i))) ||
1942  (ret = evo_init_pio(dev, EVO_CURS(i))))
1943  goto error;
1944  }
1945 
1946  push = evo_wait(dev, EVO_MASTER, 32);
1947  if (!push) {
1948  ret = -EBUSY;
1949  goto error;
1950  }
1951  evo_mthd(push, 0x0088, 1);
1952  evo_data(push, NvEvoSync);
1953  evo_mthd(push, 0x0084, 1);
1954  evo_data(push, 0x00000000);
1955  evo_mthd(push, 0x0084, 1);
1956  evo_data(push, 0x80000000);
1957  evo_mthd(push, 0x008c, 1);
1958  evo_data(push, 0x00000000);
1959  evo_kick(push, dev, EVO_MASTER);
1960 
1961 error:
1962  if (ret)
1963  nvd0_display_fini(dev);
1964  return ret;
1965 }
1966 
1967 void
1969 {
1970  struct nvd0_display *disp = nvd0_display(dev);
1971  struct pci_dev *pdev = dev->pdev;
1972  int i;
1973 
1974  for (i = 0; i < EVO_DMA_NR; i++) {
1975  struct evo *evo = &disp->evo[i];
1976  pci_free_consistent(pdev, PAGE_SIZE, evo->ptr, evo->handle);
1977  }
1978 
1979  nouveau_gpuobj_ref(NULL, &disp->mem);
1980  nouveau_bo_unmap(disp->sync);
1981  nouveau_bo_ref(NULL, &disp->sync);
1982 
1983  nouveau_display(dev)->priv = NULL;
1984  kfree(disp);
1985 }
1986 
int
/* NOTE(review): the doc extraction dropped the declarator line here; in the
 * v3.7 kernel tree this is nvd0_display_create(struct drm_device *dev).
 *
 * One-time construction of the NVD0 display engine state: allocates the
 * per-device nvd0_display, creates a CRTC per hardware head, instantiates
 * encoders/connectors from the VBIOS DCB table, and sets up the shared
 * sync buffer plus the EVO DMA channels' hash-table/ctxdma objects.
 * Returns 0 on success or a negative errno; on failure everything built
 * so far is torn down via nvd0_display_destroy() at the 'out' label.
 */
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_bar *bar = nouveau_bar(device);
	struct nouveau_fb *pfb = nouveau_fb(device);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	/* Hand the private state to the generic nouveau display layer.
	 * NOTE(review): a few assignments (the ->dtor/->init/->fini hooks in
	 * the kernel tree, original lines 2006-2008) were elided here by the
	 * extraction — do not assume this single line is the whole setup. */
	nouveau_display(dev)->priv = disp;

	/* create crtc objects to represent the hw heads */
	/* Register 0x022448 reports the head count for this chipset —
	 * presumably a PDISP capability register; confirm against hw docs. */
	crtcs = nv_rd32(device, 0x022448);
	for (i = 0; i < crtcs; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		/* nouveau_connector_create() returns an existing connector if
		 * one was already made for this DCB connector index, so
		 * several encoders can share one connector. */
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		/* Off-chip (external) encoders are not driven by this code. */
		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(drm, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case DCB_OUTPUT_TMDS:
		case DCB_OUTPUT_LVDS:
		case DCB_OUTPUT_DP:
			/* Digital outputs all go through a SOR. */
			nvd0_sor_create(connector, dcbe);
			break;
		case DCB_OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(drm, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	tasklet_init(&disp->tasklet, nvd0_display_bh, (unsigned long)dev);

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(disp->sync);
		/* On any failure after allocation, drop our reference;
		 * nouveau_bo_ref(NULL, ...) releases and NULLs disp->sync. */
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(nv_object(device), NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

	/* create evo dma channels */
	for (i = 0; i < EVO_DMA_NR; i++) {
		struct evo *evo = &disp->evo[i];
		u64 offset = disp->sync->bo.offset;
		/* Per-channel slots inside disp->mem: DMA objects start at
		 * 0x1000 (0x100 bytes each), hash entries at 0x0000 (0x40
		 * bytes each). */
		u32 dmao = 0x1000 + (i * 0x100);
		u32 hash = 0x0000 + (i * 0x040);

		evo->idx = i;
		evo->sem.offset = EVO_SYNC(evo->idx, 0x00);
		/* Coherent DMA page for the channel's push buffer/control
		 * area; evo->handle receives the bus address. */
		evo->ptr = pci_alloc_consistent(pdev, PAGE_SIZE, &evo->handle);
		if (!evo->ptr) {
			ret = -ENOMEM;
			goto out;
		}

		/* Each of the four groups below writes a 0x20-byte DMA object
		 * descriptor and a matching 0x8-byte hash-table entry binding
		 * a handle (NvEvoSync/NvEvoVRAM/...) to that object for this
		 * channel (i << 27 selects the channel in the hash entry, and
		 * the descriptor offset is encoded at << 9).
		 * NOTE(review): the leading words (0x49, 0x09, 0x0fe00009)
		 * are presumably class/target flags of the ctxdma — verify
		 * against the EVO object layout docs before changing. */

		/* NvEvoSync: one page window over the shared sync buffer. */
		nv_wo32(disp->mem, dmao + 0x00, 0x00000049);
		nv_wo32(disp->mem, dmao + 0x04, (offset + 0x0000) >> 8);
		nv_wo32(disp->mem, dmao + 0x08, (offset + 0x0fff) >> 8);
		nv_wo32(disp->mem, dmao + 0x0c, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x10, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x14, 0x00000000);
		nv_wo32(disp->mem, hash + 0x00, NvEvoSync);
		nv_wo32(disp->mem, hash + 0x04, 0x00000001 | (i << 27) |
						((dmao + 0x00) << 9));

		/* NvEvoVRAM: spans all of VRAM (limit = ram.size - 1). */
		nv_wo32(disp->mem, dmao + 0x20, 0x00000049);
		nv_wo32(disp->mem, dmao + 0x24, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x28, (pfb->ram.size - 1) >> 8);
		nv_wo32(disp->mem, dmao + 0x2c, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x30, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x34, 0x00000000);
		nv_wo32(disp->mem, hash + 0x08, NvEvoVRAM);
		nv_wo32(disp->mem, hash + 0x0c, 0x00000001 | (i << 27) |
						((dmao + 0x20) << 9));

		/* NvEvoVRAM_LP: same VRAM span, different first word. */
		nv_wo32(disp->mem, dmao + 0x40, 0x00000009);
		nv_wo32(disp->mem, dmao + 0x44, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x48, (pfb->ram.size - 1) >> 8);
		nv_wo32(disp->mem, dmao + 0x4c, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x50, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x54, 0x00000000);
		nv_wo32(disp->mem, hash + 0x10, NvEvoVRAM_LP);
		nv_wo32(disp->mem, hash + 0x14, 0x00000001 | (i << 27) |
						((dmao + 0x40) << 9));

		/* NvEvoFB32: VRAM span with 0x0fe00009 in the first word —
		 * presumably a 32bpp format/kind encoding; TODO confirm. */
		nv_wo32(disp->mem, dmao + 0x60, 0x0fe00009);
		nv_wo32(disp->mem, dmao + 0x64, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x68, (pfb->ram.size - 1) >> 8);
		nv_wo32(disp->mem, dmao + 0x6c, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x70, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x74, 0x00000000);
		nv_wo32(disp->mem, hash + 0x18, NvEvoFB32);
		nv_wo32(disp->mem, hash + 0x1c, 0x00000001 | (i << 27) |
						((dmao + 0x60) << 9));
	}

	/* Flush the BAR so the instmem writes above reach VRAM before the
	 * hardware can observe the objects. */
	bar->flush(bar);

out:
	/* Single error exit: tear down everything created so far. */
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}