Linux Kernel 3.7.1
radeon_display.c
1 /*
2  * Copyright 2007-8 Advanced Micro Devices, Inc.
3  * Copyright 2008 Red Hat Inc.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included in
13  * all copies or substantial portions of the Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21  * OTHER DEALINGS IN THE SOFTWARE.
22  *
23  * Authors: Dave Airlie
24  * Alex Deucher
25  */
26 #include <drm/drmP.h>
27 #include <drm/radeon_drm.h>
28 #include "radeon.h"
29 
30 #include "atom.h"
31 #include <asm/div64.h>
32 
33 #include <drm/drm_crtc_helper.h>
34 #include <drm/drm_edid.h>
35 
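/*
 * The *_crtc_load_lut() helpers below program the per-CRTC 256-entry
 * hardware gamma LUT.  Each entry packs the 10-bit red/green/blue
 * values from radeon_crtc->lut_[rgb] as (r << 20) | (g << 10) | b;
 * only the register programming sequence differs between the AVIVO,
 * DCE4, DCE5 and legacy display blocks.
 */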
36 static void avivo_crtc_load_lut(struct drm_crtc *crtc)
37 {
38  struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
39  struct drm_device *dev = crtc->dev;
40  struct radeon_device *rdev = dev->dev_private;
41  int i;
42 
43  DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
44  WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);
45 
46  WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
47  WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
48  WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);
49 
50  WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
51  WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
52  WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);
53 
54  WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
55  WREG32(AVIVO_DC_LUT_RW_MODE, 0);
56  WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);
57 
58  WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
59  for (i = 0; i < 256; i++) {
60  WREG32(AVIVO_DC_LUT_30_COLOR,
61  (radeon_crtc->lut_r[i] << 20) |
62  (radeon_crtc->lut_g[i] << 10) |
63  (radeon_crtc->lut_b[i] << 0));
64  }
65 
66  WREG32(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id);
67 }
68 
69 static void dce4_crtc_load_lut(struct drm_crtc *crtc)
70 {
71  struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
72  struct drm_device *dev = crtc->dev;
73  struct radeon_device *rdev = dev->dev_private;
74  int i;
75 
76  DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
77  WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);
78 
79  WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
80  WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
81  WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);
82 
83  WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
84  WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
85  WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);
86 
87  WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
88  WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);
89 
90  WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
91  for (i = 0; i < 256; i++) {
92  WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
93  (radeon_crtc->lut_r[i] << 20) |
94  (radeon_crtc->lut_g[i] << 10) |
95  (radeon_crtc->lut_b[i] << 0));
96  }
97 }
98 
99 static void dce5_crtc_load_lut(struct drm_crtc *crtc)
100 {
101  struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
102  struct drm_device *dev = crtc->dev;
103  struct radeon_device *rdev = dev->dev_private;
104  int i;
105 
106  DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
107 
108  WREG32(NI_INPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
118 
119  WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);
120 
121  WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
122  WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
123  WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);
124 
125  WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
126  WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
127  WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);
128 
129  WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
130  WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);
131 
132  WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
133  for (i = 0; i < 256; i++) {
134  WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
135  (radeon_crtc->lut_r[i] << 20) |
136  (radeon_crtc->lut_g[i] << 10) |
137  (radeon_crtc->lut_b[i] << 0));
138  }
139 
140  WREG32(NI_DEGAMMA_CONTROL + radeon_crtc->crtc_offset,
148  WREG32(NI_REGAMMA_CONTROL + radeon_crtc->crtc_offset,
154  /* XXX match this to the depth of the crtc fmt block, move to modeset? */
155  WREG32(0x6940 + radeon_crtc->crtc_offset, 0);
156 
157 }
158 
159 static void legacy_crtc_load_lut(struct drm_crtc *crtc)
160 {
161  struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
162  struct drm_device *dev = crtc->dev;
163  struct radeon_device *rdev = dev->dev_private;
164  int i;
165  uint32_t dac2_cntl;
166 
167  dac2_cntl = RREG32(RADEON_DAC_CNTL2);
168  if (radeon_crtc->crtc_id == 0)
169  dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
170  else
171  dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
172  WREG32(RADEON_DAC_CNTL2, dac2_cntl);
173 
174  WREG8(RADEON_PALETTE_INDEX, 0);
175  for (i = 0; i < 256; i++) {
176  WREG32(RADEON_PALETTE_30_DATA,
177  (radeon_crtc->lut_r[i] << 20) |
178  (radeon_crtc->lut_g[i] << 10) |
179  (radeon_crtc->lut_b[i] << 0));
180  }
181 }
182 
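/*
 * Upload the gamma LUT for a CRTC, dispatching to the DCE5, DCE4,
 * AVIVO or legacy programming sequence depending on the ASIC family.
 * Does nothing if the CRTC is currently disabled.
 */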
183 void radeon_crtc_load_lut(struct drm_crtc *crtc)
184 {
185  struct drm_device *dev = crtc->dev;
186  struct radeon_device *rdev = dev->dev_private;
187 
188  if (!crtc->enabled)
189  return;
190 
191  if (ASIC_IS_DCE5(rdev))
192  dce5_crtc_load_lut(crtc);
193  else if (ASIC_IS_DCE4(rdev))
194  dce4_crtc_load_lut(crtc);
195  else if (ASIC_IS_AVIVO(rdev))
196  avivo_crtc_load_lut(crtc);
197  else
198  legacy_crtc_load_lut(crtc);
199 }
200 
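/*
 * fbdev gamma helpers: fbcon hands over 16-bit color ramp entries,
 * while the hardware LUT stores 10 bits per channel, hence the >> 6
 * and << 6 conversions in the two helpers below.
 */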
201 /** Sets the color ramps on behalf of fbcon */
202 void radeon_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green,
203  u16 blue, int regno)
204 {
205  struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
206 
207  radeon_crtc->lut_r[regno] = red >> 6;
208  radeon_crtc->lut_g[regno] = green >> 6;
209  radeon_crtc->lut_b[regno] = blue >> 6;
210 }
211 
212 /** Gets the color ramps on behalf of fbcon */
213 void radeon_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green,
214  u16 *blue, int regno)
215 {
216  struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
217 
218  *red = radeon_crtc->lut_r[regno] << 6;
219  *green = radeon_crtc->lut_g[regno] << 6;
220  *blue = radeon_crtc->lut_b[regno] << 6;
221 }
222 
223 static void radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
224  u16 *blue, uint32_t start, uint32_t size)
225 {
226  struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
227  int end = (start + size > 256) ? 256 : start + size, i;
228 
229  /* userspace palettes are always correct as is */
230  for (i = start; i < end; i++) {
231  radeon_crtc->lut_r[i] = red[i] >> 6;
232  radeon_crtc->lut_g[i] = green[i] >> 6;
233  radeon_crtc->lut_b[i] = blue[i] >> 6;
234  }
235  radeon_crtc_load_lut(crtc);
236 }
237 
238 static void radeon_crtc_destroy(struct drm_crtc *crtc)
239 {
240  struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
241 
242  drm_crtc_cleanup(crtc);
243  kfree(radeon_crtc);
244 }
245 
246 /*
247  * Handle unpin events outside the interrupt handler proper.
248  */
249 static void radeon_unpin_work_func(struct work_struct *__work)
250 {
251  struct radeon_unpin_work *work =
252  container_of(__work, struct radeon_unpin_work, work);
253  int r;
254 
255  /* unpin of the old buffer */
256  r = radeon_bo_reserve(work->old_rbo, false);
257  if (likely(r == 0)) {
258  r = radeon_bo_unpin(work->old_rbo);
259  if (unlikely(r != 0)) {
260  DRM_ERROR("failed to unpin buffer after flip\n");
261  }
262  radeon_bo_unreserve(work->old_rbo);
263  } else
264  DRM_ERROR("failed to reserve buffer after flip\n");
265 
266  drm_gem_object_unreference_unlocked(&work->old_rbo->gem_base);
267  kfree(work);
268 }
269 
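/*
 * Vblank-time pageflip handler.  Called for a CRTC with a pending
 * flip: once the fence of the new buffer has signaled, the flip is
 * programmed via radeon_page_flip().  If the hardware cannot
 * guarantee completion within this vblank, completion is deferred to
 * the next vblank irq; otherwise the vblank event is delivered to
 * userspace and the unpin work is scheduled.
 */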
270 void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id)
271 {
272  struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
273  struct radeon_unpin_work *work;
274  struct drm_pending_vblank_event *e;
275  struct timeval now;
276  unsigned long flags;
277  u32 update_pending;
278  int vpos, hpos;
279 
280  spin_lock_irqsave(&rdev->ddev->event_lock, flags);
281  work = radeon_crtc->unpin_work;
282  if (work == NULL ||
283  (work->fence && !radeon_fence_signaled(work->fence))) {
284  spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
285  return;
286  }
287  /* New pageflip, or just completion of a previous one? */
288  if (!radeon_crtc->deferred_flip_completion) {
289  /* do the flip (mmio) */
290  update_pending = radeon_page_flip(rdev, crtc_id, work->new_crtc_base);
291  } else {
292  /* This is just a completion of a flip queued in crtc
293  * at last invocation. Make sure we go directly to
294  * completion routine.
295  */
296  update_pending = 0;
297  radeon_crtc->deferred_flip_completion = 0;
298  }
299 
300  /* Has the pageflip already completed in crtc, or is it certain
301  * to complete in this vblank?
302  */
303  if (update_pending &&
304  (DRM_SCANOUTPOS_VALID & radeon_get_crtc_scanoutpos(rdev->ddev, crtc_id,
305  &vpos, &hpos)) &&
306  ((vpos >= (99 * rdev->mode_info.crtcs[crtc_id]->base.hwmode.crtc_vdisplay)/100) ||
307  (vpos < 0 && !ASIC_IS_AVIVO(rdev)))) {
308  /* crtc didn't flip in this target vblank interval,
309  * but flip is pending in crtc. Based on the current
310  * scanout position we know that the current frame is
311  * (nearly) complete and the flip will (likely)
312  * complete before the start of the next frame.
313  */
314  update_pending = 0;
315  }
316  if (update_pending) {
317  /* crtc didn't flip in this target vblank interval,
318  * but flip is pending in crtc. It will complete it
319  * in next vblank interval, so complete the flip at
320  * next vblank irq.
321  */
322  radeon_crtc->deferred_flip_completion = 1;
323  spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
324  return;
325  }
326 
327  /* Pageflip (will be) certainly completed in this vblank. Clean up. */
328  radeon_crtc->unpin_work = NULL;
329 
330  /* wakeup userspace */
331  if (work->event) {
332  e = work->event;
333  e->event.sequence = drm_vblank_count_and_time(rdev->ddev, crtc_id, &now);
334  e->event.tv_sec = now.tv_sec;
335  e->event.tv_usec = now.tv_usec;
336  list_add_tail(&e->base.link, &e->base.file_priv->event_list);
337  wake_up_interruptible(&e->base.file_priv->event_wait);
338  }
339  spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
340 
341  drm_vblank_put(rdev->ddev, radeon_crtc->crtc_id);
342  radeon_fence_unref(&work->fence);
343  radeon_post_page_flip(work->rdev, work->crtc_id);
344  schedule_work(&work->work);
345 }
346 
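/*
 * Queue a page flip: reference the old buffer so it can be unpinned
 * later, pin the new buffer (legacy CRTCs only accept a 27-bit
 * offset), compute the CRTC base address including the pre-AVIVO
 * tiling/offset fixups, then take a vblank reference and arm the
 * pageflip interrupt.
 */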
347 static int radeon_crtc_page_flip(struct drm_crtc *crtc,
348  struct drm_framebuffer *fb,
349  struct drm_pending_vblank_event *event)
350 {
351  struct drm_device *dev = crtc->dev;
352  struct radeon_device *rdev = dev->dev_private;
353  struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
354  struct radeon_framebuffer *old_radeon_fb;
355  struct radeon_framebuffer *new_radeon_fb;
356  struct drm_gem_object *obj;
357  struct radeon_bo *rbo;
358  struct radeon_unpin_work *work;
359  unsigned long flags;
360  u32 tiling_flags, pitch_pixels;
361  u64 base;
362  int r;
363 
364  work = kzalloc(sizeof *work, GFP_KERNEL);
365  if (work == NULL)
366  return -ENOMEM;
367 
368  work->event = event;
369  work->rdev = rdev;
370  work->crtc_id = radeon_crtc->crtc_id;
371  old_radeon_fb = to_radeon_framebuffer(crtc->fb);
372  new_radeon_fb = to_radeon_framebuffer(fb);
373  /* schedule unpin of the old buffer */
374  obj = old_radeon_fb->obj;
375  /* take a reference to the old object */
376  drm_gem_object_reference(obj);
377  rbo = gem_to_radeon_bo(obj);
378  work->old_rbo = rbo;
379  obj = new_radeon_fb->obj;
380  rbo = gem_to_radeon_bo(obj);
381  if (rbo->tbo.sync_obj)
382  work->fence = radeon_fence_ref(rbo->tbo.sync_obj);
383  INIT_WORK(&work->work, radeon_unpin_work_func);
384 
385  /* We borrow the event spin lock for protecting unpin_work */
386  spin_lock_irqsave(&dev->event_lock, flags);
387  if (radeon_crtc->unpin_work) {
388  DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
389  r = -EBUSY;
390  goto unlock_free;
391  }
392  radeon_crtc->unpin_work = work;
393  radeon_crtc->deferred_flip_completion = 0;
394  spin_unlock_irqrestore(&dev->event_lock, flags);
395 
396  /* pin the new buffer */
397  DRM_DEBUG_DRIVER("flip-ioctl() cur_fbo = %p, cur_bbo = %p\n",
398  work->old_rbo, rbo);
399 
400  r = radeon_bo_reserve(rbo, false);
401  if (unlikely(r != 0)) {
402  DRM_ERROR("failed to reserve new rbo buffer before flip\n");
403  goto pflip_cleanup;
404  }
405  /* Only 27 bit offset for legacy CRTC */
406  r = radeon_bo_pin_restricted(rbo, RADEON_GEM_DOMAIN_VRAM,
407  ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, &base);
408  if (unlikely(r != 0)) {
409  radeon_bo_unreserve(rbo);
410  r = -EINVAL;
411  DRM_ERROR("failed to pin new rbo buffer before flip\n");
412  goto pflip_cleanup;
413  }
414  radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
415  radeon_bo_unreserve(rbo);
416 
417  if (!ASIC_IS_AVIVO(rdev)) {
418  /* crtc offset is from display base addr not FB location */
419  base -= radeon_crtc->legacy_display_base_addr;
420  pitch_pixels = fb->pitches[0] / (fb->bits_per_pixel / 8);
421 
422  if (tiling_flags & RADEON_TILING_MACRO) {
423  if (ASIC_IS_R300(rdev)) {
424  base &= ~0x7ff;
425  } else {
426  int byteshift = fb->bits_per_pixel >> 4;
427  int tile_addr = (((crtc->y >> 3) * pitch_pixels + crtc->x) >> (8 - byteshift)) << 11;
428  base += tile_addr + ((crtc->x << byteshift) % 256) + ((crtc->y % 8) << 8);
429  }
430  } else {
431  int offset = crtc->y * pitch_pixels + crtc->x;
432  switch (fb->bits_per_pixel) {
433  case 8:
434  default:
435  offset *= 1;
436  break;
437  case 15:
438  case 16:
439  offset *= 2;
440  break;
441  case 24:
442  offset *= 3;
443  break;
444  case 32:
445  offset *= 4;
446  break;
447  }
448  base += offset;
449  }
450  base &= ~7;
451  }
452 
453  spin_lock_irqsave(&dev->event_lock, flags);
454  work->new_crtc_base = base;
455  spin_unlock_irqrestore(&dev->event_lock, flags);
456 
457  /* update crtc fb */
458  crtc->fb = fb;
459 
460  r = drm_vblank_get(dev, radeon_crtc->crtc_id);
461  if (r) {
462  DRM_ERROR("failed to get vblank before flip\n");
463  goto pflip_cleanup1;
464  }
465 
466  /* set the proper interrupt */
467  radeon_pre_page_flip(rdev, radeon_crtc->crtc_id);
468 
469  return 0;
470 
471 pflip_cleanup1:
472  if (unlikely(radeon_bo_reserve(rbo, false) != 0)) {
473  DRM_ERROR("failed to reserve new rbo in error path\n");
474  goto pflip_cleanup;
475  }
476  if (unlikely(radeon_bo_unpin(rbo) != 0)) {
477  DRM_ERROR("failed to unpin new rbo in error path\n");
478  }
479  radeon_bo_unreserve(rbo);
480 
481 pflip_cleanup:
482  spin_lock_irqsave(&dev->event_lock, flags);
483  radeon_crtc->unpin_work = NULL;
484 unlock_free:
485  spin_unlock_irqrestore(&dev->event_lock, flags);
486  drm_gem_object_unreference_unlocked(old_radeon_fb->obj);
487  radeon_fence_unref(&work->fence);
488  kfree(work);
489 
490  return r;
491 }
492 
493 static const struct drm_crtc_funcs radeon_crtc_funcs = {
494  .cursor_set = radeon_crtc_cursor_set,
495  .cursor_move = radeon_crtc_cursor_move,
496  .gamma_set = radeon_crtc_gamma_set,
497  .set_config = drm_crtc_helper_set_config,
498  .destroy = radeon_crtc_destroy,
499  .page_flip = radeon_crtc_page_flip,
500 };
501 
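/*
 * Allocate and register a CRTC, set up a linear 256-entry gamma ramp
 * (i << 2 widens the 8-bit index to the 10-bit LUT range) and hand
 * off to the ATOM or legacy CRTC initialization.
 */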
502 static void radeon_crtc_init(struct drm_device *dev, int index)
503 {
504  struct radeon_device *rdev = dev->dev_private;
505  struct radeon_crtc *radeon_crtc;
506  int i;
507 
508  radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
509  if (radeon_crtc == NULL)
510  return;
511 
512  drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);
513 
514  drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
515  radeon_crtc->crtc_id = index;
516  rdev->mode_info.crtcs[index] = radeon_crtc;
517 
518 #if 0
519  radeon_crtc->mode_set.crtc = &radeon_crtc->base;
520  radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
521  radeon_crtc->mode_set.num_connectors = 0;
522 #endif
523 
524  for (i = 0; i < 256; i++) {
525  radeon_crtc->lut_r[i] = i << 2;
526  radeon_crtc->lut_g[i] = i << 2;
527  radeon_crtc->lut_b[i] = i << 2;
528  }
529 
530  if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
531  radeon_atombios_init_crtc(dev, radeon_crtc);
532  else
533  radeon_legacy_init_crtc(dev, radeon_crtc);
534 }
535 
536 static const char *encoder_names[37] = {
537  "NONE",
538  "INTERNAL_LVDS",
539  "INTERNAL_TMDS1",
540  "INTERNAL_TMDS2",
541  "INTERNAL_DAC1",
542  "INTERNAL_DAC2",
543  "INTERNAL_SDVOA",
544  "INTERNAL_SDVOB",
545  "SI170B",
546  "CH7303",
547  "CH7301",
548  "INTERNAL_DVO1",
549  "EXTERNAL_SDVOA",
550  "EXTERNAL_SDVOB",
551  "TITFP513",
552  "INTERNAL_LVTM1",
553  "VT1623",
554  "HDMI_SI1930",
555  "HDMI_INTERNAL",
556  "INTERNAL_KLDSCP_TMDS1",
557  "INTERNAL_KLDSCP_DVO1",
558  "INTERNAL_KLDSCP_DAC1",
559  "INTERNAL_KLDSCP_DAC2",
560  "SI178",
561  "MVPU_FPGA",
562  "INTERNAL_DDI",
563  "VT1625",
564  "HDMI_SI1932",
565  "DP_AN9801",
566  "DP_DP501",
567  "INTERNAL_UNIPHY",
568  "INTERNAL_KLDSCP_LVTMA",
569  "INTERNAL_UNIPHY1",
570  "INTERNAL_UNIPHY2",
571  "NUTMEG",
572  "TRAVIS",
573  "INTERNAL_VCE"
574 };
575 
576 static const char *hpd_names[6] = {
577  "HPD1",
578  "HPD2",
579  "HPD3",
580  "HPD4",
581  "HPD5",
582  "HPD6",
583 };
584 
585 static void radeon_print_display_setup(struct drm_device *dev)
586 {
587  struct drm_connector *connector;
588  struct radeon_connector *radeon_connector;
589  struct drm_encoder *encoder;
590  struct radeon_encoder *radeon_encoder;
591  uint32_t devices;
592  int i = 0;
593 
594  DRM_INFO("Radeon Display Connectors\n");
595  list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
596  radeon_connector = to_radeon_connector(connector);
597  DRM_INFO("Connector %d:\n", i);
598  DRM_INFO(" %s\n", drm_get_connector_name(connector));
599  if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
600  DRM_INFO(" %s\n", hpd_names[radeon_connector->hpd.hpd]);
601  if (radeon_connector->ddc_bus) {
602  DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
603  radeon_connector->ddc_bus->rec.mask_clk_reg,
604  radeon_connector->ddc_bus->rec.mask_data_reg,
605  radeon_connector->ddc_bus->rec.a_clk_reg,
606  radeon_connector->ddc_bus->rec.a_data_reg,
607  radeon_connector->ddc_bus->rec.en_clk_reg,
608  radeon_connector->ddc_bus->rec.en_data_reg,
609  radeon_connector->ddc_bus->rec.y_clk_reg,
610  radeon_connector->ddc_bus->rec.y_data_reg);
611  if (radeon_connector->router.ddc_valid)
612  DRM_INFO(" DDC Router 0x%x/0x%x\n",
613  radeon_connector->router.ddc_mux_control_pin,
614  radeon_connector->router.ddc_mux_state);
615  if (radeon_connector->router.cd_valid)
616  DRM_INFO(" Clock/Data Router 0x%x/0x%x\n",
617  radeon_connector->router.cd_mux_control_pin,
618  radeon_connector->router.cd_mux_state);
619  } else {
620  if (connector->connector_type == DRM_MODE_CONNECTOR_VGA ||
621  connector->connector_type == DRM_MODE_CONNECTOR_DVII ||
622  connector->connector_type == DRM_MODE_CONNECTOR_DVID ||
623  connector->connector_type == DRM_MODE_CONNECTOR_DVIA ||
624  connector->connector_type == DRM_MODE_CONNECTOR_HDMIA ||
625  connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)
626  DRM_INFO(" DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
627  }
628  DRM_INFO(" Encoders:\n");
629  list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
630  radeon_encoder = to_radeon_encoder(encoder);
631  devices = radeon_encoder->devices & radeon_connector->devices;
632  if (devices) {
633  if (devices & ATOM_DEVICE_CRT1_SUPPORT)
634  DRM_INFO(" CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]);
635  if (devices & ATOM_DEVICE_CRT2_SUPPORT)
636  DRM_INFO(" CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]);
637  if (devices & ATOM_DEVICE_LCD1_SUPPORT)
638  DRM_INFO(" LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]);
639  if (devices & ATOM_DEVICE_DFP1_SUPPORT)
640  DRM_INFO(" DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]);
641  if (devices & ATOM_DEVICE_DFP2_SUPPORT)
642  DRM_INFO(" DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]);
643  if (devices & ATOM_DEVICE_DFP3_SUPPORT)
644  DRM_INFO(" DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]);
645  if (devices & ATOM_DEVICE_DFP4_SUPPORT)
646  DRM_INFO(" DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]);
647  if (devices & ATOM_DEVICE_DFP5_SUPPORT)
648  DRM_INFO(" DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]);
649  if (devices & ATOM_DEVICE_DFP6_SUPPORT)
650  DRM_INFO(" DFP6: %s\n", encoder_names[radeon_encoder->encoder_id]);
651  if (devices & ATOM_DEVICE_TV1_SUPPORT)
652  DRM_INFO(" TV1: %s\n", encoder_names[radeon_encoder->encoder_id]);
653  if (devices & ATOM_DEVICE_CV_SUPPORT)
654  DRM_INFO(" CV: %s\n", encoder_names[radeon_encoder->encoder_id]);
655  }
656  }
657  i++;
658  }
659 }
660 
661 static bool radeon_setup_enc_conn(struct drm_device *dev)
662 {
663  struct radeon_device *rdev = dev->dev_private;
664  bool ret = false;
665 
666  if (rdev->bios) {
667  if (rdev->is_atom_bios) {
669  if (ret == false)
671  } else {
673  if (ret == false)
675  }
676  } else {
677  if (!ASIC_IS_AVIVO(rdev))
679  }
680  if (ret) {
682  radeon_print_display_setup(dev);
683  }
684 
685  return ret;
686 }
687 
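/*
 * Fetch the EDID for a connector (over the DP aux i2c channel or the
 * regular DDC bus as appropriate) and add the advertised modes.
 * Falls back to a BIOS-hardcoded EDID, which some laptop panels and
 * server KVMs rely on.
 */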
688 int radeon_ddc_get_modes(struct radeon_connector *radeon_connector)
689 {
690  struct drm_device *dev = radeon_connector->base.dev;
691  struct radeon_device *rdev = dev->dev_private;
692  int ret = 0;
693 
694  /* on hw with routers, select right port */
695  if (radeon_connector->router.ddc_valid)
696  radeon_router_select_ddc_port(radeon_connector);
697 
698  if ((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_DisplayPort) ||
699  (radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP) ||
702  struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
703 
706  radeon_connector->edid = drm_get_edid(&radeon_connector->base,
707  &dig->dp_i2c_bus->adapter);
708  else if (radeon_connector->ddc_bus && !radeon_connector->edid)
709  radeon_connector->edid = drm_get_edid(&radeon_connector->base,
710  &radeon_connector->ddc_bus->adapter);
711  } else {
712  if (radeon_connector->ddc_bus && !radeon_connector->edid)
713  radeon_connector->edid = drm_get_edid(&radeon_connector->base,
714  &radeon_connector->ddc_bus->adapter);
715  }
716 
717  if (!radeon_connector->edid) {
718  if (rdev->is_atom_bios) {
719  /* some laptops provide a hardcoded edid in rom for LCDs */
720  if (((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_LVDS) ||
721  (radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP)))
722  radeon_connector->edid = radeon_bios_get_hardcoded_edid(rdev);
723  } else
724  /* some servers provide a hardcoded edid in rom for KVMs */
725  radeon_connector->edid = radeon_bios_get_hardcoded_edid(rdev);
726  }
727  if (radeon_connector->edid) {
728  drm_mode_connector_update_edid_property(&radeon_connector->base, radeon_connector->edid);
729  ret = drm_add_edid_modes(&radeon_connector->base, radeon_connector->edid);
730  return ret;
731  }
733  return 0;
734 }
735 
736 /* avivo */
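/*
 * PLL parameter helpers for AVIVO+ parts: avivo_get_fb_div() derives
 * the integer and fractional feedback dividers for a given target
 * clock, post divider and reference divider, while
 * avivo_get_post_div() picks a post divider that keeps the VCO within
 * the allowed range.
 */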
737 static void avivo_get_fb_div(struct radeon_pll *pll,
738  u32 target_clock,
739  u32 post_div,
740  u32 ref_div,
741  u32 *fb_div,
742  u32 *frac_fb_div)
743 {
744  u32 tmp = post_div * ref_div;
745 
746  tmp *= target_clock;
747  *fb_div = tmp / pll->reference_freq;
748  *frac_fb_div = tmp % pll->reference_freq;
749 
750  if (*fb_div > pll->max_feedback_div)
751  *fb_div = pll->max_feedback_div;
752  else if (*fb_div < pll->min_feedback_div)
753  *fb_div = pll->min_feedback_div;
754 }
755 
756 static u32 avivo_get_post_div(struct radeon_pll *pll,
757  u32 target_clock)
758 {
759  u32 vco, post_div, tmp;
760 
761  if (pll->flags & RADEON_PLL_USE_POST_DIV)
762  return pll->post_div;
763 
764  if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP) {
765  if (pll->flags & RADEON_PLL_IS_LCD)
766  vco = pll->lcd_pll_out_min;
767  else
768  vco = pll->pll_out_min;
769  } else {
770  if (pll->flags & RADEON_PLL_IS_LCD)
771  vco = pll->lcd_pll_out_max;
772  else
773  vco = pll->pll_out_max;
774  }
775 
776  post_div = vco / target_clock;
777  tmp = vco % target_clock;
778 
779  if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP) {
780  if (tmp)
781  post_div++;
782  } else {
783  if (!tmp)
784  post_div--;
785  }
786 
787  if (post_div > pll->max_post_div)
788  post_div = pll->max_post_div;
789  else if (post_div < pll->min_post_div)
790  post_div = pll->min_post_div;
791 
792  return post_div;
793 }
794 
795 #define MAX_TOLERANCE 10
796 
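/*
 * Compute PLL dividers for AVIVO+ parts.  The resulting dot clock
 * satisfies
 *   dot_clock = ref_freq * (fb_div + frac_fb_div / 10) / (ref_div * post_div)
 * where the fractional feedback divider is only used when
 * RADEON_PLL_USE_FRAC_FB_DIV is set; otherwise the reference divider
 * is stepped until the error is within MAX_TOLERANCE.
 */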
797 void radeon_compute_pll_avivo(struct radeon_pll *pll,
798  u32 freq,
799  u32 *dot_clock_p,
800  u32 *fb_div_p,
801  u32 *frac_fb_div_p,
802  u32 *ref_div_p,
803  u32 *post_div_p)
804 {
805  u32 target_clock = freq / 10;
806  u32 post_div = avivo_get_post_div(pll, target_clock);
807  u32 ref_div = pll->min_ref_div;
808  u32 fb_div = 0, frac_fb_div = 0, tmp;
809 
810  if (pll->flags & RADEON_PLL_USE_REF_DIV)
811  ref_div = pll->reference_div;
812 
813  if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
814  avivo_get_fb_div(pll, target_clock, post_div, ref_div, &fb_div, &frac_fb_div);
815  frac_fb_div = (100 * frac_fb_div) / pll->reference_freq;
816  if (frac_fb_div >= 5) {
817  frac_fb_div -= 5;
818  frac_fb_div = frac_fb_div / 10;
819  frac_fb_div++;
820  }
821  if (frac_fb_div >= 10) {
822  fb_div++;
823  frac_fb_div = 0;
824  }
825  } else {
826  while (ref_div <= pll->max_ref_div) {
827  avivo_get_fb_div(pll, target_clock, post_div, ref_div,
828  &fb_div, &frac_fb_div);
829  if (frac_fb_div >= (pll->reference_freq / 2))
830  fb_div++;
831  frac_fb_div = 0;
832  tmp = (pll->reference_freq * fb_div) / (post_div * ref_div);
833  tmp = (tmp * 10000) / target_clock;
834 
835  if (tmp > (10000 + MAX_TOLERANCE))
836  ref_div++;
837  else if (tmp >= (10000 - MAX_TOLERANCE))
838  break;
839  else
840  ref_div++;
841  }
842  }
843 
844  *dot_clock_p = ((pll->reference_freq * fb_div * 10) + (pll->reference_freq * frac_fb_div)) /
845  (ref_div * post_div * 10);
846  *fb_div_p = fb_div;
847  *frac_fb_div_p = frac_fb_div;
848  *ref_div_p = ref_div;
849  *post_div_p = post_div;
850  DRM_DEBUG_KMS("%d, pll dividers - fb: %d.%d ref: %d, post %d\n",
851  *dot_clock_p, fb_div, frac_fb_div, ref_div, post_div);
852 }
853 
854 /* pre-avivo */
855 static inline uint32_t radeon_div(uint64_t n, uint32_t d)
856 {
857  uint64_t mod;
858 
859  n += d / 2;
860 
861  mod = do_div(n, d);
862  return n;
863 }
864 
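/*
 * Compute PLL dividers for pre-AVIVO parts: for every allowed post
 * and reference divider, binary-search the (fractional) feedback
 * divider and keep the combination with the smallest frequency error,
 * breaking ties according to the RADEON_PLL_PREFER_* flags.
 */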
865 void radeon_compute_pll_legacy(struct radeon_pll *pll,
866  uint64_t freq,
867  uint32_t *dot_clock_p,
868  uint32_t *fb_div_p,
869  uint32_t *frac_fb_div_p,
870  uint32_t *ref_div_p,
871  uint32_t *post_div_p)
872 {
873  uint32_t min_ref_div = pll->min_ref_div;
874  uint32_t max_ref_div = pll->max_ref_div;
875  uint32_t min_post_div = pll->min_post_div;
876  uint32_t max_post_div = pll->max_post_div;
877  uint32_t min_fractional_feed_div = 0;
878  uint32_t max_fractional_feed_div = 0;
879  uint32_t best_vco = pll->best_vco;
880  uint32_t best_post_div = 1;
881  uint32_t best_ref_div = 1;
882  uint32_t best_feedback_div = 1;
883  uint32_t best_frac_feedback_div = 0;
884  uint32_t best_freq = -1;
885  uint32_t best_error = 0xffffffff;
886  uint32_t best_vco_diff = 1;
887  uint32_t post_div;
888  u32 pll_out_min, pll_out_max;
889 
890  DRM_DEBUG_KMS("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
891  freq = freq * 1000;
892 
893  if (pll->flags & RADEON_PLL_IS_LCD) {
894  pll_out_min = pll->lcd_pll_out_min;
895  pll_out_max = pll->lcd_pll_out_max;
896  } else {
897  pll_out_min = pll->pll_out_min;
898  pll_out_max = pll->pll_out_max;
899  }
900 
901  if (pll_out_min > 64800)
902  pll_out_min = 64800;
903 
904  if (pll->flags & RADEON_PLL_USE_REF_DIV)
905  min_ref_div = max_ref_div = pll->reference_div;
906  else {
907  while (min_ref_div < max_ref_div-1) {
908  uint32_t mid = (min_ref_div + max_ref_div) / 2;
909  uint32_t pll_in = pll->reference_freq / mid;
910  if (pll_in < pll->pll_in_min)
911  max_ref_div = mid;
912  else if (pll_in > pll->pll_in_max)
913  min_ref_div = mid;
914  else
915  break;
916  }
917  }
918 
919  if (pll->flags & RADEON_PLL_USE_POST_DIV)
920  min_post_div = max_post_div = pll->post_div;
921 
922  if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
923  min_fractional_feed_div = pll->min_frac_feedback_div;
924  max_fractional_feed_div = pll->max_frac_feedback_div;
925  }
926 
927  for (post_div = max_post_div; post_div >= min_post_div; --post_div) {
928  uint32_t ref_div;
929 
930  if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
931  continue;
932 
933  /* legacy radeons only have a few post_divs */
934  if (pll->flags & RADEON_PLL_LEGACY) {
935  if ((post_div == 5) ||
936  (post_div == 7) ||
937  (post_div == 9) ||
938  (post_div == 10) ||
939  (post_div == 11) ||
940  (post_div == 13) ||
941  (post_div == 14) ||
942  (post_div == 15))
943  continue;
944  }
945 
946  for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) {
947  uint32_t feedback_div, current_freq = 0, error, vco_diff;
948  uint32_t pll_in = pll->reference_freq / ref_div;
949  uint32_t min_feed_div = pll->min_feedback_div;
950  uint32_t max_feed_div = pll->max_feedback_div + 1;
951 
952  if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max)
953  continue;
954 
955  while (min_feed_div < max_feed_div) {
956  uint32_t vco;
957  uint32_t min_frac_feed_div = min_fractional_feed_div;
958  uint32_t max_frac_feed_div = max_fractional_feed_div + 1;
959  uint32_t frac_feedback_div;
960  uint64_t tmp;
961 
962  feedback_div = (min_feed_div + max_feed_div) / 2;
963 
964  tmp = (uint64_t)pll->reference_freq * feedback_div;
965  vco = radeon_div(tmp, ref_div);
966 
967  if (vco < pll_out_min) {
968  min_feed_div = feedback_div + 1;
969  continue;
970  } else if (vco > pll_out_max) {
971  max_feed_div = feedback_div;
972  continue;
973  }
974 
975  while (min_frac_feed_div < max_frac_feed_div) {
976  frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2;
977  tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div;
978  tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
979  current_freq = radeon_div(tmp, ref_div * post_div);
980 
981  if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
982  if (freq < current_freq)
983  error = 0xffffffff;
984  else
985  error = freq - current_freq;
986  } else
987  error = abs(current_freq - freq);
988  vco_diff = abs(vco - best_vco);
989 
990  if ((best_vco == 0 && error < best_error) ||
991  (best_vco != 0 &&
992  ((best_error > 100 && error < best_error - 100) ||
993  (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) {
994  best_post_div = post_div;
995  best_ref_div = ref_div;
996  best_feedback_div = feedback_div;
997  best_frac_feedback_div = frac_feedback_div;
998  best_freq = current_freq;
999  best_error = error;
1000  best_vco_diff = vco_diff;
1001  } else if (current_freq == freq) {
1002  if (best_freq == -1) {
1003  best_post_div = post_div;
1004  best_ref_div = ref_div;
1005  best_feedback_div = feedback_div;
1006  best_frac_feedback_div = frac_feedback_div;
1007  best_freq = current_freq;
1008  best_error = error;
1009  best_vco_diff = vco_diff;
1010  } else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
1011  ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
1012  ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
1013  ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
1014  ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
1015  ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
1016  best_post_div = post_div;
1017  best_ref_div = ref_div;
1018  best_feedback_div = feedback_div;
1019  best_frac_feedback_div = frac_feedback_div;
1020  best_freq = current_freq;
1021  best_error = error;
1022  best_vco_diff = vco_diff;
1023  }
1024  }
1025  if (current_freq < freq)
1026  min_frac_feed_div = frac_feedback_div + 1;
1027  else
1028  max_frac_feed_div = frac_feedback_div;
1029  }
1030  if (current_freq < freq)
1031  min_feed_div = feedback_div + 1;
1032  else
1033  max_feed_div = feedback_div;
1034  }
1035  }
1036  }
1037 
1038  *dot_clock_p = best_freq / 10000;
1039  *fb_div_p = best_feedback_div;
1040  *frac_fb_div_p = best_frac_feedback_div;
1041  *ref_div_p = best_ref_div;
1042  *post_div_p = best_post_div;
1043  DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1044  (long long)freq,
1045  best_freq / 1000, best_feedback_div, best_frac_feedback_div,
1046  best_ref_div, best_post_div);
1047 
1048 }
1049 
1050 static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb)
1051 {
1052  struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
1053 
1054  if (radeon_fb->obj) {
1055  drm_gem_object_unreference_unlocked(radeon_fb->obj);
1056  }
1057  drm_framebuffer_cleanup(fb);
1058  kfree(radeon_fb);
1059 }
1060 
1061 static int radeon_user_framebuffer_create_handle(struct drm_framebuffer *fb,
1062  struct drm_file *file_priv,
1063  unsigned int *handle)
1064 {
1065  struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
1066 
1067  return drm_gem_handle_create(file_priv, radeon_fb->obj, handle);
1068 }
1069 
1070 static const struct drm_framebuffer_funcs radeon_fb_funcs = {
1071  .destroy = radeon_user_framebuffer_destroy,
1072  .create_handle = radeon_user_framebuffer_create_handle,
1073 };
1074 
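/*
 * Wrap a GEM object in a radeon_framebuffer and register it with the
 * DRM core, filling the framebuffer fields from the mode_fb_cmd2.
 */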
1075 int
1076 radeon_framebuffer_init(struct drm_device *dev,
1077  struct radeon_framebuffer *rfb,
1078  struct drm_mode_fb_cmd2 *mode_cmd,
1079  struct drm_gem_object *obj)
1080 {
1081  int ret;
1082  rfb->obj = obj;
1083  ret = drm_framebuffer_init(dev, &rfb->base, &radeon_fb_funcs);
1084  if (ret) {
1085  rfb->obj = NULL;
1086  return ret;
1087  }
1088  drm_helper_mode_fill_fb_struct(&rfb->base, mode_cmd);
1089  return 0;
1090 }
1091 
1092 static struct drm_framebuffer *
1093 radeon_user_framebuffer_create(struct drm_device *dev,
1094  struct drm_file *file_priv,
1095  struct drm_mode_fb_cmd2 *mode_cmd)
1096 {
1097  struct drm_gem_object *obj;
1098  struct radeon_framebuffer *radeon_fb;
1099  int ret;
1100 
1101  obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
1102  if (obj == NULL) {
1103  dev_err(&dev->pdev->dev, "No GEM object associated to handle 0x%08X, "
1104  "can't create framebuffer\n", mode_cmd->handles[0]);
1105  return ERR_PTR(-ENOENT);
1106  }
1107 
1108  radeon_fb = kzalloc(sizeof(*radeon_fb), GFP_KERNEL);
1109  if (radeon_fb == NULL)
1110  return ERR_PTR(-ENOMEM);
1111 
1112  ret = radeon_framebuffer_init(dev, radeon_fb, mode_cmd, obj);
1113  if (ret) {
1114  kfree(radeon_fb);
1115  drm_gem_object_unreference_unlocked(obj);
1116  return NULL;
1117  }
1118 
1119  return &radeon_fb->base;
1120 }
1121 
1122 static void radeon_output_poll_changed(struct drm_device *dev)
1123 {
1124  struct radeon_device *rdev = dev->dev_private;
1125  radeon_fb_output_poll_changed(rdev);
1126 }
1127 
1128 static const struct drm_mode_config_funcs radeon_mode_funcs = {
1129  .fb_create = radeon_user_framebuffer_create,
1130  .output_poll_changed = radeon_output_poll_changed
1131 };
1132 
1133 static struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
1134 { { 0, "driver" },
1135  { 1, "bios" },
1136 };
1137 
1138 static struct drm_prop_enum_list radeon_tv_std_enum_list[] =
1139 { { TV_STD_NTSC, "ntsc" },
1140  { TV_STD_PAL, "pal" },
1141  { TV_STD_PAL_M, "pal-m" },
1142  { TV_STD_PAL_60, "pal-60" },
1143  { TV_STD_NTSC_J, "ntsc-j" },
1144  { TV_STD_SCART_PAL, "scart-pal" },
1145  { TV_STD_PAL_CN, "pal-cn" },
1146  { TV_STD_SECAM, "secam" },
1147 };
1148 
1149 static struct drm_prop_enum_list radeon_underscan_enum_list[] =
1150 { { UNDERSCAN_OFF, "off" },
1151  { UNDERSCAN_ON, "on" },
1152  { UNDERSCAN_AUTO, "auto" },
1153 };
1154 
1155 static int radeon_modeset_create_props(struct radeon_device *rdev)
1156 {
1157  int sz;
1158 
1159  if (rdev->is_atom_bios) {
1160  rdev->mode_info.coherent_mode_property =
1161  drm_property_create_range(rdev->ddev, 0 , "coherent", 0, 1);
1162  if (!rdev->mode_info.coherent_mode_property)
1163  return -ENOMEM;
1164  }
1165 
1166  if (!ASIC_IS_AVIVO(rdev)) {
1167  sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
1168  rdev->mode_info.tmds_pll_property =
1169  drm_property_create_enum(rdev->ddev, 0,
1170  "tmds_pll",
1171  radeon_tmds_pll_enum_list, sz);
1172  }
1173 
1174  rdev->mode_info.load_detect_property =
1175  drm_property_create_range(rdev->ddev, 0, "load detection", 0, 1);
1176  if (!rdev->mode_info.load_detect_property)
1177  return -ENOMEM;
1178 
1179  drm_mode_create_scaling_mode_property(rdev->ddev);
1180 
1181  sz = ARRAY_SIZE(radeon_tv_std_enum_list);
1182  rdev->mode_info.tv_std_property =
1183  drm_property_create_enum(rdev->ddev, 0,
1184  "tv standard",
1185  radeon_tv_std_enum_list, sz);
1186 
1187  sz = ARRAY_SIZE(radeon_underscan_enum_list);
1188  rdev->mode_info.underscan_property =
1189  drm_property_create_enum(rdev->ddev, 0,
1190  "underscan",
1191  radeon_underscan_enum_list, sz);
1192 
1193  rdev->mode_info.underscan_hborder_property =
1194  drm_property_create_range(rdev->ddev, 0,
1195  "underscan hborder", 0, 128);
1196  if (!rdev->mode_info.underscan_hborder_property)
1197  return -ENOMEM;
1198 
1199  rdev->mode_info.underscan_vborder_property =
1200  drm_property_create_range(rdev->ddev, 0,
1201  "underscan vborder", 0, 128);
1202  if (!rdev->mode_info.underscan_vborder_property)
1203  return -ENOMEM;
1204 
1205  return 0;
1206 }
1207 
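/*
 * Derive rdev->disp_priority from the radeon_disp_priority module
 * parameter, or pick a default per chip family (see the comment below
 * about r3xx/rv515 underflow flicker).
 */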
1208 void radeon_update_display_priority(struct radeon_device *rdev)
1209 {
1210  /* adjustment options for the display watermarks */
1211  if ((radeon_disp_priority == 0) || (radeon_disp_priority > 2)) {
1212  /* set display priority to high for r3xx, rv515 chips
1213  * this avoids flickering due to underflow to the
1214  * display controllers during heavy acceleration.
1215  * Don't force high on rs4xx igp chips as it seems to
1216  * affect the sound card. See kernel bug 15982.
1217  */
1218  if ((ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515)) &&
1219  !(rdev->flags & RADEON_IS_IGP))
1220  rdev->disp_priority = 2;
1221  else
1222  rdev->disp_priority = 0;
1223  } else
1224  rdev->disp_priority = radeon_disp_priority;
1225 
1226 }
1227 
1228 /*
1229  * Allocate hdmi structs and determine register offsets
1230  */
1231 static void radeon_afmt_init(struct radeon_device *rdev)
1232 {
1233  int i;
1234 
1235  for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++)
1236  rdev->mode_info.afmt[i] = NULL;
1237 
1238  if (ASIC_IS_DCE6(rdev)) {
1239  /* todo */
1240  } else if (ASIC_IS_DCE4(rdev)) {
1241  /* DCE4/5 has 6 audio blocks tied to DIG encoders */
1242  /* DCE4.1 has 2 audio blocks tied to DIG encoders */
1243  rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1244  if (rdev->mode_info.afmt[0]) {
1245  rdev->mode_info.afmt[0]->offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
1246  rdev->mode_info.afmt[0]->id = 0;
1247  }
1248  rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1249  if (rdev->mode_info.afmt[1]) {
1250  rdev->mode_info.afmt[1]->offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
1251  rdev->mode_info.afmt[1]->id = 1;
1252  }
1253  if (!ASIC_IS_DCE41(rdev)) {
1254  rdev->mode_info.afmt[2] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1255  if (rdev->mode_info.afmt[2]) {
1256  rdev->mode_info.afmt[2]->offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
1257  rdev->mode_info.afmt[2]->id = 2;
1258  }
1259  rdev->mode_info.afmt[3] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1260  if (rdev->mode_info.afmt[3]) {
1261  rdev->mode_info.afmt[3]->offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
1262  rdev->mode_info.afmt[3]->id = 3;
1263  }
1264  rdev->mode_info.afmt[4] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1265  if (rdev->mode_info.afmt[4]) {
1266  rdev->mode_info.afmt[4]->offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
1267  rdev->mode_info.afmt[4]->id = 4;
1268  }
1269  rdev->mode_info.afmt[5] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1270  if (rdev->mode_info.afmt[5]) {
1271  rdev->mode_info.afmt[5]->offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
1272  rdev->mode_info.afmt[5]->id = 5;
1273  }
1274  }
1275  } else if (ASIC_IS_DCE3(rdev)) {
1276  /* DCE3.x has 2 audio blocks tied to DIG encoders */
1277  rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1278  if (rdev->mode_info.afmt[0]) {
1279  rdev->mode_info.afmt[0]->offset = DCE3_HDMI_OFFSET0;
1280  rdev->mode_info.afmt[0]->id = 0;
1281  }
1282  rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1283  if (rdev->mode_info.afmt[1]) {
1284  rdev->mode_info.afmt[1]->offset = DCE3_HDMI_OFFSET1;
1285  rdev->mode_info.afmt[1]->id = 1;
1286  }
1287  } else if (ASIC_IS_DCE2(rdev)) {
1288  /* DCE2 has at least 1 routable audio block */
1289  rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1290  if (rdev->mode_info.afmt[0]) {
1291  rdev->mode_info.afmt[0]->offset = DCE2_HDMI_OFFSET0;
1292  rdev->mode_info.afmt[0]->id = 0;
1293  }
1294  /* r6xx has 2 routable audio blocks */
1295  if (rdev->family >= CHIP_R600) {
1296  rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1297  if (rdev->mode_info.afmt[1]) {
1298  rdev->mode_info.afmt[1]->offset = DCE2_HDMI_OFFSET1;
1299  rdev->mode_info.afmt[1]->id = 1;
1300  }
1301  }
1302  }
1303 }
1304 
1305 static void radeon_afmt_fini(struct radeon_device *rdev)
1306 {
1307  int i;
1308 
1309  for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++) {
1310  kfree(rdev->mode_info.afmt[i]);
1311  rdev->mode_info.afmt[i] = NULL;
1312  }
1313 }
1314 
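/*
 * One-time modesetting setup: mode config limits, driver properties,
 * i2c buses, CRTCs, encoders/connectors from the BIOS tables, hotplug
 * detection, audio/HDMI blocks, power management and the fbdev
 * emulation.
 */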
1315 int radeon_modeset_init(struct radeon_device *rdev)
1316 {
1317  int i;
1318  int ret;
1319 
1320  drm_mode_config_init(rdev->ddev);
1321  rdev->mode_info.mode_config_initialized = true;
1322 
1323  rdev->ddev->mode_config.funcs = &radeon_mode_funcs;
1324 
1325  if (ASIC_IS_DCE5(rdev)) {
1326  rdev->ddev->mode_config.max_width = 16384;
1327  rdev->ddev->mode_config.max_height = 16384;
1328  } else if (ASIC_IS_AVIVO(rdev)) {
1329  rdev->ddev->mode_config.max_width = 8192;
1330  rdev->ddev->mode_config.max_height = 8192;
1331  } else {
1332  rdev->ddev->mode_config.max_width = 4096;
1333  rdev->ddev->mode_config.max_height = 4096;
1334  }
1335 
1336  rdev->ddev->mode_config.preferred_depth = 24;
1337  rdev->ddev->mode_config.prefer_shadow = 1;
1338 
1339  rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;
1340 
1341  ret = radeon_modeset_create_props(rdev);
1342  if (ret) {
1343  return ret;
1344  }
1345 
1346  /* init i2c buses */
1347  radeon_i2c_init(rdev);
1348 
1349  /* check combios for a valid hardcoded EDID - Sun servers */
1350  if (!rdev->is_atom_bios) {
1351  /* check for hardcoded EDID in BIOS */
1352  rdev->mode_info.bios_hardcoded_edid = radeon_combios_get_hardcoded_edid(rdev);
1353  }
1354 
1355  /* allocate crtcs */
1356  for (i = 0; i < rdev->num_crtc; i++) {
1357  radeon_crtc_init(rdev->ddev, i);
1358  }
1359 
1360  /* okay we should have all the bios connectors */
1361  ret = radeon_setup_enc_conn(rdev->ddev);
1362  if (!ret) {
1363  return ret;
1364  }
1365 
1366  /* init dig PHYs, disp eng pll */
1367  if (rdev->is_atom_bios) {
1368  radeon_atom_encoder_init(rdev);
1369  radeon_atom_disp_eng_pll_init(rdev);
1370  }
1371 
1372  /* initialize hpd */
1373  radeon_hpd_init(rdev);
1374 
1375  /* setup afmt */
1376  radeon_afmt_init(rdev);
1377 
1378  /* Initialize power management */
1379  radeon_pm_init(rdev);
1380 
1381  radeon_fbdev_init(rdev);
1382  drm_kms_helper_poll_init(rdev->ddev);
1383 
1384  return 0;
1385 }
1386 
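/*
 * Tear down everything set up by radeon_modeset_init() in roughly the
 * reverse order.
 */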
1387 void radeon_modeset_fini(struct radeon_device *rdev)
1388 {
1389  radeon_fbdev_fini(rdev);
1390  kfree(rdev->mode_info.bios_hardcoded_edid);
1391  radeon_pm_fini(rdev);
1392 
1393  if (rdev->mode_info.mode_config_initialized) {
1394  radeon_afmt_fini(rdev);
1395  drm_kms_helper_poll_fini(rdev->ddev);
1396  radeon_hpd_fini(rdev);
1397  drm_mode_config_cleanup(rdev->ddev);
1398  rdev->mode_info.mode_config_initialized = false;
1399  }
1400  /* free i2c buses */
1401  radeon_i2c_fini(rdev);
1402 }
1403 
1404 static bool is_hdtv_mode(const struct drm_display_mode *mode)
1405 {
1406  /* try and guess if this is a tv or a monitor */
1407  if ((mode->vdisplay == 480 && mode->hdisplay == 720) || /* 480p */
1408  (mode->vdisplay == 576) || /* 576p */
1409  (mode->vdisplay == 720) || /* 720p */
1410  (mode->vdisplay == 1080)) /* 1080p */
1411  return true;
1412  else
1413  return false;
1414 }
1415 
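/*
 * Fix up the CRTC scaling (RMX) state for the attached encoders:
 * choose the scaler type, copy the panel's native mode, add underscan
 * borders for HDMI TVs and compute the resulting horizontal/vertical
 * scaling ratios.
 */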
1416 bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
1417  const struct drm_display_mode *mode,
1418  struct drm_display_mode *adjusted_mode)
1419 {
1420  struct drm_device *dev = crtc->dev;
1421  struct radeon_device *rdev = dev->dev_private;
1422  struct drm_encoder *encoder;
1423  struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1424  struct radeon_encoder *radeon_encoder;
1425  struct drm_connector *connector;
1426  struct radeon_connector *radeon_connector;
1427  bool first = true;
1428  u32 src_v = 1, dst_v = 1;
1429  u32 src_h = 1, dst_h = 1;
1430 
1431  radeon_crtc->h_border = 0;
1432  radeon_crtc->v_border = 0;
1433 
1434  list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1435  if (encoder->crtc != crtc)
1436  continue;
1437  radeon_encoder = to_radeon_encoder(encoder);
1438  connector = radeon_get_connector_for_encoder(encoder);
1439  radeon_connector = to_radeon_connector(connector);
1440 
1441  if (first) {
1442  /* set scaling */
1443  if (radeon_encoder->rmx_type == RMX_OFF)
1444  radeon_crtc->rmx_type = RMX_OFF;
1445  else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
1446  mode->vdisplay < radeon_encoder->native_mode.vdisplay)
1447  radeon_crtc->rmx_type = radeon_encoder->rmx_type;
1448  else
1449  radeon_crtc->rmx_type = RMX_OFF;
1450  /* copy native mode */
1451  memcpy(&radeon_crtc->native_mode,
1452  &radeon_encoder->native_mode,
1453  sizeof(struct drm_display_mode));
1454  src_v = crtc->mode.vdisplay;
1455  dst_v = radeon_crtc->native_mode.vdisplay;
1456  src_h = crtc->mode.hdisplay;
1457  dst_h = radeon_crtc->native_mode.hdisplay;
1458 
1459  /* fix up for overscan on hdmi */
1460  if (ASIC_IS_AVIVO(rdev) &&
1461  (!(mode->flags & DRM_MODE_FLAG_INTERLACE)) &&
1462  ((radeon_encoder->underscan_type == UNDERSCAN_ON) ||
1463  ((radeon_encoder->underscan_type == UNDERSCAN_AUTO) &&
1464  drm_detect_hdmi_monitor(radeon_connector->edid) &&
1465  is_hdtv_mode(mode)))) {
1466  if (radeon_encoder->underscan_hborder != 0)
1467  radeon_crtc->h_border = radeon_encoder->underscan_hborder;
1468  else
1469  radeon_crtc->h_border = (mode->hdisplay >> 5) + 16;
1470  if (radeon_encoder->underscan_vborder != 0)
1471  radeon_crtc->v_border = radeon_encoder->underscan_vborder;
1472  else
1473  radeon_crtc->v_border = (mode->vdisplay >> 5) + 16;
1474  radeon_crtc->rmx_type = RMX_FULL;
1475  src_v = crtc->mode.vdisplay;
1476  dst_v = crtc->mode.vdisplay - (radeon_crtc->v_border * 2);
1477  src_h = crtc->mode.hdisplay;
1478  dst_h = crtc->mode.hdisplay - (radeon_crtc->h_border * 2);
1479  }
1480  first = false;
1481  } else {
1482  if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
1483  /* WARNING: Right now this can't happen but
1484  * in the future we need to check that scaling
1485  * are consistent across different encoder
1486  * (ie all encoder can work with the same
1487  * scaling).
1488  */
1489  DRM_ERROR("Scaling not consistent across encoder.\n");
1490  return false;
1491  }
1492  }
1493  }
1494  if (radeon_crtc->rmx_type != RMX_OFF) {
1495  fixed20_12 a, b;
1496  a.full = dfixed_const(src_v);
1497  b.full = dfixed_const(dst_v);
1498  radeon_crtc->vsc.full = dfixed_div(a, b);
1499  a.full = dfixed_const(src_h);
1500  b.full = dfixed_const(dst_h);
1501  radeon_crtc->hsc.full = dfixed_div(a, b);
1502  } else {
1503  radeon_crtc->vsc.full = dfixed_const(1);
1504  radeon_crtc->hsc.full = dfixed_const(1);
1505  }
1506  return true;
1507 }
1508 
1509 /*
1510  * Retrieve current video scanout position of crtc on a given gpu.
1511  *
1512  * \param dev Device to query.
1513  * \param crtc Crtc to query.
1514  * \param *vpos Location where vertical scanout position should be stored.
1515  * \param *hpos Location where horizontal scanout position should go.
1516  *
1517  * Returns vpos as a positive number while in active scanout area.
1518  * Returns vpos as a negative number inside vblank, counting the number
1519  * of scanlines to go until end of vblank, e.g., -1 means "one scanline
1520  * until start of active scanout / end of vblank."
1521  *
1522  * \return Flags, or'ed together as follows:
1523  *
1524  * DRM_SCANOUTPOS_VALID = Query successful.
1525  * DRM_SCANOUTPOS_INVBL = Inside vblank.
1526  * DRM_SCANOUTPOS_ACCURATE = Returned position is accurate. A lack of
1527  * this flag means that returned position may be offset by a constant but
1528  * unknown small number of scanlines wrt. real scanout position.
1529  *
1530  */
1531 int radeon_get_crtc_scanoutpos(struct drm_device *dev, int crtc, int *vpos, int *hpos)
1532 {
1533  u32 stat_crtc = 0, vbl = 0, position = 0;
1534  int vbl_start, vbl_end, vtotal, ret = 0;
1535  bool in_vbl = true;
1536 
1537  struct radeon_device *rdev = dev->dev_private;
1538 
1539  if (ASIC_IS_DCE4(rdev)) {
1540  if (crtc == 0) {
1545  ret |= DRM_SCANOUTPOS_VALID;
1546  }
1547  if (crtc == 1) {
1552  ret |= DRM_SCANOUTPOS_VALID;
1553  }
1554  if (crtc == 2) {
1559  ret |= DRM_SCANOUTPOS_VALID;
1560  }
1561  if (crtc == 3) {
1566  ret |= DRM_SCANOUTPOS_VALID;
1567  }
1568  if (crtc == 4) {
1573  ret |= DRM_SCANOUTPOS_VALID;
1574  }
1575  if (crtc == 5) {
1580  ret |= DRM_SCANOUTPOS_VALID;
1581  }
1582  } else if (ASIC_IS_AVIVO(rdev)) {
1583  if (crtc == 0) {
1586  ret |= DRM_SCANOUTPOS_VALID;
1587  }
1588  if (crtc == 1) {
1591  ret |= DRM_SCANOUTPOS_VALID;
1592  }
1593  } else {
1594  /* Pre-AVIVO: Different encoding of scanout pos and vblank interval. */
1595  if (crtc == 0) {
1596  /* Assume vbl_end == 0, get vbl_start from
1597  * upper 16 bits.
1598  */
1601  /* Only retrieve vpos from upper 16 bits, set hpos == 0. */
1603  stat_crtc = RREG32(RADEON_CRTC_STATUS);
1604  if (!(stat_crtc & 1))
1605  in_vbl = false;
1606 
1607  ret |= DRM_SCANOUTPOS_VALID;
1608  }
1609  if (crtc == 1) {
1613  stat_crtc = RREG32(RADEON_CRTC2_STATUS);
1614  if (!(stat_crtc & 1))
1615  in_vbl = false;
1616 
1617  ret |= DRM_SCANOUTPOS_VALID;
1618  }
1619  }
1620 
1621  /* Decode into vertical and horizontal scanout position. */
1622  *vpos = position & 0x1fff;
1623  *hpos = (position >> 16) & 0x1fff;
1624 
1625  /* Valid vblank area boundaries from gpu retrieved? */
1626  if (vbl > 0) {
1627  /* Yes: Decode. */
1628  ret |= DRM_SCANOUTPOS_ACCURATE;
1629  vbl_start = vbl & 0x1fff;
1630  vbl_end = (vbl >> 16) & 0x1fff;
1631  }
1632  else {
1633  /* No: Fake something reasonable which gives at least ok results. */
1634  vbl_start = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vdisplay;
1635  vbl_end = 0;
1636  }
1637 
1638  /* Test scanout position against vblank region. */
1639  if ((*vpos < vbl_start) && (*vpos >= vbl_end))
1640  in_vbl = false;
1641 
1642  /* Check if inside vblank area and apply corrective offsets:
1643  * vpos will then be >=0 in video scanout area, but negative
1644  * within vblank area, counting down the number of lines until
1645  * start of scanout.
1646  */
1647 
1648  /* Inside "upper part" of vblank area? Apply corrective offset if so: */
1649  if (in_vbl && (*vpos >= vbl_start)) {
1650  vtotal = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vtotal;
1651  *vpos = *vpos - vtotal;
1652  }
1653 
1654  /* Correct for shifted end of vbl at vbl_end. */
1655  *vpos = *vpos - vbl_end;
1656 
1657  /* In vblank? */
1658  if (in_vbl)
1659  ret |= DRM_SCANOUTPOS_INVBL;
1660 
1661  return ret;
1662 }