	u32 sctl = nv_rd32(device, 0x4120 + (clk * 4));
	if ((sctl & 0x00000030) != 0x00000030)
		return read_pll(dev, 0x41, 0x00e820);
	return read_pll(dev, 0x42, 0x00e8a0);
}
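
/* read_clk: decode the frequency (kHz) selected by source-control register
 * 0x4120 + 4*clk; the fields below suggest crystal, fixed 100/108MHz, and
 * divided-VCO sources.
 */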
static u32
read_clk(struct drm_device *dev, int clk, bool ignore_en)
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	u32 sctl, sdiv, sclk;
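
	/* clk >= 0x40 appears to be the fixed reference for the 0xe8xx PLLs;
	 * MCP89 (chipset 0xaf) seemingly reports it via 0x471c, in MHz
	 */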
	if (nv_device(drm->device)->chipset == 0xaf) {
		return nv_rd32(device, 0x00471c) * 1000;
	}
	sctl = nv_rd32(device, 0x4120 + (clk * 4));
	if (!ignore_en && !(sctl & 0x00000100))
		return 0;
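
	/* bits 13:12 of sctl select the source; bit 8 is presumably the enable */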
	switch (sctl & 0x00003000) {
	case 0x00002000:
		if (sctl & 0x00000040)
			return 108000;
		return 100000;
	case 0x00003000:
		sclk = read_vco(dev, clk);
		sdiv = ((sctl & 0x003f0000) >> 16) + 2;
		return (sclk * 2) / sdiv;
	}
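
/* read_pll: compute a PLL's output frequency from its ctrl/coef registers;
 * the signature below is reconstructed from call sites such as
 * read_pll(dev, 0x41, 0x00e820) above
 */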
static u32
read_pll(struct drm_device *dev, int clk, u32 pll)
{
	struct nouveau_device *device = nouveau_dev(dev);
	u32 ctrl = nv_rd32(device, pll + 0);
	u32 sclk = 0, P = 1, N = 1, M = 1;

	if (!(ctrl & 0x00000008)) {
		if (ctrl & 0x00000001) {
			u32 coef = nv_rd32(device, pll + 4);
			M = (coef & 0x000000ff) >> 0;
			N = (coef & 0x0000ff00) >> 8;
			P = (coef & 0x003f0000) >> 16;

			/* no post-divider on the 0xe8xx PLLs */
			if ((pll & 0x00ff00) == 0x00e800)
				P = 1;

			sclk = read_clk(dev, 0x00 + clk, false);
		}
	} else {
		sclk = read_clk(dev, 0x10 + clk, false);
	}

	if (M * P)
		return sclk * N / (M * P);
	return 0;
}
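
/* calc_clk: choose settings for the requested frequency (kHz) -- either a
 * plain divider in reg->clk or PLL coefficients in reg->pll
 */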
	NV_DEBUG(drm, "no clock for 0x%04x/0x%02x\n", pll, clk);
	switch (khz) {
	case 27000:
		reg->clk = 0x00000100;
		return khz;
	case 100000:
		reg->clk = 0x00002100;
		return khz;
	case 108000:
		reg->clk = 0x00002140;
		return khz;
	default:
		sclk = read_vco(dev, clk);
		sdiv = min((sclk * 2) / (khz - 2999), (u32)65);
		oclk = (sclk * 2) / sdiv;
		diff = khz - oclk;
		/* prefer the divider when it lands within [-2000, 3000) kHz */
		if (!pll || (diff >= -2000 && diff < 3000)) {
			reg->clk = (((sdiv - 2) << 16) | 0x00003100);
			return oclk;
		}

		if (!pll) {
			NV_ERROR(drm, "bad freq %02x: %d %d\n", clk, khz, sclk);
			return -ERANGE;
		}
	}
	limits.refclk = read_clk(dev, clk - 0x10, true);
	ret = nva3_calc_pll(dev, &limits, khz, &N, NULL, &M, &P);
	if (ret >= 0) {
		reg->clk = nv_rd32(device, 0x4120 + (clk * 4));
		reg->pll = (P << 16) | (N << 8) | M;
	}
	return ret;
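
/* prog_pll: apply calc_clk's result; clk indexes the source-control pair and
 * pll is the PLL register base (per the prog_pll calls in clocks_set below)
 */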
	const u32 src0 = 0x004120 + (clk * 4);
	const u32 src1 = 0x004160 + (clk * 4);
	const u32 ctrl = pll + 0;
	const u32 coef = pll + 4;
	if (!reg->clk && !reg->pll) {
		NV_DEBUG(drm, "no clock for %02x\n", clk);
		return;
	}
	if (reg->pll) {
		nv_mask(device, src0, 0x00000101, 0x00000101);
		nv_wr32(device, coef, reg->pll);
		nv_mask(device, ctrl, 0x00000015, 0x00000015);
		nv_mask(device, ctrl, 0x00000010, 0x00000000);
		nv_wait(device, ctrl, 0x00020000, 0x00020000);
		nv_mask(device, ctrl, 0x00000010, 0x00000010);
		nv_mask(device, ctrl, 0x00000008, 0x00000000);
		nv_mask(device, src1, 0x00000100, 0x00000000);
		nv_mask(device, src1, 0x00000001, 0x00000000);
	} else {
		nv_mask(device, src1, 0x003f3141, 0x00000101 | reg->clk);
		nv_mask(device, ctrl, 0x00000018, 0x00000018);
		nv_mask(device, ctrl, 0x00000001, 0x00000000);
		nv_mask(device, src0, 0x00000100, 0x00000000);
		nv_mask(device, src0, 0x00000001, 0x00000000);
	}
}
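
/* prog_clk: divider-only domains just get their source-control field updated */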
	if (!reg->clk) {
		NV_DEBUG(drm, "no clock for %02x\n", clk);
		return;
	}

	nv_mask(device, 0x004120 + (clk * 4), 0x003f3141, 0x00000101 | reg->clk);
}
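
/* nva3_pm_clocks_get: read back the current frequency of each domain */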
	perflvl->core   = read_pll(dev, 0x00, 0x4200);
	perflvl->shader = read_pll(dev, 0x01, 0x4220);
	perflvl->memory = read_pll(dev, 0x02, 0x4000);
	perflvl->unka0  = read_clk(dev, 0x20, false);
	perflvl->vdec   = read_clk(dev, 0x21, false);
	perflvl->daemon = read_clk(dev, 0x25, false);
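
/* nva3_pm_clocks_pre: precompute settings for each domain; slots 0x10-0x12
 * pair with the PLLs at 0x4200/0x4220/0x4000 read back above
 */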
	ret = calc_clk(dev, 0x10, 0x4200, perflvl->core, &info->nclk);
	if (ret < 0)
		goto out; /* error-path label assumed */

	ret = calc_clk(dev, 0x11, 0x4220, perflvl->shader, &info->sclk);
	if (ret < 0)
		goto out;

	ret = calc_clk(dev, 0x12, 0x4000, perflvl->memory, &info->mclk);
	if (ret < 0)
		goto out;

	ret = calc_clk(dev, 0x20, 0x0000, perflvl->unka0, &info->unka0);
	if (ret < 0)
		goto out;

	ret = calc_clk(dev, 0x21, 0x0000, perflvl->vdec, &info->vdec);
	if (ret < 0)
		goto out;
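
/* idle predicate for nv_wait_cb(): the ctxprog counts as idle once busy bit 0
 * of 0x400304 clears, or it is parked at 0x0050001c (the value written to
 * 0x400328 in clocks_set below)
 */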
static bool
nva3_pm_grcp_idle(void *data)
{
	struct drm_device *dev = data;
	struct nouveau_device *device = nouveau_dev(dev);

	if (!(nv_rd32(device, 0x400304) & 0x00000001))
		return true;
	if (nv_rd32(device, 0x400308) == 0x0050001c)
		return true;
	return false;
}
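
/* memory reclock callbacks -- names follow the exec function table below */
/* mclk_precharge: writing 0x1002d4 presumably issues a precharge-all */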
	nv_wr32(device, 0x1002d4, 0x00000001);
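
/* mclk_refresh: writing 0x1002d0 presumably forces a refresh cycle */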
	nv_wr32(device, 0x1002d0, 0x00000001);
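
/* mclk_refresh_auto: bit 31 of 0x100210 seemingly gates auto-refresh */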
	nv_wr32(device, 0x100210, enable ? 0x80000000 : 0x00000000);
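
/* mclk_refresh_self: bit 0 of 0x1002dc seemingly toggles self-refresh */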
	nv_wr32(device, 0x1002dc, enable ? 0x00000001 : 0x00000000);
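
/* mclk_wait: dummy register read to flush posted writes, then a rounded-up
 * conversion of the requested nsec delay to udelay()
 */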
	volatile u32 post = nv_rd32(device, 0); (void)post;
	udelay((nsec + 500) / 1000);
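
/* mclk_mrg/mclk_mrs: mode-register access; MR0/1 live at 0x1002c0+, MR2/3 at
 * 0x1002e0+, and the second rank's copy is apparently written at +8
 */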
	if (mr <= 1)
		return nv_rd32(device, 0x1002c0 + ((mr - 0) * 4));
	if (mr <= 3)
		return nv_rd32(device, 0x1002e0 + ((mr - 2) * 4));
	if (mr <= 1) {
		if (pfb->ram.ranks > 1)
			nv_wr32(device, 0x1002c8 + ((mr - 0) * 4), data);
		nv_wr32(device, 0x1002c0 + ((mr - 0) * 4), data);
	} else
	if (mr <= 3) {
		if (pfb->ram.ranks > 1)
			nv_wr32(device, 0x1002e8 + ((mr - 2) * 4), data);
		nv_wr32(device, 0x1002e0 + ((mr - 2) * 4), data);
	}
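
/* mclk_clock_set: switch the memory clock onto its new PLL/divider source */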
	ctrl = nv_rd32(device, 0x004000);
	if (!(ctrl & 0x00000008) && info->mclk.pll) {
		nv_wr32(device, 0x004000, (ctrl |=  0x00000008));
		nv_mask(device, 0x1110e0, 0x00088000, 0x00088000);
		nv_wr32(device, 0x004018, 0x00001000);
		nv_wr32(device, 0x004000, (ctrl &= ~0x00000001));
		nv_wr32(device, 0x004004, info->mclk.pll);
		nv_wr32(device, 0x004000, (ctrl |=  0x00000001));
		nv_wr32(device, 0x004018, 0x00005000 | info->r004018);
	} else
	if (!info->mclk.pll) {
		nv_mask(device, 0x004168, 0x003f3040, info->mclk.clk);
		nv_wr32(device, 0x004000, (ctrl |= 0x00000008));
		nv_mask(device, 0x1110e0, 0x00088000, 0x00088000);
		nv_wr32(device, 0x004018, 0x0000d000 | info->r004018);
	}
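
	/* ramcfg/rammap-driven memory interface setup */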
		u32 unk804 = (info->ramcfg[9] & 0xf0) << 16 |
			     (info->ramcfg[3] & 0x0f) << 16 |
			     (info->ramcfg[9] & 0x0f) |
			     0x80000000; /* bit 31 inferred from the clear in the else path */
		nv_wr32(device, 0x1005a0, unk5a0);
		nv_wr32(device, 0x1005a4, unk5a4);
		nv_wr32(device, 0x10f804, unk804);
		nv_mask(device, 0x10053c, 0x00001000, 0x00000000);
	} else {
		nv_mask(device, 0x10053c, 0x00001000, 0x00001000);
		nv_mask(device, 0x10f804, 0x80000000, 0x00000000);
		nv_mask(device, 0x100760, 0x22222222, info->r100760);
		nv_mask(device, 0x1007a0, 0x22222222, info->r100760);
		nv_mask(device, 0x1007e0, 0x22222222, info->r100760);
	}
	if (info->mclk.pll) {
		nv_mask(device, 0x1110e0, 0x00088000, 0x00011000);
		nv_wr32(device, 0x004000, (ctrl &= ~0x00000008));
	}
}
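
/* mclk_timing_set: write the perflvl's nine timing registers, then assorted
 * ramcfg-driven tweaks to 0x100714/0x100718/0x10071c
 */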
	for (i = 0; i < 9; i++)
		nv_wr32(device, 0x100220 + (i * 4), perflvl->timing.reg[i]);
	u32 data = (info->ramcfg[2] & 0x08) ? 0x00000000 : 0x00001000;
	nv_mask(device, 0x100200, 0x00001000, data);
	u32 unk714 = nv_rd32(device, 0x100714) & ~0xf0000010;
	u32 unk718 = nv_rd32(device, 0x100718) & ~0x00000100;
	u32 unk71c = nv_rd32(device, 0x10071c) & ~0x00000100;
	if ( (info->ramcfg[2] & 0x20))
		unk714 |= 0xf0000000;
	if (!(info->ramcfg[2] & 0x04))
		unk714 |= 0x00000010;
	nv_wr32(device, 0x100714, unk714);

	if (info->ramcfg[2] & 0x01)
		unk71c |= 0x00000100;
	nv_wr32(device, 0x10071c, unk71c);

	if (info->ramcfg[2] & 0x02)
		unk718 |= 0x00000100;
	nv_wr32(device, 0x100718, unk718);

	if (info->ramcfg[2] & 0x10)
		nv_wr32(device, 0x111100, 0x48000000);
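
/* callback table handed to the common memory reclocking logic */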
		.precharge = mclk_precharge,
		.refresh = mclk_refresh,
		.refresh_auto = mclk_refresh_auto,
		.refresh_self = mclk_refresh_self,
		.clock_set = mclk_clock_set,
		.timing_set = mclk_timing_set,
	if (info->perflvl->memory <= 750000) {
		info->r004018 = 0x10000000; /* assumed defaults for low clocks */
		info->r100760 = 0x22222222; /* matches the 0x1007x0 masks above */
	}
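
	/* bit 3 of 0x004000 appears to be a PLL bypass; with it already set the
	 * new coefficients can be written and re-locked safely
	 */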
	ctrl = nv_rd32(device, 0x004000);
	if (ctrl & 0x00000008) {
		if (info->mclk.pll) {
			nv_mask(device, 0x004128, 0x00000101, 0x00000101);
			nv_wr32(device, 0x004004, info->mclk.pll);
			nv_wr32(device, 0x004000, (ctrl |= 0x00000001));
			nv_wr32(device, 0x004000, (ctrl &= 0xffffffef));
			nv_wait(device, 0x004000, 0x00020000, 0x00020000);
			nv_wr32(device, 0x004000, (ctrl |= 0x00000010));
			nv_wr32(device, 0x004018, 0x00005000 | info->r004018);
			nv_wr32(device, 0x004000, (ctrl |= 0x00000004));
		}
	} else {
		u32 ssel = 0x00000101;
		if (info->mclk.clk)
			ssel |= info->mclk.clk;
		nv_mask(device, 0x004168, 0x003f3141, ssel);
	}
	if (info->ramcfg[2] & 0x10) {
		nv_mask(device, 0x111104, 0x00000600, 0x00000000);
	} else {
		nv_mask(device, 0x111100, 0x40000000, 0x40000000);
		nv_mask(device, 0x111104, 0x00000180, 0x00000000);
	}
	nv_mask(device, 0x100200, 0x00000800, 0x00000000);
	nv_wr32(device, 0x611200, 0x00003300);
	if (!(info->ramcfg[2] & 0x10))
		nv_wr32(device, 0x111100, 0x4c020000);
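
	/* the mode-register/refresh sequence itself presumably runs here through
	 * the exec callbacks registered above
	 */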
	nv_wr32(device, 0x611200, 0x00003330);
	nv_mask(device, 0x100200, 0x00000800, 0x00000800);
	if (info->ramcfg[2] & 0x10) {
		nv_mask(device, 0x111104, 0x00000180, 0x00000180);
		nv_mask(device, 0x111100, 0x40000000, 0x00000000);
	} else {
		nv_mask(device, 0x111104, 0x00000600, 0x00000600);
	}
	if (info->mclk.pll) {
		nv_mask(device, 0x004168, 0x00000001, 0x00000000);
		nv_mask(device, 0x004168, 0x00000100, 0x00000000);
	} else {
		nv_mask(device, 0x004000, 0x00000001, 0x00000000);
		nv_mask(device, 0x004128, 0x00000001, 0x00000000);
		nv_mask(device, 0x004128, 0x00000100, 0x00000000);
	}
}
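
/* nva3_pm_clocks_set: stall the ctxprog and freeze PFIFO before switching
 * the engine clocks
 */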
	nv_wr32(device, 0x400324, 0x00000000);
	nv_wr32(device, 0x400328, 0x0050001c);
	if (!nv_wait_cb(device, nva3_pm_grcp_idle, dev)) {
		NV_ERROR(drm, "pm: ctxprog didn't go idle\n");
		goto cleanup; /* error-path label assumed */
	}
	nv_mask(device, 0x002504, 0x00000001, 0x00000001);
	if (!nv_wait(device, 0x002504, 0x00000010, 0x00000010)) {
		NV_ERROR(drm, "pm: fifo didn't go idle\n");
		goto cleanup;
	}
	prog_pll(dev, 0x00, 0x004200, &info->nclk);
	prog_pll(dev, 0x01, 0x004220, &info->sclk);
	prog_clk(dev, 0x20, &info->unka0);
	prog_clk(dev, 0x21, &info->vdec);

	if (info->mclk.clk || info->mclk.pll)
		prog_mem(dev, info); /* assumed entry point for the memory path above */
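
/* exit path: unfreeze PFIFO and restore the ctxprog wait flags */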
cleanup:
	nv_mask(device, 0x002504, 0x00000001, 0x00000000);
	nv_wr32(device, 0x400324, 0x00000000);
	nv_wr32(device, 0x400328, 0x0070009c);
	if (nv_rd32(device, 0x400308) == 0x0050001c)
		nv_mask(device, 0x400824, 0x10000000, 0x10000000);