        u32 ssrc = nv_rd32(device, dsrc);
        if (!(ssrc & 0x00000100))
                return read_pll(dev, 0x00e800);
        return read_pll(dev, 0x00e820);
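
/* PLL readback (presumably read_pll(), matching the calls above): the control
 * word is at +0 and the coefficient word at +4.  P, N and M are unpacked below
 * and the output is refclk * N / M / P, where refclk depends on which PLL is
 * being read (see the switch on the register range further down).
 */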
        u32 ctrl = nv_rd32(device, pll + 0);
        u32 coef = nv_rd32(device, pll + 4);
        u32 P = (coef & 0x003f0000) >> 16;
        u32 N = (coef & 0x0000ff00) >> 8;
        u32 M = (coef & 0x000000ff) >> 0;

        if (!(ctrl & 0x00000001))
                return 0;

        switch (pll & 0xfff000) {
        case 0x137000:
                doff = (pll - 0x137000) / 0x20;
                sclk = read_div(dev, doff, 0x137120, 0x137140);
                break;
        case 0x132000:
                switch (pll) {
                case 0x132000:
                        sclk = read_pll(dev, 0x132020);
                        break;
                case 0x132020:
                        sclk = read_div(dev, 0, 0x137320, 0x137330);
                        break;
                }
                break;
        }

        return sclk * N / M / P;
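
/* Divider/source readback (the read_div() helper used above): the low bits of
 * the source register pick between the fixed reference clocks and the VCO,
 * and the control register can apply a post-divider biased by 2, i.e.
 * out = in * 2 / ((sctl & 0x3f) + 2).
 */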
        u32 ssrc = nv_rd32(device, dsrc + (doff * 4));
        u32 sctl = nv_rd32(device, dctl + (doff * 4));

        switch (ssrc & 0x00000003) {
        case 0:
                if ((ssrc & 0x00030000) != 0x00030000)
                        return 27000;
                return 108000;
        case 2:
                return 100000;
        case 3:
                if (sctl & 0x80000000) {
                        u32 sclk = read_vco(dev, dsrc + (doff * 4));
                        u32 sdiv = (sctl & 0x0000003f) + 2;
                        return (sclk * 2) / sdiv;
                }

                return read_vco(dev, dsrc + (doff * 4));
        }
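
/* Memory clock readback: bit 0 of 0x1373f0 reports whether the memory clock
 * currently comes from the divider path (0x137300/0x137310) or from the PLL
 * at 0x132000.
 */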
        u32 ssel = nv_rd32(device, 0x1373f0);
        if (ssel & 0x00000001)
                return read_div(dev, 0, 0x137300, 0x137310);
        return read_pll(dev, 0x132000);
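
/* Per-engine clock readback: bit 'clk' of 0x137100 selects between the PLL
 * path and the divider/source path for that domain, and 0x137250 + clk * 4
 * holds the post-divider applied on top of either path.
 */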
        u32 sctl = nv_rd32(device, 0x137250 + (clk * 4));
        u32 ssel = nv_rd32(device, 0x137100);
        u32 sclk, sdiv;

        if (ssel & (1 << clk)) {
                if (clk < 7)
                        sclk = read_pll(dev, 0x137000 + (clk * 0x20));
                else
                        sclk = read_pll(dev, 0x1370e0);
                sdiv = ((sctl & 0x00003f00) >> 8) + 2;
        } else {
                sclk = read_div(dev, clk, 0x137160, 0x1371d0);
                sdiv = ((sctl & 0x0000003f) >> 0) + 2;
        }

        if (sctl & 0x80000000)
                return (sclk * 2) / sdiv;
        return sclk;
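
/* Readback of a full performance level: each perflvl field is mapped to one
 * of the clock indices used above (0x00 shader, 0x01 rop, 0x02/0x07/0x08 hub,
 * 0x09 copy, 0x0c daemon, 0x0e vdec), with memory read separately.
 */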
        perflvl->shader = read_clk(dev, 0x00);
        perflvl->memory = read_mem(dev);
        perflvl->rop    = read_clk(dev, 0x01);
        perflvl->hub07  = read_clk(dev, 0x02);
        perflvl->hub06  = read_clk(dev, 0x07);
        perflvl->hub01  = read_clk(dev, 0x08);
        perflvl->copy   = read_clk(dev, 0x09);
        perflvl->daemon = read_clk(dev, 0x0c);
        perflvl->vdec   = read_clk(dev, 0x0e);
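
/* Divider calculation (calc_div()): mirrors the readback above, where the
 * hardware field is biased by 2 and the (ref * 2) / div form gives half-step
 * divider resolution.  E.g. ref = 1620000 kHz with div = 5 yields 648000 kHz,
 * which an integer-only divider could not produce.
 */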
        return (ref * 2) / div;
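
/* Source selection (calc_src(), called below): when no fixed reference fits,
 * the excerpt falls back to the VCO and divides it down with calc_div().
 */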
        sclk = read_vco(dev, clk);
        sclk = calc_div(dev, clk, sclk, freq, ddiv);
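
/* PLL coefficient calculation (calc_pll()): the reference clock comes from
 * the 0x137120/0x137140 divider, nva3_calc_pll() picks N, M and P, and the
 * result is packed in the same (P << 16) | (N << 8) | M layout decoded by the
 * readback path above.
 */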
        limits.refclk = read_div(dev, clk, 0x137120, 0x137140);

        ret = nva3_calc_pll(dev, &limits, freq, &N, NULL, &M, &P);

        *coef = (P << 16) | (N << 8) | M;
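
/* Engine clock selection (calc_clk()): two candidates are computed, a pure
 * divider/source path (clk0) and a PLL path (clk1, only for domains flagged
 * in the 0x00004387 mask), and whichever lands closer to the requested
 * frequency decides the dsrc/ddiv or coef/mdiv/ssel values programmed later
 * by prog_clk().
 */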
        u32 src0, div0, div1D, div1P = 0;

        clk0 = calc_src(dev, clk, freq, &src0, &div0);
        clk0 = calc_div(dev, clk, clk0, freq, &div1D);

        if (clk0 != freq && (0x00004387 & (1 << clk))) {
                if (clk <= 7)
                        clk1 = calc_pll(dev, clk, freq, &info->coef);
                else
                        clk1 = read_pll(dev, 0x1370e0);
                clk1 = calc_div(dev, clk, clk1, freq, &div1P);
        }

        if (abs((int)freq - clk0) <= abs((int)freq - clk1)) {
                info->ddiv |= 0x80000000;
                info->ddiv |= div0 << 8;

                info->mdiv |= 0x80000000;
        } else {
                info->mdiv |= 0x80000000;
                info->mdiv |= div1P << 8;

                info->ssel = (1 << clk);
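
/* Memory PLL setup (calc_mem()): the reference PLL at 0x132020 must already
 * be running before new coefficients are computed, so the block below brings
 * it up (source/coefficient writes at 0x137320/0x137330, then enable and wait
 * on the 0x137390 status bits) if it was found disabled.
 */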
        ctrl = nv_rd32(device, 0x132020);
        if (!(ctrl & 0x00000001)) {
                nv_wr32(device, 0x137320, 0x00000103);
                nv_wr32(device, 0x137330, 0x81200606);
                nv_wait(device, 0x132020, 0x00010000, 0x00010000);
                nv_wr32(device, 0x132024, 0x0001150f);
                nv_mask(device, 0x132020, 0x00000001, 0x00000001);
                nv_wait(device, 0x137390, 0x00020000, 0x00020000);
                nv_mask(device, 0x132020, 0x00000004, 0x00000004);
        }

        pll.refclk = read_pll(dev, 0x132020);

        ret = nva3_calc_pll(dev, &pll, freq, &N, NULL, &M, &P);

        info->coef = (P << 16) | (N << 8) | M;
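
/* Pre-calculation for a performance level change: every clock domain in the
 * perflvl is converted into an info->eng[] entry (and info->mem for memory)
 * before any register is touched, so a failure in any single domain can
 * abort the whole transition cleanly.
 */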
        if ((ret = calc_clk(dev, 0x00, &info->eng[0x00], perflvl->shader)) ||
            (ret = calc_clk(dev, 0x01, &info->eng[0x01], perflvl->rop)) ||
            (ret = calc_clk(dev, 0x02, &info->eng[0x02], perflvl->hub07)) ||
            (ret = calc_clk(dev, 0x07, &info->eng[0x07], perflvl->hub06)) ||
            (ret = calc_clk(dev, 0x08, &info->eng[0x08], perflvl->hub01)) ||
            (ret = calc_clk(dev, 0x09, &info->eng[0x09], perflvl->copy)) ||
            (ret = calc_clk(dev, 0x0c, &info->eng[0x0c], perflvl->daemon)) ||
            (ret = calc_clk(dev, 0x0e, &info->eng[0x0e], perflvl->vdec))) {
                kfree(info);
                return ERR_PTR(ret);
        }

        ret = calc_mem(dev, &info->mem, perflvl->memory);
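
/* Programming an engine clock (prog_clk()): first set up the bypass divider
 * and source, drop the domain onto that path by clearing its bit in 0x137100,
 * then reprogram and re-lock the PLL (0x137000 + clk * 0x20), and finally
 * switch back via ssel and set the post-divider in 0x137250.
 */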
        nv_mask(device, 0x1371d0 + (clk * 0x04), 0x80003f3f, info->ddiv);
        nv_wr32(device, 0x137160 + (clk * 0x04), info->dsrc);

        nv_mask(device, 0x137100, (1 << clk), 0x00000000);
        nv_wait(device, 0x137100, (1 << clk), 0x00000000);

        u32 base = 0x137000 + (clk * 0x20);
        u32 ctrl = nv_rd32(device, base + 0x00);
        if (ctrl & 0x00000001) {
                nv_mask(device, base + 0x00, 0x00000004, 0x00000000);
                nv_mask(device, base + 0x00, 0x00000001, 0x00000000);
        }

        nv_wr32(device, base + 0x04, info->coef);
        nv_mask(device, base + 0x00, 0x00000001, 0x00000001);
        nv_wait(device, base + 0x00, 0x00020000, 0x00020000);
        nv_mask(device, base + 0x00, 0x00020004, 0x00000004);

        nv_mask(device, 0x137100, (1 << clk), info->ssel);
        nv_mask(device, 0x137250 + (clk * 0x04), 0x00003f3f, info->mdiv);
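
/* Memory reclock callbacks: the helpers below handle DRAM refresh gating
 * (0x10f210), settle delays, and mode-register access, with what appear to be
 * separate register layouts depending on the memory type (GDDR5 vs. others).
 */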
        nv_wr32(device, 0x10f210, enable ? 0x80000000 : 0x00000000);
        udelay((nsec + 500) / 1000);
        if (mr <= 1)
                return nv_rd32(device, 0x10f300 + ((mr - 0) * 4));
        return nv_rd32(device, 0x10f320 + ((mr - 2) * 4));

        if (mr == 0)
                return nv_rd32(device, 0x10f300 + (mr * 4));
        else if (mr <= 7)
                return nv_rd32(device, 0x10f32c + (mr * 4));
        return nv_rd32(device, 0x10f34c);
        if (mr <= 1) {
                nv_wr32(device, 0x10f300 + ((mr - 0) * 4), data);
                if (pfb->ram.ranks > 1)
                        nv_wr32(device, 0x10f308 + ((mr - 0) * 4), data);
        } else if (mr <= 3) {
                nv_wr32(device, 0x10f320 + ((mr - 2) * 4), data);
                if (pfb->ram.ranks > 1)
                        nv_wr32(device, 0x10f328 + ((mr - 2) * 4), data);
        }

        if (mr == 0) nv_wr32(device, 0x10f300 + (mr * 4), data);
        else if (mr <= 7) nv_wr32(device, 0x10f32c + (mr * 4), data);
        else if (mr == 15) nv_wr32(device, 0x10f34c, data);
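
/* Memory PLL switch (mclk_clock_set()): what looks like a bypass handshake
 * via 0x137360/0x137370/0x137380, then the PLL at 0x132000 is disabled, new
 * coefficients are written to 0x132004, it is re-enabled and lock is awaited
 * on 0x137390 before the handshake is undone.
 */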
        u32 ctrl = nv_rd32(device, 0x132000);

        nv_wr32(device, 0x137360, 0x00000001);
        nv_wr32(device, 0x137370, 0x00000000);
        nv_wr32(device, 0x137380, 0x00000000);
        if (ctrl & 0x00000001)
                nv_wr32(device, 0x132000, (ctrl &= ~0x00000001));

        nv_wr32(device, 0x132004, info->mem.coef);
        nv_wr32(device, 0x132000, (ctrl |= 0x00000001));
        nv_wait(device, 0x137390, 0x00000002, 0x00000002);
        nv_wr32(device, 0x132018, 0x00005000);

        nv_wr32(device, 0x137370, 0x00000001);
        nv_wr32(device, 0x137380, 0x00000001);
        nv_wr32(device, 0x137360, 0x00000000);

        for (i = 0; i < 5; i++)
                nv_wr32(device, 0x10f290 + (i * 4), perflvl->timing.reg[i]);
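
/* The callbacks above are collected into a function table and, presumably,
 * handed to the shared memory-reclock sequencer (nouveau_mem_exec), which
 * drives the precharge/refresh/MRS steps around the actual clock switch.
 */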
        .precharge = mclk_precharge,
        .refresh = mclk_refresh,
        .refresh_auto = mclk_refresh_auto,
        .refresh_self = mclk_refresh_self,
        .wait = mclk_wait,
        .mrg = mclk_mrg,
        .mrs = mclk_mrs,
        .clock_set = mclk_clock_set,
        .timing_set = mclk_timing_set,
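
/* What appears to be a display-engine quiesce around the memory transition:
 * the first pair of writes below stalls scanout (0x611200 on the older
 * display engine, 0x62c000 on the newer one, presumably selected by chipset),
 * and the second pair with the extra low bits set releases it afterwards.
 */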
        nv_wr32(device, 0x611200, 0x00003300);
        nv_wr32(device, 0x62c000, 0x03030000);

        nv_wr32(device, 0x611200, 0x00003330);
        nv_wr32(device, 0x62c000, 0x03030300);
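
/* Final programming pass: walk all 16 engine clock slots and program only the
 * ones that were given a frequency during the pre-calculation step.
 */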
        for (i = 0; i < 16; i++) {
                if (!info->eng[i].freq)
                        continue;
                prog_clk(dev, i, &info->eng[i]);
        }