drivers/gpu/drm/radeon/ni_dpm.c
1 /*
2  * Copyright 2012 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  */
23
24 #include "drmP.h"
25 #include "radeon.h"
26 #include "nid.h"
27 #include "r600_dpm.h"
28 #include "ni_dpm.h"
29 #include "atom.h"
30 #include <linux/math64.h>
31 #include <linux/seq_file.h>
32
33 #define MC_CG_ARB_FREQ_F0           0x0a
34 #define MC_CG_ARB_FREQ_F1           0x0b
35 #define MC_CG_ARB_FREQ_F2           0x0c
36 #define MC_CG_ARB_FREQ_F3           0x0d
37
38 #define SMC_RAM_END 0xC000
39
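/*
 * Per-ASIC CAC (leakage/dynamic power) weight data.  These are
 * positional initializers for struct ni_cac_weights (see ni_dpm.h),
 * one table per Cayman variant (XT, Pro, LE); the appropriate table is
 * picked elsewhere in this driver when the SMC CAC configuration is
 * programmed.
 */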
40 static const struct ni_cac_weights cac_weights_cayman_xt =
41 {
42         0x15,
43         0x2,
44         0x19,
45         0x2,
46         0x8,
47         0x14,
48         0x2,
49         0x16,
50         0xE,
51         0x17,
52         0x13,
53         0x2B,
54         0x10,
55         0x7,
56         0x5,
57         0x5,
58         0x5,
59         0x2,
60         0x3,
61         0x9,
62         0x10,
63         0x10,
64         0x2B,
65         0xA,
66         0x9,
67         0x4,
68         0xD,
69         0xD,
70         0x3E,
71         0x18,
72         0x14,
73         0,
74         0x3,
75         0x3,
76         0x5,
77         0,
78         0x2,
79         0,
80         0,
81         0,
82         0,
83         0,
84         0,
85         0,
86         0,
87         0,
88         0x1CC,
89         0,
90         0x164,
91         1,
92         1,
93         1,
94         1,
95         12,
96         12,
97         12,
98         0x12,
99         0x1F,
100         132,
101         5,
102         7,
103         0,
104         { 0, 0, 0, 0, 0, 0, 0, 0 },
105         { 0, 0, 0, 0 },
106         true
107 };
108
109 static const struct ni_cac_weights cac_weights_cayman_pro =
110 {
111         0x16,
112         0x4,
113         0x10,
114         0x2,
115         0xA,
116         0x16,
117         0x2,
118         0x18,
119         0x10,
120         0x1A,
121         0x16,
122         0x2D,
123         0x12,
124         0xA,
125         0x6,
126         0x6,
127         0x6,
128         0x2,
129         0x4,
130         0xB,
131         0x11,
132         0x11,
133         0x2D,
134         0xC,
135         0xC,
136         0x7,
137         0x10,
138         0x10,
139         0x3F,
140         0x1A,
141         0x16,
142         0,
143         0x7,
144         0x4,
145         0x6,
146         1,
147         0x2,
148         0x1,
149         0,
150         0,
151         0,
152         0,
153         0,
154         0,
155         0x30,
156         0,
157         0x1CF,
158         0,
159         0x166,
160         1,
161         1,
162         1,
163         1,
164         12,
165         12,
166         12,
167         0x15,
168         0x1F,
169         132,
170         6,
171         6,
172         0,
173         { 0, 0, 0, 0, 0, 0, 0, 0 },
174         { 0, 0, 0, 0 },
175         true
176 };
177
178 static const struct ni_cac_weights cac_weights_cayman_le =
179 {
180         0x7,
181         0xE,
182         0x1,
183         0xA,
184         0x1,
185         0x3F,
186         0x2,
187         0x18,
188         0x10,
189         0x1A,
190         0x1,
191         0x3F,
192         0x1,
193         0xE,
194         0x6,
195         0x6,
196         0x6,
197         0x2,
198         0x4,
199         0x9,
200         0x1A,
201         0x1A,
202         0x2C,
203         0xA,
204         0x11,
205         0x8,
206         0x19,
207         0x19,
208         0x1,
209         0x1,
210         0x1A,
211         0,
212         0x8,
213         0x5,
214         0x8,
215         0x1,
216         0x3,
217         0x1,
218         0,
219         0,
220         0,
221         0,
222         0,
223         0,
224         0x38,
225         0x38,
226         0x239,
227         0x3,
228         0x18A,
229         1,
230         1,
231         1,
232         1,
233         12,
234         12,
235         12,
236         0x15,
237         0x22,
238         132,
239         6,
240         6,
241         0,
242         { 0, 0, 0, 0, 0, 0, 0, 0 },
243         { 0, 0, 0, 0 },
244         true
245 };
246
247 #define NISLANDS_MGCG_SEQUENCE  300
248
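/*
 * Clock-gating register sequences used by the ni_*_clockgating_*
 * helpers below.  Each entry is a { register offset, value, mask }
 * triple; the *_LENGTH macros count triples.  The sequences are
 * replayed by btc_program_mgcg_hw_sequence().
 */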
249 static const u32 cayman_cgcg_cgls_default[] =
250 {
251         0x000008f8, 0x00000010, 0xffffffff,
252         0x000008fc, 0x00000000, 0xffffffff,
253         0x000008f8, 0x00000011, 0xffffffff,
254         0x000008fc, 0x00000000, 0xffffffff,
255         0x000008f8, 0x00000012, 0xffffffff,
256         0x000008fc, 0x00000000, 0xffffffff,
257         0x000008f8, 0x00000013, 0xffffffff,
258         0x000008fc, 0x00000000, 0xffffffff,
259         0x000008f8, 0x00000014, 0xffffffff,
260         0x000008fc, 0x00000000, 0xffffffff,
261         0x000008f8, 0x00000015, 0xffffffff,
262         0x000008fc, 0x00000000, 0xffffffff,
263         0x000008f8, 0x00000016, 0xffffffff,
264         0x000008fc, 0x00000000, 0xffffffff,
265         0x000008f8, 0x00000017, 0xffffffff,
266         0x000008fc, 0x00000000, 0xffffffff,
267         0x000008f8, 0x00000018, 0xffffffff,
268         0x000008fc, 0x00000000, 0xffffffff,
269         0x000008f8, 0x00000019, 0xffffffff,
270         0x000008fc, 0x00000000, 0xffffffff,
271         0x000008f8, 0x0000001a, 0xffffffff,
272         0x000008fc, 0x00000000, 0xffffffff,
273         0x000008f8, 0x0000001b, 0xffffffff,
274         0x000008fc, 0x00000000, 0xffffffff,
275         0x000008f8, 0x00000020, 0xffffffff,
276         0x000008fc, 0x00000000, 0xffffffff,
277         0x000008f8, 0x00000021, 0xffffffff,
278         0x000008fc, 0x00000000, 0xffffffff,
279         0x000008f8, 0x00000022, 0xffffffff,
280         0x000008fc, 0x00000000, 0xffffffff,
281         0x000008f8, 0x00000023, 0xffffffff,
282         0x000008fc, 0x00000000, 0xffffffff,
283         0x000008f8, 0x00000024, 0xffffffff,
284         0x000008fc, 0x00000000, 0xffffffff,
285         0x000008f8, 0x00000025, 0xffffffff,
286         0x000008fc, 0x00000000, 0xffffffff,
287         0x000008f8, 0x00000026, 0xffffffff,
288         0x000008fc, 0x00000000, 0xffffffff,
289         0x000008f8, 0x00000027, 0xffffffff,
290         0x000008fc, 0x00000000, 0xffffffff,
291         0x000008f8, 0x00000028, 0xffffffff,
292         0x000008fc, 0x00000000, 0xffffffff,
293         0x000008f8, 0x00000029, 0xffffffff,
294         0x000008fc, 0x00000000, 0xffffffff,
295         0x000008f8, 0x0000002a, 0xffffffff,
296         0x000008fc, 0x00000000, 0xffffffff,
297         0x000008f8, 0x0000002b, 0xffffffff,
298         0x000008fc, 0x00000000, 0xffffffff
299 };
300 #define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))
301
302 static const u32 cayman_cgcg_cgls_disable[] =
303 {
304         0x000008f8, 0x00000010, 0xffffffff,
305         0x000008fc, 0xffffffff, 0xffffffff,
306         0x000008f8, 0x00000011, 0xffffffff,
307         0x000008fc, 0xffffffff, 0xffffffff,
308         0x000008f8, 0x00000012, 0xffffffff,
309         0x000008fc, 0xffffffff, 0xffffffff,
310         0x000008f8, 0x00000013, 0xffffffff,
311         0x000008fc, 0xffffffff, 0xffffffff,
312         0x000008f8, 0x00000014, 0xffffffff,
313         0x000008fc, 0xffffffff, 0xffffffff,
314         0x000008f8, 0x00000015, 0xffffffff,
315         0x000008fc, 0xffffffff, 0xffffffff,
316         0x000008f8, 0x00000016, 0xffffffff,
317         0x000008fc, 0xffffffff, 0xffffffff,
318         0x000008f8, 0x00000017, 0xffffffff,
319         0x000008fc, 0xffffffff, 0xffffffff,
320         0x000008f8, 0x00000018, 0xffffffff,
321         0x000008fc, 0xffffffff, 0xffffffff,
322         0x000008f8, 0x00000019, 0xffffffff,
323         0x000008fc, 0xffffffff, 0xffffffff,
324         0x000008f8, 0x0000001a, 0xffffffff,
325         0x000008fc, 0xffffffff, 0xffffffff,
326         0x000008f8, 0x0000001b, 0xffffffff,
327         0x000008fc, 0xffffffff, 0xffffffff,
328         0x000008f8, 0x00000020, 0xffffffff,
329         0x000008fc, 0x00000000, 0xffffffff,
330         0x000008f8, 0x00000021, 0xffffffff,
331         0x000008fc, 0x00000000, 0xffffffff,
332         0x000008f8, 0x00000022, 0xffffffff,
333         0x000008fc, 0x00000000, 0xffffffff,
334         0x000008f8, 0x00000023, 0xffffffff,
335         0x000008fc, 0x00000000, 0xffffffff,
336         0x000008f8, 0x00000024, 0xffffffff,
337         0x000008fc, 0x00000000, 0xffffffff,
338         0x000008f8, 0x00000025, 0xffffffff,
339         0x000008fc, 0x00000000, 0xffffffff,
340         0x000008f8, 0x00000026, 0xffffffff,
341         0x000008fc, 0x00000000, 0xffffffff,
342         0x000008f8, 0x00000027, 0xffffffff,
343         0x000008fc, 0x00000000, 0xffffffff,
344         0x000008f8, 0x00000028, 0xffffffff,
345         0x000008fc, 0x00000000, 0xffffffff,
346         0x000008f8, 0x00000029, 0xffffffff,
347         0x000008fc, 0x00000000, 0xffffffff,
348         0x000008f8, 0x0000002a, 0xffffffff,
349         0x000008fc, 0x00000000, 0xffffffff,
350         0x000008f8, 0x0000002b, 0xffffffff,
351         0x000008fc, 0x00000000, 0xffffffff,
352         0x00000644, 0x000f7902, 0x001f4180,
353         0x00000644, 0x000f3802, 0x001f4180
354 };
355 #define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))
356
357 static const u32 cayman_cgcg_cgls_enable[] =
358 {
359         0x00000644, 0x000f7882, 0x001f4080,
360         0x000008f8, 0x00000010, 0xffffffff,
361         0x000008fc, 0x00000000, 0xffffffff,
362         0x000008f8, 0x00000011, 0xffffffff,
363         0x000008fc, 0x00000000, 0xffffffff,
364         0x000008f8, 0x00000012, 0xffffffff,
365         0x000008fc, 0x00000000, 0xffffffff,
366         0x000008f8, 0x00000013, 0xffffffff,
367         0x000008fc, 0x00000000, 0xffffffff,
368         0x000008f8, 0x00000014, 0xffffffff,
369         0x000008fc, 0x00000000, 0xffffffff,
370         0x000008f8, 0x00000015, 0xffffffff,
371         0x000008fc, 0x00000000, 0xffffffff,
372         0x000008f8, 0x00000016, 0xffffffff,
373         0x000008fc, 0x00000000, 0xffffffff,
374         0x000008f8, 0x00000017, 0xffffffff,
375         0x000008fc, 0x00000000, 0xffffffff,
376         0x000008f8, 0x00000018, 0xffffffff,
377         0x000008fc, 0x00000000, 0xffffffff,
378         0x000008f8, 0x00000019, 0xffffffff,
379         0x000008fc, 0x00000000, 0xffffffff,
380         0x000008f8, 0x0000001a, 0xffffffff,
381         0x000008fc, 0x00000000, 0xffffffff,
382         0x000008f8, 0x0000001b, 0xffffffff,
383         0x000008fc, 0x00000000, 0xffffffff,
384         0x000008f8, 0x00000020, 0xffffffff,
385         0x000008fc, 0xffffffff, 0xffffffff,
386         0x000008f8, 0x00000021, 0xffffffff,
387         0x000008fc, 0xffffffff, 0xffffffff,
388         0x000008f8, 0x00000022, 0xffffffff,
389         0x000008fc, 0xffffffff, 0xffffffff,
390         0x000008f8, 0x00000023, 0xffffffff,
391         0x000008fc, 0xffffffff, 0xffffffff,
392         0x000008f8, 0x00000024, 0xffffffff,
393         0x000008fc, 0xffffffff, 0xffffffff,
394         0x000008f8, 0x00000025, 0xffffffff,
395         0x000008fc, 0xffffffff, 0xffffffff,
396         0x000008f8, 0x00000026, 0xffffffff,
397         0x000008fc, 0xffffffff, 0xffffffff,
398         0x000008f8, 0x00000027, 0xffffffff,
399         0x000008fc, 0xffffffff, 0xffffffff,
400         0x000008f8, 0x00000028, 0xffffffff,
401         0x000008fc, 0xffffffff, 0xffffffff,
402         0x000008f8, 0x00000029, 0xffffffff,
403         0x000008fc, 0xffffffff, 0xffffffff,
404         0x000008f8, 0x0000002a, 0xffffffff,
405         0x000008fc, 0xffffffff, 0xffffffff,
406         0x000008f8, 0x0000002b, 0xffffffff,
407         0x000008fc, 0xffffffff, 0xffffffff
408 };
409 #define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))
410
411 static const u32 cayman_mgcg_default[] =
412 {
413         0x0000802c, 0xc0000000, 0xffffffff,
414         0x00003fc4, 0xc0000000, 0xffffffff,
415         0x00005448, 0x00000100, 0xffffffff,
416         0x000055e4, 0x00000100, 0xffffffff,
417         0x0000160c, 0x00000100, 0xffffffff,
418         0x00008984, 0x06000100, 0xffffffff,
419         0x0000c164, 0x00000100, 0xffffffff,
420         0x00008a18, 0x00000100, 0xffffffff,
421         0x0000897c, 0x06000100, 0xffffffff,
422         0x00008b28, 0x00000100, 0xffffffff,
423         0x00009144, 0x00800200, 0xffffffff,
424         0x00009a60, 0x00000100, 0xffffffff,
425         0x00009868, 0x00000100, 0xffffffff,
426         0x00008d58, 0x00000100, 0xffffffff,
427         0x00009510, 0x00000100, 0xffffffff,
428         0x0000949c, 0x00000100, 0xffffffff,
429         0x00009654, 0x00000100, 0xffffffff,
430         0x00009030, 0x00000100, 0xffffffff,
431         0x00009034, 0x00000100, 0xffffffff,
432         0x00009038, 0x00000100, 0xffffffff,
433         0x0000903c, 0x00000100, 0xffffffff,
434         0x00009040, 0x00000100, 0xffffffff,
435         0x0000a200, 0x00000100, 0xffffffff,
436         0x0000a204, 0x00000100, 0xffffffff,
437         0x0000a208, 0x00000100, 0xffffffff,
438         0x0000a20c, 0x00000100, 0xffffffff,
439         0x00009744, 0x00000100, 0xffffffff,
440         0x00003f80, 0x00000100, 0xffffffff,
441         0x0000a210, 0x00000100, 0xffffffff,
442         0x0000a214, 0x00000100, 0xffffffff,
443         0x000004d8, 0x00000100, 0xffffffff,
444         0x00009664, 0x00000100, 0xffffffff,
445         0x00009698, 0x00000100, 0xffffffff,
446         0x000004d4, 0x00000200, 0xffffffff,
447         0x000004d0, 0x00000000, 0xffffffff,
448         0x000030cc, 0x00000104, 0xffffffff,
449         0x0000d0c0, 0x00000100, 0xffffffff,
450         0x0000d8c0, 0x00000100, 0xffffffff,
451         0x0000802c, 0x40000000, 0xffffffff,
452         0x00003fc4, 0x40000000, 0xffffffff,
453         0x0000915c, 0x00010000, 0xffffffff,
454         0x00009160, 0x00030002, 0xffffffff,
455         0x00009164, 0x00050004, 0xffffffff,
456         0x00009168, 0x00070006, 0xffffffff,
457         0x00009178, 0x00070000, 0xffffffff,
458         0x0000917c, 0x00030002, 0xffffffff,
459         0x00009180, 0x00050004, 0xffffffff,
460         0x0000918c, 0x00010006, 0xffffffff,
461         0x00009190, 0x00090008, 0xffffffff,
462         0x00009194, 0x00070000, 0xffffffff,
463         0x00009198, 0x00030002, 0xffffffff,
464         0x0000919c, 0x00050004, 0xffffffff,
465         0x000091a8, 0x00010006, 0xffffffff,
466         0x000091ac, 0x00090008, 0xffffffff,
467         0x000091b0, 0x00070000, 0xffffffff,
468         0x000091b4, 0x00030002, 0xffffffff,
469         0x000091b8, 0x00050004, 0xffffffff,
470         0x000091c4, 0x00010006, 0xffffffff,
471         0x000091c8, 0x00090008, 0xffffffff,
472         0x000091cc, 0x00070000, 0xffffffff,
473         0x000091d0, 0x00030002, 0xffffffff,
474         0x000091d4, 0x00050004, 0xffffffff,
475         0x000091e0, 0x00010006, 0xffffffff,
476         0x000091e4, 0x00090008, 0xffffffff,
477         0x000091e8, 0x00000000, 0xffffffff,
478         0x000091ec, 0x00070000, 0xffffffff,
479         0x000091f0, 0x00030002, 0xffffffff,
480         0x000091f4, 0x00050004, 0xffffffff,
481         0x00009200, 0x00010006, 0xffffffff,
482         0x00009204, 0x00090008, 0xffffffff,
483         0x00009208, 0x00070000, 0xffffffff,
484         0x0000920c, 0x00030002, 0xffffffff,
485         0x00009210, 0x00050004, 0xffffffff,
486         0x0000921c, 0x00010006, 0xffffffff,
487         0x00009220, 0x00090008, 0xffffffff,
488         0x00009224, 0x00070000, 0xffffffff,
489         0x00009228, 0x00030002, 0xffffffff,
490         0x0000922c, 0x00050004, 0xffffffff,
491         0x00009238, 0x00010006, 0xffffffff,
492         0x0000923c, 0x00090008, 0xffffffff,
493         0x00009240, 0x00070000, 0xffffffff,
494         0x00009244, 0x00030002, 0xffffffff,
495         0x00009248, 0x00050004, 0xffffffff,
496         0x00009254, 0x00010006, 0xffffffff,
497         0x00009258, 0x00090008, 0xffffffff,
498         0x0000925c, 0x00070000, 0xffffffff,
499         0x00009260, 0x00030002, 0xffffffff,
500         0x00009264, 0x00050004, 0xffffffff,
501         0x00009270, 0x00010006, 0xffffffff,
502         0x00009274, 0x00090008, 0xffffffff,
503         0x00009278, 0x00070000, 0xffffffff,
504         0x0000927c, 0x00030002, 0xffffffff,
505         0x00009280, 0x00050004, 0xffffffff,
506         0x0000928c, 0x00010006, 0xffffffff,
507         0x00009290, 0x00090008, 0xffffffff,
508         0x000092a8, 0x00070000, 0xffffffff,
509         0x000092ac, 0x00030002, 0xffffffff,
510         0x000092b0, 0x00050004, 0xffffffff,
511         0x000092bc, 0x00010006, 0xffffffff,
512         0x000092c0, 0x00090008, 0xffffffff,
513         0x000092c4, 0x00070000, 0xffffffff,
514         0x000092c8, 0x00030002, 0xffffffff,
515         0x000092cc, 0x00050004, 0xffffffff,
516         0x000092d8, 0x00010006, 0xffffffff,
517         0x000092dc, 0x00090008, 0xffffffff,
518         0x00009294, 0x00000000, 0xffffffff,
519         0x0000802c, 0x40010000, 0xffffffff,
520         0x00003fc4, 0x40010000, 0xffffffff,
521         0x0000915c, 0x00010000, 0xffffffff,
522         0x00009160, 0x00030002, 0xffffffff,
523         0x00009164, 0x00050004, 0xffffffff,
524         0x00009168, 0x00070006, 0xffffffff,
525         0x00009178, 0x00070000, 0xffffffff,
526         0x0000917c, 0x00030002, 0xffffffff,
527         0x00009180, 0x00050004, 0xffffffff,
528         0x0000918c, 0x00010006, 0xffffffff,
529         0x00009190, 0x00090008, 0xffffffff,
530         0x00009194, 0x00070000, 0xffffffff,
531         0x00009198, 0x00030002, 0xffffffff,
532         0x0000919c, 0x00050004, 0xffffffff,
533         0x000091a8, 0x00010006, 0xffffffff,
534         0x000091ac, 0x00090008, 0xffffffff,
535         0x000091b0, 0x00070000, 0xffffffff,
536         0x000091b4, 0x00030002, 0xffffffff,
537         0x000091b8, 0x00050004, 0xffffffff,
538         0x000091c4, 0x00010006, 0xffffffff,
539         0x000091c8, 0x00090008, 0xffffffff,
540         0x000091cc, 0x00070000, 0xffffffff,
541         0x000091d0, 0x00030002, 0xffffffff,
542         0x000091d4, 0x00050004, 0xffffffff,
543         0x000091e0, 0x00010006, 0xffffffff,
544         0x000091e4, 0x00090008, 0xffffffff,
545         0x000091e8, 0x00000000, 0xffffffff,
546         0x000091ec, 0x00070000, 0xffffffff,
547         0x000091f0, 0x00030002, 0xffffffff,
548         0x000091f4, 0x00050004, 0xffffffff,
549         0x00009200, 0x00010006, 0xffffffff,
550         0x00009204, 0x00090008, 0xffffffff,
551         0x00009208, 0x00070000, 0xffffffff,
552         0x0000920c, 0x00030002, 0xffffffff,
553         0x00009210, 0x00050004, 0xffffffff,
554         0x0000921c, 0x00010006, 0xffffffff,
555         0x00009220, 0x00090008, 0xffffffff,
556         0x00009224, 0x00070000, 0xffffffff,
557         0x00009228, 0x00030002, 0xffffffff,
558         0x0000922c, 0x00050004, 0xffffffff,
559         0x00009238, 0x00010006, 0xffffffff,
560         0x0000923c, 0x00090008, 0xffffffff,
561         0x00009240, 0x00070000, 0xffffffff,
562         0x00009244, 0x00030002, 0xffffffff,
563         0x00009248, 0x00050004, 0xffffffff,
564         0x00009254, 0x00010006, 0xffffffff,
565         0x00009258, 0x00090008, 0xffffffff,
566         0x0000925c, 0x00070000, 0xffffffff,
567         0x00009260, 0x00030002, 0xffffffff,
568         0x00009264, 0x00050004, 0xffffffff,
569         0x00009270, 0x00010006, 0xffffffff,
570         0x00009274, 0x00090008, 0xffffffff,
571         0x00009278, 0x00070000, 0xffffffff,
572         0x0000927c, 0x00030002, 0xffffffff,
573         0x00009280, 0x00050004, 0xffffffff,
574         0x0000928c, 0x00010006, 0xffffffff,
575         0x00009290, 0x00090008, 0xffffffff,
576         0x000092a8, 0x00070000, 0xffffffff,
577         0x000092ac, 0x00030002, 0xffffffff,
578         0x000092b0, 0x00050004, 0xffffffff,
579         0x000092bc, 0x00010006, 0xffffffff,
580         0x000092c0, 0x00090008, 0xffffffff,
581         0x000092c4, 0x00070000, 0xffffffff,
582         0x000092c8, 0x00030002, 0xffffffff,
583         0x000092cc, 0x00050004, 0xffffffff,
584         0x000092d8, 0x00010006, 0xffffffff,
585         0x000092dc, 0x00090008, 0xffffffff,
586         0x00009294, 0x00000000, 0xffffffff,
587         0x0000802c, 0xc0000000, 0xffffffff,
588         0x00003fc4, 0xc0000000, 0xffffffff,
589         0x000008f8, 0x00000010, 0xffffffff,
590         0x000008fc, 0x00000000, 0xffffffff,
591         0x000008f8, 0x00000011, 0xffffffff,
592         0x000008fc, 0x00000000, 0xffffffff,
593         0x000008f8, 0x00000012, 0xffffffff,
594         0x000008fc, 0x00000000, 0xffffffff,
595         0x000008f8, 0x00000013, 0xffffffff,
596         0x000008fc, 0x00000000, 0xffffffff,
597         0x000008f8, 0x00000014, 0xffffffff,
598         0x000008fc, 0x00000000, 0xffffffff,
599         0x000008f8, 0x00000015, 0xffffffff,
600         0x000008fc, 0x00000000, 0xffffffff,
601         0x000008f8, 0x00000016, 0xffffffff,
602         0x000008fc, 0x00000000, 0xffffffff,
603         0x000008f8, 0x00000017, 0xffffffff,
604         0x000008fc, 0x00000000, 0xffffffff,
605         0x000008f8, 0x00000018, 0xffffffff,
606         0x000008fc, 0x00000000, 0xffffffff,
607         0x000008f8, 0x00000019, 0xffffffff,
608         0x000008fc, 0x00000000, 0xffffffff,
609         0x000008f8, 0x0000001a, 0xffffffff,
610         0x000008fc, 0x00000000, 0xffffffff,
611         0x000008f8, 0x0000001b, 0xffffffff,
612         0x000008fc, 0x00000000, 0xffffffff
613 };
614 #define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))
615
616 static const u32 cayman_mgcg_disable[] =
617 {
618         0x0000802c, 0xc0000000, 0xffffffff,
619         0x000008f8, 0x00000000, 0xffffffff,
620         0x000008fc, 0xffffffff, 0xffffffff,
621         0x000008f8, 0x00000001, 0xffffffff,
622         0x000008fc, 0xffffffff, 0xffffffff,
623         0x000008f8, 0x00000002, 0xffffffff,
624         0x000008fc, 0xffffffff, 0xffffffff,
625         0x000008f8, 0x00000003, 0xffffffff,
626         0x000008fc, 0xffffffff, 0xffffffff,
627         0x00009150, 0x00600000, 0xffffffff
628 };
629 #define CAYMAN_MGCG_DISABLE_LENGTH   sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))
630
631 static const u32 cayman_mgcg_enable[] =
632 {
633         0x0000802c, 0xc0000000, 0xffffffff,
634         0x000008f8, 0x00000000, 0xffffffff,
635         0x000008fc, 0x00000000, 0xffffffff,
636         0x000008f8, 0x00000001, 0xffffffff,
637         0x000008fc, 0x00000000, 0xffffffff,
638         0x000008f8, 0x00000002, 0xffffffff,
639         0x000008fc, 0x00600000, 0xffffffff,
640         0x000008f8, 0x00000003, 0xffffffff,
641         0x000008fc, 0x00000000, 0xffffffff,
642         0x00009150, 0x96944200, 0xffffffff
643 };
644
645 #define CAYMAN_MGCG_ENABLE_LENGTH   sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))
646
647 #define NISLANDS_SYSLS_SEQUENCE  100
648
649 static const u32 cayman_sysls_default[] =
650 {
651         /* Register,   Value,     Mask bits */
652         0x000055e8, 0x00000000, 0xffffffff,
653         0x0000d0bc, 0x00000000, 0xffffffff,
654         0x0000d8bc, 0x00000000, 0xffffffff,
655         0x000015c0, 0x000c1401, 0xffffffff,
656         0x0000264c, 0x000c0400, 0xffffffff,
657         0x00002648, 0x000c0400, 0xffffffff,
658         0x00002650, 0x000c0400, 0xffffffff,
659         0x000020b8, 0x000c0400, 0xffffffff,
660         0x000020bc, 0x000c0400, 0xffffffff,
661         0x000020c0, 0x000c0c80, 0xffffffff,
662         0x0000f4a0, 0x000000c0, 0xffffffff,
663         0x0000f4a4, 0x00680fff, 0xffffffff,
664         0x00002f50, 0x00000404, 0xffffffff,
665         0x000004c8, 0x00000001, 0xffffffff,
666         0x000064ec, 0x00000000, 0xffffffff,
667         0x00000c7c, 0x00000000, 0xffffffff,
668         0x00008dfc, 0x00000000, 0xffffffff
669 };
670 #define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))
671
672 static const u32 cayman_sysls_disable[] =
673 {
674         /* Register,   Value,     Mask bits */
675         0x0000d0c0, 0x00000000, 0xffffffff,
676         0x0000d8c0, 0x00000000, 0xffffffff,
677         0x000055e8, 0x00000000, 0xffffffff,
678         0x0000d0bc, 0x00000000, 0xffffffff,
679         0x0000d8bc, 0x00000000, 0xffffffff,
680         0x000015c0, 0x00041401, 0xffffffff,
681         0x0000264c, 0x00040400, 0xffffffff,
682         0x00002648, 0x00040400, 0xffffffff,
683         0x00002650, 0x00040400, 0xffffffff,
684         0x000020b8, 0x00040400, 0xffffffff,
685         0x000020bc, 0x00040400, 0xffffffff,
686         0x000020c0, 0x00040c80, 0xffffffff,
687         0x0000f4a0, 0x000000c0, 0xffffffff,
688         0x0000f4a4, 0x00680000, 0xffffffff,
689         0x00002f50, 0x00000404, 0xffffffff,
690         0x000004c8, 0x00000001, 0xffffffff,
691         0x000064ec, 0x00007ffd, 0xffffffff,
692         0x00000c7c, 0x0000ff00, 0xffffffff,
693         0x00008dfc, 0x0000007f, 0xffffffff
694 };
695 #define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))
696
697 static const u32 cayman_sysls_enable[] =
698 {
699         /* Register,   Value,     Mask bits */
700         0x000055e8, 0x00000001, 0xffffffff,
701         0x0000d0bc, 0x00000100, 0xffffffff,
702         0x0000d8bc, 0x00000100, 0xffffffff,
703         0x000015c0, 0x000c1401, 0xffffffff,
704         0x0000264c, 0x000c0400, 0xffffffff,
705         0x00002648, 0x000c0400, 0xffffffff,
706         0x00002650, 0x000c0400, 0xffffffff,
707         0x000020b8, 0x000c0400, 0xffffffff,
708         0x000020bc, 0x000c0400, 0xffffffff,
709         0x000020c0, 0x000c0c80, 0xffffffff,
710         0x0000f4a0, 0x000000c0, 0xffffffff,
711         0x0000f4a4, 0x00680fff, 0xffffffff,
712         0x00002f50, 0x00000903, 0xffffffff,
713         0x000004c8, 0x00000000, 0xffffffff,
714         0x000064ec, 0x00000000, 0xffffffff,
715         0x00000c7c, 0x00000000, 0xffffffff,
716         0x00008dfc, 0x00000000, 0xffffffff
717 };
718 #define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))
719
720 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
721 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
722
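/* Accessors for the driver-private DPM state hung off rdev and the power state. */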
723 struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
724 {
725         struct ni_power_info *pi = rdev->pm.dpm.priv;
726
727         return pi;
728 }
729
730 struct ni_ps *ni_get_ps(struct radeon_ps *rps)
731 {
732         struct ni_ps *ps = rps->ps_priv;
733
734         return ps;
735 }
736
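/*
 * Leakage model, evaluated in drm 32.32 fixed point:
 *
 *   kt      = (at / 1000) * exp((bt / 1000) * (t / 1000))
 *   kv      = (av / 1000) * exp((bv / 1000) * (v / 1000))
 *   leakage = (ileakage / 1000) * kt * kv * (v / 1000)
 *
 * The fixed-point result is scaled back up by 1000 before it is
 * converted to an integer.
 */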
737 static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
738                                                      u16 v, s32 t,
739                                                      u32 ileakage,
740                                                      u32 *leakage)
741 {
742         s64 kt, kv, leakage_w, i_leakage, vddc, temperature;
743
744         i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
745         vddc = div64_s64(drm_int2fixp(v), 1000);
746         temperature = div64_s64(drm_int2fixp(t), 1000);
747
748         kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
749                           drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
750         kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
751                           drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));
752
753         leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);
754
755         *leakage = drm_fixp2int(leakage_w * 1000);
756 }
757
758 static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
759                                              const struct ni_leakage_coeffients *coeff,
760                                              u16 v,
761                                              s32 t,
762                                              u32 i_leakage,
763                                              u32 *leakage)
764 {
765         ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
766 }
767
768 bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
769 {
770         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
771         u32 vblank_time = r600_dpm_get_vblank_time(rdev);
772         /* we never hit the non-gddr5 limit so disable it */
773         u32 switch_limit = pi->mem_gddr5 ? 450 : 0;
774
775         if (vblank_time < switch_limit)
776                 return true;
777         else
778                 return false;
779
780 }
781
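/*
 * Adjust the requested power state before it is programmed: on DC
 * power, clamp each performance level to the DC clock/voltage limits;
 * disable memory clock switching when more than one CRTC is active or
 * the vblank period is too short; force clocks and voltages to be
 * non-decreasing across performance levels; and apply the btc_*
 * blacklist, clock-combination and voltage dependency/delta rules.
 * Also recomputes dc_compatible and strips the PCIe gen2 flag from
 * levels whose VDDC is below the required minimum.
 */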
782 static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
783                                         struct radeon_ps *rps)
784 {
785         struct ni_ps *ps = ni_get_ps(rps);
786         struct radeon_clock_and_voltage_limits *max_limits;
787         bool disable_mclk_switching;
788         u32 mclk, sclk;
789         u16 vddc, vddci;
790         int i;
791
792         if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
793             ni_dpm_vblank_too_short(rdev))
794                 disable_mclk_switching = true;
795         else
796                 disable_mclk_switching = false;
797
798         if (rdev->pm.dpm.ac_power)
799                 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
800         else
801                 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;
802
803         if (!rdev->pm.dpm.ac_power) {
804                 for (i = 0; i < ps->performance_level_count; i++) {
805                         if (ps->performance_levels[i].mclk > max_limits->mclk)
806                                 ps->performance_levels[i].mclk = max_limits->mclk;
807                         if (ps->performance_levels[i].sclk > max_limits->sclk)
808                                 ps->performance_levels[i].sclk = max_limits->sclk;
809                         if (ps->performance_levels[i].vddc > max_limits->vddc)
810                                 ps->performance_levels[i].vddc = max_limits->vddc;
811                         if (ps->performance_levels[i].vddci > max_limits->vddci)
812                                 ps->performance_levels[i].vddci = max_limits->vddci;
813                 }
814         }
815
816         /* XXX validate the min clocks required for display */
817
818         if (disable_mclk_switching) {
819                 mclk  = ps->performance_levels[ps->performance_level_count - 1].mclk;
820                 sclk = ps->performance_levels[0].sclk;
821                 vddc = ps->performance_levels[0].vddc;
822                 vddci = ps->performance_levels[ps->performance_level_count - 1].vddci;
823         } else {
824                 sclk = ps->performance_levels[0].sclk;
825                 mclk = ps->performance_levels[0].mclk;
826                 vddc = ps->performance_levels[0].vddc;
827                 vddci = ps->performance_levels[0].vddci;
828         }
829
830         /* adjusted low state */
831         ps->performance_levels[0].sclk = sclk;
832         ps->performance_levels[0].mclk = mclk;
833         ps->performance_levels[0].vddc = vddc;
834         ps->performance_levels[0].vddci = vddci;
835
836         btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
837                                   &ps->performance_levels[0].sclk,
838                                   &ps->performance_levels[0].mclk);
839
840         for (i = 1; i < ps->performance_level_count; i++) {
841                 if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
842                         ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
843                 if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
844                         ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
845         }
846
847         if (disable_mclk_switching) {
848                 mclk = ps->performance_levels[0].mclk;
849                 for (i = 1; i < ps->performance_level_count; i++) {
850                         if (mclk < ps->performance_levels[i].mclk)
851                                 mclk = ps->performance_levels[i].mclk;
852                 }
853                 for (i = 0; i < ps->performance_level_count; i++) {
854                         ps->performance_levels[i].mclk = mclk;
855                         ps->performance_levels[i].vddci = vddci;
856                 }
857         } else {
858                 for (i = 1; i < ps->performance_level_count; i++) {
859                         if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
860                                 ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
861                         if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
862                                 ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
863                 }
864         }
865
866         for (i = 1; i < ps->performance_level_count; i++)
867                 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
868                                           &ps->performance_levels[i].sclk,
869                                           &ps->performance_levels[i].mclk);
870
871         for (i = 0; i < ps->performance_level_count; i++)
872                 btc_adjust_clock_combinations(rdev, max_limits,
873                                               &ps->performance_levels[i]);
874
875         for (i = 0; i < ps->performance_level_count; i++) {
876                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
877                                                    ps->performance_levels[i].sclk,
878                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
879                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
880                                                    ps->performance_levels[i].mclk,
881                                                    max_limits->vddci, &ps->performance_levels[i].vddci);
882                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
883                                                    ps->performance_levels[i].mclk,
884                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
885                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
886                                                    rdev->clock.current_dispclk,
887                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
888         }
889
890         for (i = 0; i < ps->performance_level_count; i++) {
891                 btc_apply_voltage_delta_rules(rdev,
892                                               max_limits->vddc, max_limits->vddci,
893                                               &ps->performance_levels[i].vddc,
894                                               &ps->performance_levels[i].vddci);
895         }
896
897         ps->dc_compatible = true;
898         for (i = 0; i < ps->performance_level_count; i++) {
899                 if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
900                         ps->dc_compatible = false;
901
902                 if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
903                         ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
904         }
905 }
906
907 static void ni_cg_clockgating_default(struct radeon_device *rdev)
908 {
909         u32 count;
910         const u32 *ps = NULL;
911
912         ps = (const u32 *)&cayman_cgcg_cgls_default;
913         count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
914
915         btc_program_mgcg_hw_sequence(rdev, ps, count);
916 }
917
918 static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
919                                       bool enable)
920 {
921         u32 count;
922         const u32 *ps = NULL;
923
924         if (enable) {
925                 ps = (const u32 *)&cayman_cgcg_cgls_enable;
926                 count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
927         } else {
928                 ps = (const u32 *)&cayman_cgcg_cgls_disable;
929                 count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
930         }
931
932         btc_program_mgcg_hw_sequence(rdev, ps, count);
933 }
934
935 static void ni_mg_clockgating_default(struct radeon_device *rdev)
936 {
937         u32 count;
938         const u32 *ps = NULL;
939
940         ps = (const u32 *)&cayman_mgcg_default;
941         count = CAYMAN_MGCG_DEFAULT_LENGTH;
942
943         btc_program_mgcg_hw_sequence(rdev, ps, count);
944 }
945
946 static void ni_mg_clockgating_enable(struct radeon_device *rdev,
947                                      bool enable)
948 {
949         u32 count;
950         const u32 *ps = NULL;
951
952         if (enable) {
953                 ps = (const u32 *)&cayman_mgcg_enable;
954                 count = CAYMAN_MGCG_ENABLE_LENGTH;
955         } else {
956                 ps = (const u32 *)&cayman_mgcg_disable;
957                 count = CAYMAN_MGCG_DISABLE_LENGTH;
958         }
959
960         btc_program_mgcg_hw_sequence(rdev, ps, count);
961 }
962
963 static void ni_ls_clockgating_default(struct radeon_device *rdev)
964 {
965         u32 count;
966         const u32 *ps = NULL;
967
968         ps = (const u32 *)&cayman_sysls_default;
969         count = CAYMAN_SYSLS_DEFAULT_LENGTH;
970
971         btc_program_mgcg_hw_sequence(rdev, ps, count);
972 }
973
974 static void ni_ls_clockgating_enable(struct radeon_device *rdev,
975                                      bool enable)
976 {
977         u32 count;
978         const u32 *ps = NULL;
979
980         if (enable) {
981                 ps = (const u32 *)&cayman_sysls_enable;
982                 count = CAYMAN_SYSLS_ENABLE_LENGTH;
983         } else {
984                 ps = (const u32 *)&cayman_sysls_disable;
985                 count = CAYMAN_SYSLS_DISABLE_LENGTH;
986         }
987
988         btc_program_mgcg_hw_sequence(rdev, ps, count);
989
990 }
991
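/*
 * Replace the 0xff01 placeholder (the vbios leakage-voltage marker) in
 * a clock/voltage dependency table with the board's real maximum VDDC.
 */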
992 static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
993                                                              struct radeon_clock_voltage_dependency_table *table)
994 {
995         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
996         u32 i;
997
998         if (table) {
999                 for (i = 0; i < table->count; i++) {
1000                         if (0xff01 == table->entries[i].v) {
1001                                 if (pi->max_vddc == 0)
1002                                         return -EINVAL;
1003                                 table->entries[i].v = pi->max_vddc;
1004                         }
1005                 }
1006         }
1007         return 0;
1008 }
1009
1010 static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
1011 {
1012         int ret;
1013
1014         ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1015                                                                 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1016         if (ret)
1017                 return ret;
1018         return ni_patch_single_dependency_table_based_on_leakage(rdev,
1019                                                                 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1020 }
1021
1022 static void ni_stop_dpm(struct radeon_device *rdev)
1023 {
1024         WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
1025 }
1026
1027 #if 0
1028 static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
1029                                         bool ac_power)
1030 {
1031         if (ac_power)
1032                 return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
1033                         0 : -EINVAL;
1034
1035         return 0;
1036 }
1037 #endif
1038
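/*
 * SMC messages that take an argument pass it through the SMC_SCRATCH0
 * register before the message itself is issued.
 */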
1039 static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
1040                                                       PPSMC_Msg msg, u32 parameter)
1041 {
1042         WREG32(SMC_SCRATCH0, parameter);
1043         return rv770_send_msg_to_smc(rdev, msg);
1044 }
1045
1046 static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1047 {
1048         if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1049                 return -EINVAL;
1050
1051         return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
1052                 0 : -EINVAL;
1053 }
1054
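/*
 * Force a DPM performance level by pairing PPSMC_MSG_SetEnabledLevels
 * and PPSMC_MSG_SetForcedLevels: high sends (0, 1), low sends (1, 0)
 * and auto sends (0, 0); the requested level is then recorded in
 * rdev->pm.dpm.forced_level.
 */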
1055 int ni_dpm_force_performance_level(struct radeon_device *rdev,
1056                                    enum radeon_dpm_forced_level level)
1057 {
1058         if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1059                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1060                         return -EINVAL;
1061
1062                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
1063                         return -EINVAL;
1064         } else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1065                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1066                         return -EINVAL;
1067
1068                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
1069                         return -EINVAL;
1070         } else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
1071                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1072                         return -EINVAL;
1073
1074                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1075                         return -EINVAL;
1076         }
1077
1078         rdev->pm.dpm.forced_level = level;
1079
1080         return 0;
1081 }
1082
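/*
 * Halt the SMC: poll LB_SYNC_RESET_SEL until it leaves state 1 (or the
 * usec timeout expires), wait a further 100us, then stop the SMC via
 * r7xx_stop_smc().
 */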
1083 static void ni_stop_smc(struct radeon_device *rdev)
1084 {
1085         u32 tmp;
1086         int i;
1087
1088         for (i = 0; i < rdev->usec_timeout; i++) {
1089                 tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
1090                 if (tmp != 1)
1091                         break;
1092                 udelay(1);
1093         }
1094
1095         udelay(100);
1096
1097         r7xx_stop_smc(rdev);
1098 }
1099
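/*
 * Read the offsets of the state, soft-register, MC register, fan,
 * MC arb, CAC and SPLL tables from the firmware header in SMC SRAM and
 * cache them in the power-info structures for later table uploads.
 */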
1100 static int ni_process_firmware_header(struct radeon_device *rdev)
1101 {
1102         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1103         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1104         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1105         u32 tmp;
1106         int ret;
1107
1108         ret = rv770_read_smc_sram_dword(rdev,
1109                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1110                                         NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1111                                         &tmp, pi->sram_end);
1112
1113         if (ret)
1114                 return ret;
1115
1116         pi->state_table_start = (u16)tmp;
1117
1118         ret = rv770_read_smc_sram_dword(rdev,
1119                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1120                                         NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1121                                         &tmp, pi->sram_end);
1122
1123         if (ret)
1124                 return ret;
1125
1126         pi->soft_regs_start = (u16)tmp;
1127
1128         ret = rv770_read_smc_sram_dword(rdev,
1129                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1130                                         NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1131                                         &tmp, pi->sram_end);
1132
1133         if (ret)
1134                 return ret;
1135
1136         eg_pi->mc_reg_table_start = (u16)tmp;
1137
1138         ret = rv770_read_smc_sram_dword(rdev,
1139                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1140                                         NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1141                                         &tmp, pi->sram_end);
1142
1143         if (ret)
1144                 return ret;
1145
1146         ni_pi->fan_table_start = (u16)tmp;
1147
1148         ret = rv770_read_smc_sram_dword(rdev,
1149                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1150                                         NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1151                                         &tmp, pi->sram_end);
1152
1153         if (ret)
1154                 return ret;
1155
1156         ni_pi->arb_table_start = (u16)tmp;
1157
1158         ret = rv770_read_smc_sram_dword(rdev,
1159                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1160                                         NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1161                                         &tmp, pi->sram_end);
1162
1163         if (ret)
1164                 return ret;
1165
1166         ni_pi->cac_table_start = (u16)tmp;
1167
1168         ret = rv770_read_smc_sram_dword(rdev,
1169                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1170                                         NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1171                                         &tmp, pi->sram_end);
1172
1173         if (ret)
1174                 return ret;
1175
1176         ni_pi->spll_table_start = (u16)tmp;
1177
1178
1179         return ret;
1180 }
1181
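/* Snapshot the current SPLL/MPLL and memory clock control registers. */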
1182 static void ni_read_clock_registers(struct radeon_device *rdev)
1183 {
1184         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1185
1186         ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
1187         ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
1188         ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
1189         ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
1190         ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
1191         ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1192         ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
1193         ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
1194         ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
1195         ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
1196         ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
1197         ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
1198         ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
1199         ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
1200 }
1201
1202 #if 0
1203 static int ni_enter_ulp_state(struct radeon_device *rdev)
1204 {
1205         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1206
1207         if (pi->gfx_clock_gating) {
1208                 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1209                 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1210                 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1211                 RREG32(GB_ADDR_CONFIG);
1212         }
1213
1214         WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1215                  ~HOST_SMC_MSG_MASK);
1216
1217         udelay(25000);
1218
1219         return 0;
1220 }
1221 #endif
1222
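/*
 * Convert the voltage/backbias response times (defaulting to 1000 if
 * unset), the ACPI delay and the VBI timeout into delays based on the
 * reference clock, and write them, together with the MC block delay
 * and the mclk switch limit, into the SMC soft-register area.
 */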
1223 static void ni_program_response_times(struct radeon_device *rdev)
1224 {
1225         u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
1226         u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
1227         u32 reference_clock;
1228
1229         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
1230
1231         voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1232         backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1233
1234         if (voltage_response_time == 0)
1235                 voltage_response_time = 1000;
1236
1237         if (backbias_response_time == 0)
1238                 backbias_response_time = 1000;
1239
1240         acpi_delay_time = 15000;
1241         vbi_time_out = 100000;
1242
1243         reference_clock = radeon_get_xclk(rdev);
1244
1245         vddc_dly = (voltage_response_time  * reference_clock) / 1600;
1246         bb_dly   = (backbias_response_time * reference_clock) / 1600;
1247         acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1248         vbi_dly  = (vbi_time_out * reference_clock) / 1600;
1249
1250         mclk_switch_limit = (460 * reference_clock) / 100;
1251
1252         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
1253         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1254         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
1255         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1256         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
1257         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
1258 }
1259
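/*
 * Copy the SMIO settings of a voltage table into the SMC state table
 * (low SMIO bits are OR'ed in, converted to the big-endian layout the
 * SMC tables use).
 */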
1260 static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
1261                                           struct atom_voltage_table *voltage_table,
1262                                           NISLANDS_SMC_STATETABLE *table)
1263 {
1264         unsigned int i;
1265
1266         for (i = 0; i < voltage_table->count; i++) {
1267                 table->highSMIO[i] = 0;
1268                 table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1269         }
1270 }
1271
1272 static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1273                                            NISLANDS_SMC_STATETABLE *table)
1274 {
1275         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1276         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1277         unsigned char i;
1278
1279         if (eg_pi->vddc_voltage_table.count) {
1280                 ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1281                 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1282                 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1283                         cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1284
1285                 for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1286                         if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1287                                 table->maxVDDCIndexInPPTable = i;
1288                                 break;
1289                         }
1290                 }
1291         }
1292
1293         if (eg_pi->vddci_voltage_table.count) {
1294                 ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1295
1296                 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1297                 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1298                         cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1299         }
1300 }
1301
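/*
 * Translate a requested voltage into an SMC voltage entry: pick the
 * first table entry at or above the requested value, or fail if the
 * request exceeds the whole table.
 */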
1302 static int ni_populate_voltage_value(struct radeon_device *rdev,
1303                                      struct atom_voltage_table *table,
1304                                      u16 value,
1305                                      NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1306 {
1307         unsigned int i;
1308
1309         for (i = 0; i < table->count; i++) {
1310                 if (value <= table->entries[i].value) {
1311                         voltage->index = (u8)i;
1312                         voltage->value = cpu_to_be16(table->entries[i].value);
1313                         break;
1314                 }
1315         }
1316
1317         if (i >= table->count)
1318                 return -EINVAL;
1319
1320         return 0;
1321 }
1322
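/*
 * Without MVDD control the high MVDD level is always reported;
 * otherwise the low or high level is chosen depending on the MVDD
 * split frequency.
 */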
1323 static void ni_populate_mvdd_value(struct radeon_device *rdev,
1324                                    u32 mclk,
1325                                    NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1326 {
1327         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1328         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1329
1330         if (!pi->mvdd_control) {
1331                 voltage->index = eg_pi->mvdd_high_index;
1332                 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1333                 return;
1334         }
1335
1336         if (mclk <= pi->mvdd_split_frequency) {
1337                 voltage->index = eg_pi->mvdd_low_index;
1338                 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1339         } else {
1340                 voltage->index = eg_pi->mvdd_high_index;
1341                 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1342         }
1343 }
1344
1345 static int ni_get_std_voltage_value(struct radeon_device *rdev,
1346                                     NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1347                                     u16 *std_voltage)
1348 {
1349         if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1350             ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1351                 *std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1352         else
1353                 *std_voltage = be16_to_cpu(voltage->value);
1354
1355         return 0;
1356 }
1357
1358 static void ni_populate_std_voltage_value(struct radeon_device *rdev,
1359                                           u16 value, u8 index,
1360                                           NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1361 {
1362         voltage->index = index;
1363         voltage->value = cpu_to_be16(value);
1364 }
1365
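/*
 * Power scaling helpers for the SMC: the scaling factor is the CAC TID
 * count (from CG_CAC_CTRL) multiplied by the derived xclk period, and
 * ni_scale_power_for_smc() applies it as (power_in_watts * factor) << 2.
 */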
1366 static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
1367 {
1368         u32 xclk_period;
1369         u32 xclk = radeon_get_xclk(rdev);
1370         u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
1371
1372         xclk_period = (1000000000UL / xclk);
1373         xclk_period /= 10000UL;
1374
1375         return tmp * xclk_period;
1376 }
1377
1378 static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
1379 {
1380         return (power_in_watts * scaling_factor) << 2;
1381 }
1382
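/*
 * When power containment and the boost limit are enabled, derive the
 * power boost limit from the near-TDP limit scaled by the squared
 * ratio of the standard VDDC of the second-highest and highest
 * performance levels:
 *
 *   boost = near_tdp_limit * (std_vddc_med / std_vddc_high)^2 * 90 / 100
 */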
1383 static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
1384                                           struct radeon_ps *radeon_state,
1385                                           u32 near_tdp_limit)
1386 {
1387         struct ni_ps *state = ni_get_ps(radeon_state);
1388         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1389         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1390         u32 power_boost_limit = 0;
1391         int ret;
1392
1393         if (ni_pi->enable_power_containment &&
1394             ni_pi->use_power_boost_limit) {
1395                 NISLANDS_SMC_VOLTAGE_VALUE vddc;
1396                 u16 std_vddc_med;
1397                 u16 std_vddc_high;
1398                 u64 tmp, n, d;
1399
1400                 if (state->performance_level_count < 3)
1401                         return 0;
1402
1403                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1404                                                 state->performance_levels[state->performance_level_count - 2].vddc,
1405                                                 &vddc);
1406                 if (ret)
1407                         return 0;
1408
1409                 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
1410                 if (ret)
1411                         return 0;
1412
1413                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1414                                                 state->performance_levels[state->performance_level_count - 1].vddc,
1415                                                 &vddc);
1416                 if (ret)
1417                         return 0;
1418
1419                 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
1420                 if (ret)
1421                         return 0;
1422
1423                 n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
1424                 d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
1425                 tmp = div64_u64(n, d);
1426
1427                 if (tmp >> 32)
1428                         return 0;
1429                 power_boost_limit = (u32)tmp;
1430         }
1431
1432         return power_boost_limit;
1433 }
1434
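     /*
      * Apply the user TDP adjustment (a percentage bounded by
      * tdp_od_limit) to the TDP limit, raising or lowering it depending on
      * the polarity, and shift the near-TDP limit by the same absolute
      * amount.
      */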
1435 static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
1436                                             bool adjust_polarity,
1437                                             u32 tdp_adjustment,
1438                                             u32 *tdp_limit,
1439                                             u32 *near_tdp_limit)
1440 {
1441         if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
1442                 return -EINVAL;
1443
1444         if (adjust_polarity) {
1445                 *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1446                 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
1447         } else {
1448                 *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1449                 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
1450         }
1451
1452         return 0;
1453 }
1454
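     /*
      * Fill in the DPM2 power-containment limits (TDP, near-TDP, safe and
      * boost limits, converted to SMC units) and copy the four consecutive
      * dwords into the dpm2Params block of the SMC state table in SRAM.
      */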
1455 static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
1456                                       struct radeon_ps *radeon_state)
1457 {
1458         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1459         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1460
1461         if (ni_pi->enable_power_containment) {
1462                 NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
1463                 u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
1464                 u32 tdp_limit;
1465                 u32 near_tdp_limit;
1466                 u32 power_boost_limit;
1467                 int ret;
1468
1469                 if (scaling_factor == 0)
1470                         return -EINVAL;
1471
1472                 memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1473
1474                 ret = ni_calculate_adjusted_tdp_limits(rdev,
1475                                                        false, /* ??? */
1476                                                        rdev->pm.dpm.tdp_adjustment,
1477                                                        &tdp_limit,
1478                                                        &near_tdp_limit);
1479                 if (ret)
1480                         return ret;
1481
1482                 power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
1483                                                                    near_tdp_limit);
1484
1485                 smc_table->dpm2Params.TDPLimit =
1486                         cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
1487                 smc_table->dpm2Params.NearTDPLimit =
1488                         cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
1489                 smc_table->dpm2Params.SafePowerLimit =
1490                         cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
1491                                                            scaling_factor));
1492                 smc_table->dpm2Params.PowerBoostLimit =
1493                         cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));
1494
1495                 ret = rv770_copy_bytes_to_smc(rdev,
1496                                               (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
1497                                                     offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
1498                                               (u8 *)(&smc_table->dpm2Params.TDPLimit),
1499                                               sizeof(u32) * 4, pi->sram_end);
1500                 if (ret)
1501                         return ret;
1502         }
1503
1504         return 0;
1505 }
1506
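     /*
      * Copy the MC arbiter DRAM timing registers and burst time from one
      * arbiter register set (F0..F3) to another, then request that the
      * memory controller switch to the destination set.
      */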
1507 int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
1508                                 u32 arb_freq_src, u32 arb_freq_dest)
1509 {
1510         u32 mc_arb_dram_timing;
1511         u32 mc_arb_dram_timing2;
1512         u32 burst_time;
1513         u32 mc_cg_config;
1514
1515         switch (arb_freq_src) {
1516         case MC_CG_ARB_FREQ_F0:
1517                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
1518                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1519                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
1520                 break;
1521         case MC_CG_ARB_FREQ_F1:
1522                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
1523                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
1524                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
1525                 break;
1526         case MC_CG_ARB_FREQ_F2:
1527                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
1528                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
1529                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
1530                 break;
1531         case MC_CG_ARB_FREQ_F3:
1532                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
1533                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
1534                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
1535                 break;
1536         default:
1537                 return -EINVAL;
1538         }
1539
1540         switch (arb_freq_dest) {
1541         case MC_CG_ARB_FREQ_F0:
1542                 WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
1543                 WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
1544                 WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
1545                 break;
1546         case MC_CG_ARB_FREQ_F1:
1547                 WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
1548                 WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
1549                 WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
1550                 break;
1551         case MC_CG_ARB_FREQ_F2:
1552                 WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
1553                 WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
1554                 WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
1555                 break;
1556         case MC_CG_ARB_FREQ_F3:
1557                 WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
1558                 WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
1559                 WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
1560                 break;
1561         default:
1562                 return -EINVAL;
1563         }
1564
1565         mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
1566         WREG32(MC_CG_CONFIG, mc_cg_config);
1567         WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
1568
1569         return 0;
1570 }
1571
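     /*
      * Initialize the arb table index in SMC SRAM: the top byte of the
      * first dword selects MC_CG_ARB_FREQ_F1 as the current arbiter set.
      */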
1572 static int ni_init_arb_table_index(struct radeon_device *rdev)
1573 {
1574         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1575         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1576         u32 tmp;
1577         int ret;
1578
1579         ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1580                                         &tmp, pi->sram_end);
1581         if (ret)
1582                 return ret;
1583
1584         tmp &= 0x00FFFFFF;
1585         tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1586
1587         return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
1588                                           tmp, pi->sram_end);
1589 }
1590
1591 static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
1592 {
1593         return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
1594 }
1595
1596 static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1597 {
1598         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1599         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1600         u32 tmp;
1601         int ret;
1602
1603         ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1604                                         &tmp, pi->sram_end);
1605         if (ret)
1606                 return ret;
1607
1608         tmp = (tmp >> 24) & 0xff;
1609
1610         if (tmp == MC_CG_ARB_FREQ_F0)
1611                 return 0;
1612
1613         return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
1614 }
1615
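     /*
      * Build one MC arbiter register set for a performance level: the
      * refresh rate derived from the level's sclk, plus the DRAM timings
      * read back after the ATOM tables have programmed them for the
      * level's sclk/mclk pair.
      */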
1616 static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
1617                                                 struct rv7xx_pl *pl,
1618                                                 SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
1619 {
1620         u32 dram_timing;
1621         u32 dram_timing2;
1622
1623         arb_regs->mc_arb_rfsh_rate =
1624                 (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
1625
1626
1627         radeon_atom_set_engine_dram_timings(rdev,
1628                                             pl->sclk,
1629                                             pl->mclk);
1630
1631         dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1632         dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1633
1634         arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
1635         arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
1636
1637         return 0;
1638 }
1639
1640 static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
1641                                                   struct radeon_ps *radeon_state,
1642                                                   unsigned int first_arb_set)
1643 {
1644         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1645         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1646         struct ni_ps *state = ni_get_ps(radeon_state);
1647         SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
1648         int i, ret = 0;
1649
1650         for (i = 0; i < state->performance_level_count; i++) {
1651                 ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
1652                 if (ret)
1653                         break;
1654
1655                 ret = rv770_copy_bytes_to_smc(rdev,
1656                                               (u16)(ni_pi->arb_table_start +
1657                                                     offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
1658                                                     sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
1659                                               (u8 *)&arb_regs,
1660                                               (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
1661                                               pi->sram_end);
1662                 if (ret)
1663                         break;
1664         }
1665         return ret;
1666 }
1667
1668 static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
1669                                                struct radeon_ps *radeon_new_state)
1670 {
1671         return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
1672                                                       NISLANDS_DRIVER_STATE_ARB_INDEX);
1673 }
1674
1675 static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
1676                                            struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1677 {
1678         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1679
1680         voltage->index = eg_pi->mvdd_high_index;
1681         voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1682 }
1683
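     /*
      * Translate the boot (initial) power state into the SMC state table:
      * saved MPLL/SPLL register images, boot sclk/mclk, VDDC/VDDCI/MVDD
      * voltages, arbiter index, PCIe gen2 flag and GDDR5 strobe/EDC flags.
      * The single level carries zeroed DPM2 parameters and fully masked SQ
      * power-throttle values.
      */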
1684 static int ni_populate_smc_initial_state(struct radeon_device *rdev,
1685                                          struct radeon_ps *radeon_initial_state,
1686                                          NISLANDS_SMC_STATETABLE *table)
1687 {
1688         struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
1689         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1690         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1691         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1692         u32 reg;
1693         int ret;
1694
1695         table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
1696                 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
1697         table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
1698                 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
1699         table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
1700                 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
1701         table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
1702                 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
1703         table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
1704                 cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
1705         table->initialState.levels[0].mclk.vDLL_CNTL =
1706                 cpu_to_be32(ni_pi->clock_registers.dll_cntl);
1707         table->initialState.levels[0].mclk.vMPLL_SS =
1708                 cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
1709         table->initialState.levels[0].mclk.vMPLL_SS2 =
1710                 cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
1711         table->initialState.levels[0].mclk.mclk_value =
1712                 cpu_to_be32(initial_state->performance_levels[0].mclk);
1713
1714         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1715                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
1716         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1717                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
1718         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1719                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
1720         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
1721                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
1722         table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1723                 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
1724         table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1725                 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
1726         table->initialState.levels[0].sclk.sclk_value =
1727                 cpu_to_be32(initial_state->performance_levels[0].sclk);
1728         table->initialState.levels[0].arbRefreshState =
1729                 NISLANDS_INITIAL_STATE_ARB_INDEX;
1730
1731         table->initialState.levels[0].ACIndex = 0;
1732
1733         ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1734                                         initial_state->performance_levels[0].vddc,
1735                                         &table->initialState.levels[0].vddc);
1736         if (!ret) {
1737                 u16 std_vddc;
1738
1739                 ret = ni_get_std_voltage_value(rdev,
1740                                                &table->initialState.levels[0].vddc,
1741                                                &std_vddc);
1742                 if (!ret)
1743                         ni_populate_std_voltage_value(rdev, std_vddc,
1744                                                       table->initialState.levels[0].vddc.index,
1745                                                       &table->initialState.levels[0].std_vddc);
1746         }
1747
1748         if (eg_pi->vddci_control)
1749                 ni_populate_voltage_value(rdev,
1750                                           &eg_pi->vddci_voltage_table,
1751                                           initial_state->performance_levels[0].vddci,
1752                                           &table->initialState.levels[0].vddci);
1753
1754         ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);
1755
1756         reg = CG_R(0xffff) | CG_L(0);
1757         table->initialState.levels[0].aT = cpu_to_be32(reg);
1758
1759         table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1760
1761         if (pi->boot_in_gen2)
1762                 table->initialState.levels[0].gen2PCIE = 1;
1763         else
1764                 table->initialState.levels[0].gen2PCIE = 0;
1765
1766         if (pi->mem_gddr5) {
1767                 table->initialState.levels[0].strobeMode =
1768                         cypress_get_strobe_mode_settings(rdev,
1769                                                          initial_state->performance_levels[0].mclk);
1770
1771                 if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
1772                         table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
1773                 else
1774                         table->initialState.levels[0].mcFlags =  0;
1775         }
1776
1777         table->initialState.levelCount = 1;
1778
1779         table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1780
1781         table->initialState.levels[0].dpm2.MaxPS = 0;
1782         table->initialState.levels[0].dpm2.NearTDPDec = 0;
1783         table->initialState.levels[0].dpm2.AboveSafeInc = 0;
1784         table->initialState.levels[0].dpm2.BelowSafeInc = 0;
1785
1786         reg = MIN_POWER_MASK | MAX_POWER_MASK;
1787         table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1788
1789         reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1790         table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1791
1792         return 0;
1793 }
1794
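     /*
      * Build the ACPI (lowest power) state from the initial state: ACPI or
      * minimum VDDC/VDDCI, the memory PLL and DLL forced into reset and
      * bypass, the sclk mux switched to source 4, and the sclk/mclk values
      * set to zero.
      */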
1795 static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
1796                                       NISLANDS_SMC_STATETABLE *table)
1797 {
1798         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1799         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1800         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1801         u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
1802         u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
1803         u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
1804         u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
1805         u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
1806         u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
1807         u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
1808         u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
1809         u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
1810         u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
1811         u32 reg;
1812         int ret;
1813
1814         table->ACPIState = table->initialState;
1815
1816         table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1817
1818         if (pi->acpi_vddc) {
1819                 ret = ni_populate_voltage_value(rdev,
1820                                                 &eg_pi->vddc_voltage_table,
1821                                                 pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
1822                 if (!ret) {
1823                         u16 std_vddc;
1824
1825                         ret = ni_get_std_voltage_value(rdev,
1826                                                        &table->ACPIState.levels[0].vddc, &std_vddc);
1827                         if (!ret)
1828                                 ni_populate_std_voltage_value(rdev, std_vddc,
1829                                                               table->ACPIState.levels[0].vddc.index,
1830                                                               &table->ACPIState.levels[0].std_vddc);
1831                 }
1832
1833                 if (pi->pcie_gen2) {
1834                         if (pi->acpi_pcie_gen2)
1835                                 table->ACPIState.levels[0].gen2PCIE = 1;
1836                         else
1837                                 table->ACPIState.levels[0].gen2PCIE = 0;
1838                 } else {
1839                         table->ACPIState.levels[0].gen2PCIE = 0;
1840                 }
1841         } else {
1842                 ret = ni_populate_voltage_value(rdev,
1843                                                 &eg_pi->vddc_voltage_table,
1844                                                 pi->min_vddc_in_table,
1845                                                 &table->ACPIState.levels[0].vddc);
1846                 if (!ret) {
1847                         u16 std_vddc;
1848
1849                         ret = ni_get_std_voltage_value(rdev,
1850                                                        &table->ACPIState.levels[0].vddc,
1851                                                        &std_vddc);
1852                         if (!ret)
1853                                 ni_populate_std_voltage_value(rdev, std_vddc,
1854                                                               table->ACPIState.levels[0].vddc.index,
1855                                                               &table->ACPIState.levels[0].std_vddc);
1856                 }
1857                 table->ACPIState.levels[0].gen2PCIE = 0;
1858         }
1859
1860         if (eg_pi->acpi_vddci) {
1861                 if (eg_pi->vddci_control)
1862                         ni_populate_voltage_value(rdev,
1863                                                   &eg_pi->vddci_voltage_table,
1864                                                   eg_pi->acpi_vddci,
1865                                                   &table->ACPIState.levels[0].vddci);
1866         }
1867
1868
1869         mpll_ad_func_cntl &= ~PDNB;
1870
1871         mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1872
1873         if (pi->mem_gddr5)
1874                 mpll_dq_func_cntl &= ~PDNB;
1875         mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
1876
1877
1878         mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1879                              MRDCKA1_RESET |
1880                              MRDCKB0_RESET |
1881                              MRDCKB1_RESET |
1882                              MRDCKC0_RESET |
1883                              MRDCKC1_RESET |
1884                              MRDCKD0_RESET |
1885                              MRDCKD1_RESET);
1886
1887         mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1888                               MRDCKA1_PDNB |
1889                               MRDCKB0_PDNB |
1890                               MRDCKB1_PDNB |
1891                               MRDCKC0_PDNB |
1892                               MRDCKC1_PDNB |
1893                               MRDCKD0_PDNB |
1894                               MRDCKD1_PDNB);
1895
1896         dll_cntl |= (MRDCKA0_BYPASS |
1897                      MRDCKA1_BYPASS |
1898                      MRDCKB0_BYPASS |
1899                      MRDCKB1_BYPASS |
1900                      MRDCKC0_BYPASS |
1901                      MRDCKC1_BYPASS |
1902                      MRDCKD0_BYPASS |
1903                      MRDCKD1_BYPASS);
1904
1905         spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1906         spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1907
1908         table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
1909         table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
1910         table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
1911         table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
1912         table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
1913         table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);
1914
1915         table->ACPIState.levels[0].mclk.mclk_value = 0;
1916
1917         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
1918         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
1919         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
1920         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);
1921
1922         table->ACPIState.levels[0].sclk.sclk_value = 0;
1923
1924         ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1925
1926         if (eg_pi->dynamic_ac_timing)
1927                 table->ACPIState.levels[0].ACIndex = 1;
1928
1929         table->ACPIState.levels[0].dpm2.MaxPS = 0;
1930         table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
1931         table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
1932         table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
1933
1934         reg = MIN_POWER_MASK | MAX_POWER_MASK;
1935         table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1936
1937         reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1938         table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1939
1940         return 0;
1941 }
1942
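     /*
      * Assemble the top-level SMC state table: voltage tables, thermal
      * protection type, platform capability flags, the initial, driver,
      * ACPI and ULV states, plus the initial-state arbiter registers, then
      * upload the whole table to SMC SRAM.
      */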
1943 static int ni_init_smc_table(struct radeon_device *rdev)
1944 {
1945         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1946         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1947         int ret;
1948         struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
1949         NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
1950
1951         memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1952
1953         ni_populate_smc_voltage_tables(rdev, table);
1954
1955         switch (rdev->pm.int_thermal_type) {
1956         case THERMAL_TYPE_NI:
1957         case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1958                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1959                 break;
1960         case THERMAL_TYPE_NONE:
1961                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1962                 break;
1963         default:
1964                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1965                 break;
1966         }
1967
1968         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1969                 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1970
1971         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1972                 table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1973
1974         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1975                 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1976
1977         if (pi->mem_gddr5)
1978                 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1979
1980         ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
1981         if (ret)
1982                 return ret;
1983
1984         ret = ni_populate_smc_acpi_state(rdev, table);
1985         if (ret)
1986                 return ret;
1987
1988         table->driverState = table->initialState;
1989
1990         table->ULVState = table->initialState;
1991
1992         ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
1993                                                      NISLANDS_INITIAL_STATE_ARB_INDEX);
1994         if (ret)
1995                 return ret;
1996
1997         return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
1998                                        sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
1999 }
2000
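     /*
      * Compute the SPLL register values for a target engine clock.  The
      * feedback divider is a fractional value scaled by 16384 (with
      * dithering enabled), and spread spectrum is programmed when the ATOM
      * SS tables provide an entry for the resulting VCO frequency.
      */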
2001 static int ni_calculate_sclk_params(struct radeon_device *rdev,
2002                                     u32 engine_clock,
2003                                     NISLANDS_SMC_SCLK_VALUE *sclk)
2004 {
2005         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2006         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2007         struct atom_clock_dividers dividers;
2008         u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
2009         u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
2010         u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
2011         u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
2012         u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
2013         u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
2014         u64 tmp;
2015         u32 reference_clock = rdev->clock.spll.reference_freq;
2016         u32 reference_divider;
2017         u32 fbdiv;
2018         int ret;
2019
2020         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2021                                              engine_clock, false, &dividers);
2022         if (ret)
2023                 return ret;
2024
2025         reference_divider = 1 + dividers.ref_div;
2026
2027
2028                 tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384;
2029         do_div(tmp, reference_clock);
2030         fbdiv = (u32) tmp;
2031
2032         spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
2033         spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2034         spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
2035
2036         spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2037         spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2038
2039         spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2040         spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2041         spll_func_cntl_3 |= SPLL_DITHEN;
2042
2043         if (pi->sclk_ss) {
2044                 struct radeon_atom_ss ss;
2045                 u32 vco_freq = engine_clock * dividers.post_div;
2046
2047                 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2048                                                      ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2049                         u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2050                         u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2051
2052                         cg_spll_spread_spectrum &= ~CLK_S_MASK;
2053                         cg_spll_spread_spectrum |= CLK_S(clk_s);
2054                         cg_spll_spread_spectrum |= SSEN;
2055
2056                         cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2057                         cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2058                 }
2059         }
2060
2061         sclk->sclk_value = engine_clock;
2062         sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2063         sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2064         sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2065         sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2066         sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2067         sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
2068
2069         return 0;
2070 }
2071
2072 static int ni_populate_sclk_value(struct radeon_device *rdev,
2073                                   u32 engine_clock,
2074                                   NISLANDS_SMC_SCLK_VALUE *sclk)
2075 {
2076         NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2077         int ret;
2078
2079         ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
2080         if (!ret) {
2081                 sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2082                 sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2083                 sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2084                 sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2085                 sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2086                 sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2087                 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
2088         }
2089
2090         return ret;
2091 }
2092
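     /*
      * Pre-compute a 256-entry SPLL divider table for the SMC, stepping
      * the engine clock by 512 per entry and packing the post divider,
      * feedback divider and spread-spectrum clk_s/clk_v fields, then
      * upload the table to SMC SRAM.
      */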
2093 static int ni_init_smc_spll_table(struct radeon_device *rdev)
2094 {
2095         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2096         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2097         SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2098         NISLANDS_SMC_SCLK_VALUE sclk_params;
2099         u32 fb_div;
2100         u32 p_div;
2101         u32 clk_s;
2102         u32 clk_v;
2103         u32 sclk = 0;
2104         int i, ret;
2105         u32 tmp;
2106
2107         if (ni_pi->spll_table_start == 0)
2108                 return -EINVAL;
2109
2110         spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2111         if (spll_table == NULL)
2112                 return -ENOMEM;
2113
2114         for (i = 0; i < 256; i++) {
2115                 ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
2116                 if (ret)
2117                         break;
2118
2119                 p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2120                 fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2121                 clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2122                 clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2123
2124                 fb_div &= ~0x00001FFF;
2125                 fb_div >>= 1;
2126                 clk_v >>= 6;
2127
2128                 if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2129                         ret = -EINVAL;
2130
2131                 if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2132                         ret = -EINVAL;
2133
2134                 if (fb_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT))
2135                         ret = -EINVAL;
2136
2137                 if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2138                         ret = -EINVAL;
2139
2140                 if (ret)
2141                         break;
2142
2143                 tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2144                         ((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2145                 spll_table->freq[i] = cpu_to_be32(tmp);
2146
2147                 tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2148                         ((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2149                 spll_table->ss[i] = cpu_to_be32(tmp);
2150
2151                 sclk += 512;
2152         }
2153
2154         if (!ret)
2155                 ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2156                                               sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
2157
2158         kfree(spll_table);
2159
2160         return ret;
2161 }
2162
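     /*
      * Compute the memory PLL (AD/DQ), DLL and power-management register
      * values for a target memory clock: dividers and IBIAS from the ATOM
      * divider tables, optional spread spectrum, and the DLL speed and
      * power-down bits according to dll_state_on.
      */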
2163 static int ni_populate_mclk_value(struct radeon_device *rdev,
2164                                   u32 engine_clock,
2165                                   u32 memory_clock,
2166                                   NISLANDS_SMC_MCLK_VALUE *mclk,
2167                                   bool strobe_mode,
2168                                   bool dll_state_on)
2169 {
2170         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2171         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2172         u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
2173         u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
2174         u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
2175         u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
2176         u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
2177         u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
2178         u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
2179         u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
2180         struct atom_clock_dividers dividers;
2181         u32 ibias;
2182         u32 dll_speed;
2183         int ret;
2184         u32 mc_seq_misc7;
2185
2186         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
2187                                              memory_clock, strobe_mode, &dividers);
2188         if (ret)
2189                 return ret;
2190
2191         if (!strobe_mode) {
2192                 mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
2193
2194                 if (mc_seq_misc7 & 0x8000000)
2195                         dividers.post_div = 1;
2196         }
2197
2198         ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
2199
2200         mpll_ad_func_cntl &= ~(CLKR_MASK |
2201                                YCLK_POST_DIV_MASK |
2202                                CLKF_MASK |
2203                                CLKFRAC_MASK |
2204                                IBIAS_MASK);
2205         mpll_ad_func_cntl |= CLKR(dividers.ref_div);
2206         mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2207         mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
2208         mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2209         mpll_ad_func_cntl |= IBIAS(ibias);
2210
2211         if (dividers.vco_mode)
2212                 mpll_ad_func_cntl_2 |= VCO_MODE;
2213         else
2214                 mpll_ad_func_cntl_2 &= ~VCO_MODE;
2215
2216         if (pi->mem_gddr5) {
2217                 mpll_dq_func_cntl &= ~(CLKR_MASK |
2218                                        YCLK_POST_DIV_MASK |
2219                                        CLKF_MASK |
2220                                        CLKFRAC_MASK |
2221                                        IBIAS_MASK);
2222                 mpll_dq_func_cntl |= CLKR(dividers.ref_div);
2223                 mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2224                 mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
2225                 mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2226                 mpll_dq_func_cntl |= IBIAS(ibias);
2227
2228                 if (strobe_mode)
2229                         mpll_dq_func_cntl &= ~PDNB;
2230                 else
2231                         mpll_dq_func_cntl |= PDNB;
2232
2233                 if (dividers.vco_mode)
2234                         mpll_dq_func_cntl_2 |= VCO_MODE;
2235                 else
2236                         mpll_dq_func_cntl_2 &= ~VCO_MODE;
2237         }
2238
2239         if (pi->mclk_ss) {
2240                 struct radeon_atom_ss ss;
2241                 u32 vco_freq = memory_clock * dividers.post_div;
2242
2243                 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2244                                                      ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
2245                         u32 reference_clock = rdev->clock.mpll.reference_freq;
2246                         u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
2247                         u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
2248                         u32 clk_v = ss.percentage *
2249                                 (0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
2250
2251                         mpll_ss1 &= ~CLKV_MASK;
2252                         mpll_ss1 |= CLKV(clk_v);
2253
2254                         mpll_ss2 &= ~CLKS_MASK;
2255                         mpll_ss2 |= CLKS(clk_s);
2256                 }
2257         }
2258
2259         dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
2260                                         memory_clock);
2261
2262         mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
2263         mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
2264         if (dll_state_on)
2265                 mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
2266                                      MRDCKA1_PDNB |
2267                                      MRDCKB0_PDNB |
2268                                      MRDCKB1_PDNB |
2269                                      MRDCKC0_PDNB |
2270                                      MRDCKC1_PDNB |
2271                                      MRDCKD0_PDNB |
2272                                      MRDCKD1_PDNB);
2273         else
2274                 mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
2275                                       MRDCKA1_PDNB |
2276                                       MRDCKB0_PDNB |
2277                                       MRDCKB1_PDNB |
2278                                       MRDCKC0_PDNB |
2279                                       MRDCKC1_PDNB |
2280                                       MRDCKD0_PDNB |
2281                                       MRDCKD1_PDNB);
2282
2283
2284         mclk->mclk_value = cpu_to_be32(memory_clock);
2285         mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
2286         mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
2287         mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
2288         mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
2289         mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
2290         mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
2291         mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
2292         mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);
2293
2294         return 0;
2295 }
2296
2297 static void ni_populate_smc_sp(struct radeon_device *rdev,
2298                                struct radeon_ps *radeon_state,
2299                                NISLANDS_SMC_SWSTATE *smc_state)
2300 {
2301         struct ni_ps *ps = ni_get_ps(radeon_state);
2302         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2303         int i;
2304
2305         for (i = 0; i < ps->performance_level_count - 1; i++)
2306                 smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2307
2308         smc_state->levels[ps->performance_level_count - 1].bSP =
2309                 cpu_to_be32(pi->psp);
2310 }
2311
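     /*
      * Convert one driver performance level into an SMC hardware level:
      * PCIe gen2 flag, sclk/mclk register sets, stutter, EDC and RTT
      * memory flags, plus the VDDC/VDDCI/MVDD voltage entries.
      */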
2312 static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
2313                                          struct rv7xx_pl *pl,
2314                                          NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
2315 {
2316         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2317         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2318         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2319         int ret;
2320         bool dll_state_on;
2321         u16 std_vddc;
2322         u32 tmp = RREG32(DC_STUTTER_CNTL);
2323
2324         level->gen2PCIE = pi->pcie_gen2 ?
2325                 ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
2326
2327         ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
2328         if (ret)
2329                 return ret;
2330
2331         level->mcFlags =  0;
2332         if (pi->mclk_stutter_mode_threshold &&
2333             (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
2334             !eg_pi->uvd_enabled &&
2335             (tmp & DC_STUTTER_ENABLE_A) &&
2336             (tmp & DC_STUTTER_ENABLE_B))
2337                 level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;
2338
2339         if (pi->mem_gddr5) {
2340                 if (pl->mclk > pi->mclk_edc_enable_threshold)
2341                         level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
2342                 if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
2343                         level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;
2344
2345                 level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
2346
2347                 if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
2348                         if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
2349                             ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
2350                                 dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
2351                         else
2352                                 dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
2353                 } else {
2354                         dll_state_on = false;
2355                         if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
2356                                 level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
2357                 }
2358
2359                 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
2360                                              &level->mclk,
2361                                              (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
2362                                              dll_state_on);
2363         } else
2364                 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);
2365
2366         if (ret)
2367                 return ret;
2368
2369         ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
2370                                         pl->vddc, &level->vddc);
2371         if (ret)
2372                 return ret;
2373
2374         ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
2375         if (ret)
2376                 return ret;
2377
2378         ni_populate_std_voltage_value(rdev, std_vddc,
2379                                       level->vddc.index, &level->std_vddc);
2380
2381         if (eg_pi->vddci_control) {
2382                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
2383                                                 pl->vddci, &level->vddci);
2384                 if (ret)
2385                         return ret;
2386         }
2387
2388         ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
2389
2390         return ret;
2391 }
2392
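     /*
      * Fill in the aT transition fields: states with fewer than two levels
      * get a fixed CG_R(0xffff) value; otherwise the low threshold from
      * r600_calculate_at() feeds CG_R of each level and the high threshold
      * feeds CG_L of the next level, scaled by the bsp/pbsp parameters.
      */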
2393 static int ni_populate_smc_t(struct radeon_device *rdev,
2394                              struct radeon_ps *radeon_state,
2395                              NISLANDS_SMC_SWSTATE *smc_state)
2396 {
2397         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2398         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2399         struct ni_ps *state = ni_get_ps(radeon_state);
2400         u32 a_t;
2401         u32 t_l, t_h;
2402         u32 high_bsp;
2403         int i, ret;
2404
2405         if (state->performance_level_count >= 9)
2406                 return -EINVAL;
2407
2408         if (state->performance_level_count < 2) {
2409                 a_t = CG_R(0xffff) | CG_L(0);
2410                 smc_state->levels[0].aT = cpu_to_be32(a_t);
2411                 return 0;
2412         }
2413
2414         smc_state->levels[0].aT = cpu_to_be32(0);
2415
2416         for (i = 0; i <= state->performance_level_count - 2; i++) {
2417                 if (eg_pi->uvd_enabled)
2418                         ret = r600_calculate_at(
2419                                 1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
2420                                 100 * R600_AH_DFLT,
2421                                 state->performance_levels[i + 1].sclk,
2422                                 state->performance_levels[i].sclk,
2423                                 &t_l,
2424                                 &t_h);
2425                 else
2426                         ret = r600_calculate_at(
2427                                 1000 * (i + 1),
2428                                 100 * R600_AH_DFLT,
2429                                 state->performance_levels[i + 1].sclk,
2430                                 state->performance_levels[i].sclk,
2431                                 &t_l,
2432                                 &t_h);
2433
2434                 if (ret) {
2435                         t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
2436                         t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
2437                 }
2438
2439                 a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
2440                 a_t |= CG_R(t_l * pi->bsp / 20000);
2441                 smc_state->levels[i].aT = cpu_to_be32(a_t);
2442
2443                 high_bsp = (i == state->performance_level_count - 2) ?
2444                         pi->pbsp : pi->bsp;
2445
2446                 a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
2447                 smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
2448         }
2449
2450         return 0;
2451 }
2452
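     /*
      * Fill in the per-level DPM2 power-containment parameters: MaxPS is
      * the pulse-skip fraction between a level's sclk and the minimum sclk
      * it may be throttled to, and the power-boost flag is set on the
      * lower levels when a non-zero boost limit was written to the SMC.
      */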
2453 static int ni_populate_power_containment_values(struct radeon_device *rdev,
2454                                                 struct radeon_ps *radeon_state,
2455                                                 NISLANDS_SMC_SWSTATE *smc_state)
2456 {
2457         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2458         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2459         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2460         struct ni_ps *state = ni_get_ps(radeon_state);
2461         u32 prev_sclk;
2462         u32 max_sclk;
2463         u32 min_sclk;
2464         int i, ret;
2465         u32 tdp_limit;
2466         u32 near_tdp_limit;
2467         u32 power_boost_limit;
2468         u8 max_ps_percent;
2469
2470         if (ni_pi->enable_power_containment == false)
2471                 return 0;
2472
2473         if (state->performance_level_count == 0)
2474                 return -EINVAL;
2475
2476         if (smc_state->levelCount != state->performance_level_count)
2477                 return -EINVAL;
2478
2479         ret = ni_calculate_adjusted_tdp_limits(rdev,
2480                                                false, /* ??? */
2481                                                rdev->pm.dpm.tdp_adjustment,
2482                                                &tdp_limit,
2483                                                &near_tdp_limit);
2484         if (ret)
2485                 return ret;
2486
2487         power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);
2488
2489         ret = rv770_write_smc_sram_dword(rdev,
2490                                          pi->state_table_start +
2491                                          offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
2492                                          offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
2493                                          ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
2494                                          pi->sram_end);
2495         if (ret)
2496                 power_boost_limit = 0;
2497
2498         smc_state->levels[0].dpm2.MaxPS = 0;
2499         smc_state->levels[0].dpm2.NearTDPDec = 0;
2500         smc_state->levels[0].dpm2.AboveSafeInc = 0;
2501         smc_state->levels[0].dpm2.BelowSafeInc = 0;
2502         smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;
2503
2504         for (i = 1; i < state->performance_level_count; i++) {
2505                 prev_sclk = state->performance_levels[i-1].sclk;
2506                 max_sclk  = state->performance_levels[i].sclk;
2507                 max_ps_percent = (i != (state->performance_level_count - 1)) ?
2508                         NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;
2509
2510                 if (max_sclk < prev_sclk)
2511                         return -EINVAL;
2512
2513                 if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
2514                         min_sclk = max_sclk;
2515                 else if (1 == i)
2516                         min_sclk = prev_sclk;
2517                 else
2518                         min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;
2519
2520                 if (min_sclk < state->performance_levels[0].sclk)
2521                         min_sclk = state->performance_levels[0].sclk;
2522
2523                 if (min_sclk == 0)
2524                         return -EINVAL;
2525
2526                 smc_state->levels[i].dpm2.MaxPS =
2527                         (u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
2528                 smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
2529                 smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
2530                 smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
2531                 smc_state->levels[i].stateFlags |=
2532                         ((i != (state->performance_level_count - 1)) && power_boost_limit) ?
2533                         PPSMC_STATEFLAG_POWERBOOST : 0;
2534         }
2535
2536         return 0;
2537 }
2538
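     /*
      * Program per-level SQ power ramping: levels at or above the ramping
      * threshold get the DPM2 max/min power, delta, STI and LTI values;
      * other levels (or all of them, if any default does not fit its
      * register field) get fully masked throttle registers.
      */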
2539 static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
2540                                          struct radeon_ps *radeon_state,
2541                                          NISLANDS_SMC_SWSTATE *smc_state)
2542 {
2543         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2544         struct ni_ps *state = ni_get_ps(radeon_state);
2545         u32 sq_power_throttle;
2546         u32 sq_power_throttle2;
2547         bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2548         int i;
2549
2550         if (state->performance_level_count == 0)
2551                 return -EINVAL;
2552
2553         if (smc_state->levelCount != state->performance_level_count)
2554                 return -EINVAL;
2555
2556         if (rdev->pm.dpm.sq_ramping_threshold == 0)
2557                 return -EINVAL;
2558
2559         if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2560                 enable_sq_ramping = false;
2561
2562         if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2563                 enable_sq_ramping = false;
2564
2565         if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2566                 enable_sq_ramping = false;
2567
2568         if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2569                 enable_sq_ramping = false;
2570
2571         if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO > (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2572                 enable_sq_ramping = false;
2573
2574         for (i = 0; i < state->performance_level_count; i++) {
2575                 sq_power_throttle  = 0;
2576                 sq_power_throttle2 = 0;
2577
2578                 if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2579                     enable_sq_ramping) {
2580                         sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2581                         sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2582                         sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2583                         sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2584                         sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
2585                 } else {
2586                         sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2587                         sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2588                 }
2589
2590                 smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
2591                 smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
2592         }
2593
2594         return 0;
2595 }
2596
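/*
 * Ask the SMC to start or stop TDP clamping (power containment).  This is a
 * no-op unless power containment is supported, clamping is never activated
 * for UVD states, and a rejected SMC message is reported as -EINVAL with
 * pc_enabled left cleared.
 */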
2597 static int ni_enable_power_containment(struct radeon_device *rdev,
2598                                        struct radeon_ps *radeon_new_state,
2599                                        bool enable)
2600 {
2601         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2602         PPSMC_Result smc_result;
2603         int ret = 0;
2604
2605         if (ni_pi->enable_power_containment) {
2606                 if (enable) {
2607                         if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2608                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2609                                 if (smc_result != PPSMC_Result_OK) {
2610                                         ret = -EINVAL;
2611                                         ni_pi->pc_enabled = false;
2612                                 } else {
2613                                         ni_pi->pc_enabled = true;
2614                                 }
2615                         }
2616                 } else {
2617                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2618                         if (smc_result != PPSMC_Result_OK)
2619                                 ret = -EINVAL;
2620                         ni_pi->pc_enabled = false;
2621                 }
2622         }
2623
2624         return ret;
2625 }
2626
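/*
 * Translate a driver power state into the SMC software-state layout: convert
 * each performance level, choose display watermarks, assign MC register
 * table and arbitration slots, and fill in the power-containment and SQ
 * ramping data.  Failures in those two optional tables only disable the
 * corresponding feature.
 */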
2627 static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
2628                                          struct radeon_ps *radeon_state,
2629                                          NISLANDS_SMC_SWSTATE *smc_state)
2630 {
2631         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2632         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2633         struct ni_ps *state = ni_get_ps(radeon_state);
2634         int i, ret;
2635         u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;
2636
2637         if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
2638                 smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
2639
2640         smc_state->levelCount = 0;
2641
2642         if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
2643                 return -EINVAL;
2644
2645         for (i = 0; i < state->performance_level_count; i++) {
2646                 ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
2647                                                     &smc_state->levels[i]);
2648                 smc_state->levels[i].arbRefreshState =
2649                         (u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);
2650
2651                 if (ret)
2652                         return ret;
2653
2654                 if (ni_pi->enable_power_containment)
2655                         smc_state->levels[i].displayWatermark =
2656                                 (state->performance_levels[i].sclk < threshold) ?
2657                                 PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2658                 else
2659                         smc_state->levels[i].displayWatermark = (i < 2) ?
2660                                 PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2661
2662                 if (eg_pi->dynamic_ac_timing)
2663                         smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
2664                 else
2665                         smc_state->levels[i].ACIndex = 0;
2666
2667                 smc_state->levelCount++;
2668         }
2669
2670         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
2671                                       cpu_to_be32(threshold / 512));
2672
2673         ni_populate_smc_sp(rdev, radeon_state, smc_state);
2674
2675         ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
2676         if (ret)
2677                 ni_pi->enable_power_containment = false;
2678
2679         ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
2680         if (ret)
2681                 ni_pi->enable_sq_ramping = false;
2682
2683         return ni_populate_smc_t(rdev, radeon_state, smc_state);
2684 }
2685
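/*
 * Build the SMC software state for the new power state in a temporary buffer
 * and copy it into SMC RAM at the driver-state offset of the state table.
 */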
2686 static int ni_upload_sw_state(struct radeon_device *rdev,
2687                               struct radeon_ps *radeon_new_state)
2688 {
2689         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2690         u16 address = pi->state_table_start +
2691                 offsetof(NISLANDS_SMC_STATETABLE, driverState);
2692         u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2693                 ((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2694         int ret;
2695         NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2696
2697         if (smc_state == NULL)
2698                 return -ENOMEM;
2699
2700         ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2701         if (ret)
2702                 goto done;
2703
2704         ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
2705
2706 done:
2707         kfree(smc_state);
2708
2709         return ret;
2710 }
2711
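/*
 * Extend the MC register table with derived mode-register columns: EMRS/MRS
 * (and MRS1) command values are built from the live MC_PMG_CMD_* registers
 * combined with the per-entry MC_SEQ_MISC1/MC_SEQ_RESERVE_M data, so the SMC
 * can reissue them when it switches memory clocks.
 */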
2712 static int ni_set_mc_special_registers(struct radeon_device *rdev,
2713                                        struct ni_mc_reg_table *table)
2714 {
2715         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2716         u8 i, j, k;
2717         u32 temp_reg;
2718
2719         for (i = 0, j = table->last; i < table->last; i++) {
2720                 switch (table->mc_reg_address[i].s1) {
2721                 case MC_SEQ_MISC1 >> 2:
2722                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2723                                 return -EINVAL;
2724                         temp_reg = RREG32(MC_PMG_CMD_EMRS);
2725                         table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2726                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2727                         for (k = 0; k < table->num_entries; k++)
2728                                 table->mc_reg_table_entry[k].mc_data[j] =
2729                                         ((temp_reg & 0xffff0000)) |
2730                                         ((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2731                         j++;
2732                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2733                                 return -EINVAL;
2734
2735                         temp_reg = RREG32(MC_PMG_CMD_MRS);
2736                         table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2737                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2738                         for(k = 0; k < table->num_entries; k++) {
2739                                 table->mc_reg_table_entry[k].mc_data[j] =
2740                                         (temp_reg & 0xffff0000) |
2741                                         (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2742                                 if (!pi->mem_gddr5)
2743                                         table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2744                         }
2745                         j++;
2746                         if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2747                                 return -EINVAL;
2748                         break;
2749                 case MC_SEQ_RESERVE_M >> 2:
                             /* bounds check before appending, as in the MC_SEQ_MISC1 case */
                             if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
                                     return -EINVAL;
2750                         temp_reg = RREG32(MC_PMG_CMD_MRS1);
2751                         table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2752                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2753                         for (k = 0; k < table->num_entries; k++)
2754                                 table->mc_reg_table_entry[k].mc_data[j] =
2755                                         (temp_reg & 0xffff0000) |
2756                                         (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2757                         j++;
2758                         if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2759                                 return -EINVAL;
2760                         break;
2761                 default:
2762                         break;
2763                 }
2764         }
2765
2766         table->last = j;
2767
2768         return 0;
2769 }
2770
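/* Map an MC sequencer register offset to its shadow (_LP) copy, if one exists. */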
2771 static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2772 {
2773         bool result = true;
2774
2775         switch (in_reg) {
2776         case  MC_SEQ_RAS_TIMING >> 2:
2777                 *out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2778                 break;
2779         case MC_SEQ_CAS_TIMING >> 2:
2780                 *out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2781                 break;
2782         case MC_SEQ_MISC_TIMING >> 2:
2783                 *out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2784                 break;
2785         case MC_SEQ_MISC_TIMING2 >> 2:
2786                 *out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2787                 break;
2788         case MC_SEQ_RD_CTL_D0 >> 2:
2789                 *out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2790                 break;
2791         case MC_SEQ_RD_CTL_D1 >> 2:
2792                 *out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2793                 break;
2794         case MC_SEQ_WR_CTL_D0 >> 2:
2795                 *out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2796                 break;
2797         case MC_SEQ_WR_CTL_D1 >> 2:
2798                 *out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2799                 break;
2800         case MC_PMG_CMD_EMRS >> 2:
2801                 *out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2802                 break;
2803         case MC_PMG_CMD_MRS >> 2:
2804                 *out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2805                 break;
2806         case MC_PMG_CMD_MRS1 >> 2:
2807                 *out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2808                 break;
2809         case MC_SEQ_PMG_TIMING >> 2:
2810                 *out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2811                 break;
2812         case MC_PMG_CMD_MRS2 >> 2:
2813                 *out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
2814                 break;
2815         default:
2816                 result = false;
2817                 break;
2818         }
2819
2820         return result;
2821 }
2822
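/*
 * Flag the MC register table columns whose value actually differs between
 * MCLK entries; only those columns are exported to the SMC.
 */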
2823 static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2824 {
2825         u8 i, j;
2826
2827         for (i = 0; i < table->last; i++) {
2828                 for (j = 1; j < table->num_entries; j++) {
2829                         if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2830                                 table->valid_flag |= 1 << i;
2831                                 break;
2832                         }
2833                 }
2834         }
2835 }
2836
2837 static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2838 {
2839         u32 i;
2840         u16 address;
2841
2842         for (i = 0; i < table->last; i++)
2843                 table->mc_reg_address[i].s0 =
2844                         ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2845                         address : table->mc_reg_address[i].s1;
2846 }
2847
2848 static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2849                                       struct ni_mc_reg_table *ni_table)
2850 {
2851         u8 i, j;
2852
2853         if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2854                 return -EINVAL;
2855         if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2856                 return -EINVAL;
2857
2858         for (i = 0; i < table->last; i++)
2859                 ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2860         ni_table->last = table->last;
2861
2862         for (i = 0; i < table->num_entries; i++) {
2863                 ni_table->mc_reg_table_entry[i].mclk_max =
2864                         table->mc_reg_table_entry[i].mclk_max;
2865                 for (j = 0; j < table->last; j++)
2866                         ni_table->mc_reg_table_entry[i].mc_data[j] =
2867                                 table->mc_reg_table_entry[i].mc_data[j];
2868         }
2869         ni_table->num_entries = table->num_entries;
2870
2871         return 0;
2872 }
2873
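/*
 * Build the driver's MC register table: seed the _LP shadow registers from
 * the live MC registers, read the per-MCLK table from the VBIOS, append the
 * derived mode-register columns and record which columns actually change.
 */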
2874 static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2875 {
2876         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2877         int ret;
2878         struct atom_mc_reg_table *table;
2879         struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2880         u8 module_index = rv770_get_memory_module_index(rdev);
2881
2882         table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
2883         if (!table)
2884                 return -ENOMEM;
2885
2886         WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2887         WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2888         WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2889         WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2890         WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2891         WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2892         WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2893         WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2894         WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2895         WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2896         WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2897         WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2898         WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2899
2900         ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2901
2902         if (ret)
2903                 goto init_mc_done;
2904
2905         ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2906
2907         if (ret)
2908                 goto init_mc_done;
2909
2910         ni_set_s0_mc_reg_index(ni_table);
2911
2912         ret = ni_set_mc_special_registers(rdev, ni_table);
2913
2914         if (ret)
2915                 goto init_mc_done;
2916
2917         ni_set_valid_flag(ni_table);
2918
2919 init_mc_done:
2920         kfree(table);
2921
2922         return ret;
2923 }
2924
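/*
 * Export the register address pairs of the flagged (changing) columns to the
 * SMC-visible MC register table, capped at SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE.
 */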
2925 static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2926                                          SMC_NIslands_MCRegisters *mc_reg_table)
2927 {
2928         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2929         u32 i, j;
2930
2931         for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2932                 if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2933                         if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2934                                 break;
2935                         mc_reg_table->address[i].s0 =
2936                                 cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2937                         mc_reg_table->address[i].s1 =
2938                                 cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2939                         i++;
2940                 }
2941         }
2942         mc_reg_table->last = (u8)i;
2943 }
2944
2945
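/*
 * Pack one table entry's values for the flagged columns into an SMC register
 * set, byte-swapped to big-endian.  num_entries is the number of register
 * columns (table->last), not the number of MCLK entries.
 */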
2946 static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2947                                     SMC_NIslands_MCRegisterSet *data,
2948                                     u32 num_entries, u32 valid_flag)
2949 {
2950         u32 i, j;
2951
2952         for (i = 0, j = 0; j < num_entries; j++) {
2953                 if (valid_flag & (1 << j)) {
2954                         data->value[i] = cpu_to_be32(entry->mc_data[j]);
2955                         i++;
2956                 }
2957         }
2958 }
2959
2960 static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2961                                                  struct rv7xx_pl *pl,
2962                                                  SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2963 {
2964         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2965         u32 i = 0;
2966
2967         for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2968                 if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
2969                         break;
2970         }
2971
2972         if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
2973                 --i;
2974
2975         ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
2976                                 mc_reg_table_data,
2977                                 ni_pi->mc_reg_table.last,
2978                                 ni_pi->mc_reg_table.valid_flag);
2979 }
2980
2981 static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
2982                                            struct radeon_ps *radeon_state,
2983                                            SMC_NIslands_MCRegisters *mc_reg_table)
2984 {
2985         struct ni_ps *state = ni_get_ps(radeon_state);
2986         int i;
2987
2988         for (i = 0; i < state->performance_level_count; i++) {
2989                 ni_convert_mc_reg_table_entry_to_smc(rdev,
2990                                                      &state->performance_levels[i],
2991                                                      &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
2992         }
2993 }
2994
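/*
 * Initial upload of the MC register table: slot 0 is filled from the boot
 * state's first performance level, slot 1 from the table's lowest-MCLK
 * entry, and the driver-state slots from the boot state's levels, before the
 * whole structure is copied into SMC RAM.
 */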
2995 static int ni_populate_mc_reg_table(struct radeon_device *rdev,
2996                                     struct radeon_ps *radeon_boot_state)
2997 {
2998         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2999         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3000         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3001         struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
3002         SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3003
3004         memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3005
3006         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
3007
3008         ni_populate_mc_reg_addresses(rdev, mc_reg_table);
3009
3010         ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
3011                                              &mc_reg_table->data[0]);
3012
3013         ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
3014                                 &mc_reg_table->data[1],
3015                                 ni_pi->mc_reg_table.last,
3016                                 ni_pi->mc_reg_table.valid_flag);
3017
3018         ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
3019
3020         return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
3021                                        (u8 *)mc_reg_table,
3022                                        sizeof(SMC_NIslands_MCRegisters),
3023                                        pi->sram_end);
3024 }
3025
3026 static int ni_upload_mc_reg_table(struct radeon_device *rdev,
3027                                   struct radeon_ps *radeon_new_state)
3028 {
3029         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3030         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3031         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3032         struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3033         SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3034         u16 address;
3035
3036         memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3037
3038         ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3039
3040         address = eg_pi->mc_reg_table_start +
3041                 (u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3042
3043         return rv770_copy_bytes_to_smc(rdev, address,
3044                                        (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3045                                        sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
3046                                        pi->sram_end);
3047 }
3048
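/*
 * Fill the CAC leakage LUT by evaluating the leakage formula for every
 * (temperature, VDDC) pair, clamping the temperature to the configured
 * minimum; unused voltage columns are padded with the largest computed value.
 */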
3049 static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3050                                                    PP_NIslands_CACTABLES *cac_tables)
3051 {
3052         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3053         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3054         u32 leakage = 0;
3055         unsigned int i, j, table_size;
3056         s32 t;
3057         u32 smc_leakage, max_leakage = 0;
3058         u32 scaling_factor;
3059
3060         table_size = eg_pi->vddc_voltage_table.count;
3061
3062         if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3063                 table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3064
3065         scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3066
3067         for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3068                 for (j = 0; j < table_size; j++) {
3069                         t = (1000 * ((i + 1) * 8));
3070
3071                         if (t < ni_pi->cac_data.leakage_minimum_temperature)
3072                                 t = ni_pi->cac_data.leakage_minimum_temperature;
3073
3074                         ni_calculate_leakage_for_v_and_t(rdev,
3075                                                          &ni_pi->cac_data.leakage_coefficients,
3076                                                          eg_pi->vddc_voltage_table.entries[j].value,
3077                                                          t,
3078                                                          ni_pi->cac_data.i_leakage,
3079                                                          &leakage);
3080
3081                         smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3082                         if (smc_leakage > max_leakage)
3083                                 max_leakage = smc_leakage;
3084
3085                         cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
3086                 }
3087         }
3088
3089         for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3090                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3091                         cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
3092         }
3093         return 0;
3094 }
3095
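/*
 * Fill the CAC leakage LUT from the static per-voltage leakage table in the
 * power-play data: the same value is used for every temperature step and
 * unused voltage columns are padded with the maximum.
 */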
3096 static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
3097                                             PP_NIslands_CACTABLES *cac_tables)
3098 {
3099         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3100         struct radeon_cac_leakage_table *leakage_table =
3101                 &rdev->pm.dpm.dyn_state.cac_leakage_table;
3102         u32 i, j, table_size;
3103         u32 smc_leakage, max_leakage = 0;
3104         u32 scaling_factor;
3105
3106         if (!leakage_table)
3107                 return -EINVAL;
3108
3109         table_size = leakage_table->count;
3110
3111         if (eg_pi->vddc_voltage_table.count != table_size)
3112                 table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
3113                         eg_pi->vddc_voltage_table.count : leakage_table->count;
3114
3115         if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3116                 table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3117
3118         if (table_size == 0)
3119                 return -EINVAL;
3120
3121         scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3122
3123         for (j = 0; j < table_size; j++) {
3124                 smc_leakage = leakage_table->entries[j].leakage;
3125
3126                 if (smc_leakage > max_leakage)
3127                         max_leakage = smc_leakage;
3128
3129                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3130                         cac_tables->cac_lkge_lut[i][j] =
3131                                 cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
3132         }
3133
3134         for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3135                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3136                         cac_tables->cac_lkge_lut[i][j] =
3137                                 cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
3138         }
3139         return 0;
3140 }
3141
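/*
 * Set up CAC: program the TID weights, copy the DC CAC levels and PCIE
 * weights, build the leakage LUT and upload the CAC tables to SMC RAM.
 * Errors are not propagated; they simply disable CAC and power containment.
 */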
3142 static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
3143 {
3144         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3145         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3146         PP_NIslands_CACTABLES *cac_tables = NULL;
3147         int i, ret;
3148         u32 reg;
3149
3150         if (ni_pi->enable_cac == false)
3151                 return 0;
3152
3153         cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
3154         if (!cac_tables)
3155                 return -ENOMEM;
3156
3157         reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
3158         reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
3159                 TID_UNIT(ni_pi->cac_weights->tid_unit));
3160         WREG32(CG_CAC_CTRL, reg);
3161
3162         for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
3163                 ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];
3164
3165         for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
3166                 cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];
3167
3168         ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
3169         ni_pi->cac_data.pwr_const = 0;
3170         ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
3171         ni_pi->cac_data.bif_cac_value = 0;
3172         ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
3173         ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
3174         ni_pi->cac_data.allow_ovrflw = 0;
3175         ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
3176         ni_pi->cac_data.num_win_tdp = 0;
3177         ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;
3178
3179         if (ni_pi->driver_calculate_cac_leakage)
3180                 ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
3181         else
3182                 ret = ni_init_simplified_leakage_table(rdev, cac_tables);
3183
3184         if (ret)
3185                 goto done_free;
3186
3187         cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
3188         cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
3189         cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
3190         cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
3191         cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
3192         cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
3193         cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
3194         cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
3195         cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;
3196
3197         ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
3198                                       sizeof(PP_NIslands_CACTABLES), pi->sram_end);
3199
3200 done_free:
3201         if (ret) {
3202                 ni_pi->enable_cac = false;
3203                 ni_pi->enable_power_containment = false;
3204         }
3205
3206         kfree(cac_tables);
3207
3208         return 0;
3209 }
3210
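/*
 * Program the per-block CAC signal weights (TCP/TA/TCC/CB/DB/SX/SPI/LDS/SC/
 * BIF/CP/PA/VGT/DC/UVD), the SQ CAC thresholds and the MC read/write weights.
 * Only needed when the board requires explicit CAC configuration.
 */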
3211 static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
3212 {
3213         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3214         u32 reg;
3215
3216         if (!ni_pi->enable_cac ||
3217             !ni_pi->cac_configuration_required)
3218                 return 0;
3219
3220         if (ni_pi->cac_weights == NULL)
3221                 return -EINVAL;
3222
3223         reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
3224                                                       WEIGHT_TCP_SIG1_MASK |
3225                                                       WEIGHT_TA_SIG_MASK);
3226         reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
3227                 WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
3228                 WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
3229         WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);
3230
3231         reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
3232                                                       WEIGHT_TCC_EN1_MASK |
3233                                                       WEIGHT_TCC_EN2_MASK);
3234         reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
3235                 WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
3236                 WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
3237         WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);
3238
3239         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
3240                                                       WEIGHT_CB_EN1_MASK |
3241                                                       WEIGHT_CB_EN2_MASK |
3242                                                       WEIGHT_CB_EN3_MASK);
3243         reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
3244                 WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
3245                 WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
3246                 WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
3247         WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);
3248
3249         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
3250                                                       WEIGHT_DB_SIG1_MASK |
3251                                                       WEIGHT_DB_SIG2_MASK |
3252                                                       WEIGHT_DB_SIG3_MASK);
3253         reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
3254                 WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
3255                 WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
3256                 WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
3257         WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);
3258
3259         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
3260                                                       WEIGHT_SXM_SIG1_MASK |
3261                                                       WEIGHT_SXM_SIG2_MASK |
3262                                                       WEIGHT_SXS_SIG0_MASK |
3263                                                       WEIGHT_SXS_SIG1_MASK);
3264         reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
3265                 WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
3266                 WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
3267                 WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
3268                 WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
3269         WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);
3270
3271         reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
3272                                                       WEIGHT_XBR_1_MASK |
3273                                                       WEIGHT_XBR_2_MASK |
3274                                                       WEIGHT_SPI_SIG0_MASK);
3275         reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
3276                 WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
3277                 WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
3278                 WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
3279         WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);
3280
3281         reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
3282                                                       WEIGHT_SPI_SIG2_MASK |
3283                                                       WEIGHT_SPI_SIG3_MASK |
3284                                                       WEIGHT_SPI_SIG4_MASK |
3285                                                       WEIGHT_SPI_SIG5_MASK);
3286         reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
3287                 WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
3288                 WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
3289                 WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
3290                 WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
3291         WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);
3292
3293         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
3294                                                       WEIGHT_LDS_SIG1_MASK |
3295                                                       WEIGHT_SC_MASK);
3296         reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
3297                 WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
3298                 WEIGHT_SC(ni_pi->cac_weights->weight_sc));
3299         WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);
3300
3301         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
3302                                                       WEIGHT_CP_MASK |
3303                                                       WEIGHT_PA_SIG0_MASK |
3304                                                       WEIGHT_PA_SIG1_MASK |
3305                                                       WEIGHT_VGT_SIG0_MASK);
3306         reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
3307                 WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
3308                 WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
3309                 WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
3310                 WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
3311         WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);
3312
3313         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
3314                                                       WEIGHT_VGT_SIG2_MASK |
3315                                                       WEIGHT_DC_SIG0_MASK |
3316                                                       WEIGHT_DC_SIG1_MASK |
3317                                                       WEIGHT_DC_SIG2_MASK);
3318         reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
3319                 WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
3320                 WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
3321                 WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
3322                 WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
3323         WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);
3324
3325         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
3326                                                       WEIGHT_UVD_SIG0_MASK |
3327                                                       WEIGHT_UVD_SIG1_MASK |
3328                                                       WEIGHT_SPARE0_MASK |
3329                                                       WEIGHT_SPARE1_MASK);
3330         reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
3331                 WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
3332                 WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
3333                 WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
3334                 WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
3335         WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);
3336
3337         reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
3338                                                       WEIGHT_SQ_VSP0_MASK);
3339         reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
3340                 WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
3341         WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);
3342
3343         reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
3344         reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
3345         WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);
3346
3347         reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
3348                                                         OVR_VAL_SPARE_0_MASK |
3349                                                         OVR_MODE_SPARE_1_MASK |
3350                                                         OVR_VAL_SPARE_1_MASK);
3351         reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
3352                 OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
3353                 OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
3354                 OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
3355         WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);
3356
3357         reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
3358                                            VSP0_MASK |
3359                                            GPR_MASK);
3360         reg |= (VSP(ni_pi->cac_weights->vsp) |
3361                 VSP0(ni_pi->cac_weights->vsp0) |
3362                 GPR(ni_pi->cac_weights->gpr));
3363         WREG32(SQ_CAC_THRESHOLD, reg);
3364
3365         reg = (MCDW_WR_ENABLE |
3366                MCDX_WR_ENABLE |
3367                MCDY_WR_ENABLE |
3368                MCDZ_WR_ENABLE |
3369                INDEX(0x09D4));
3370         WREG32(MC_CG_CONFIG, reg);
3371
3372         reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
3373                WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
3374                ALLOW_OVERFLOW);
3375         WREG32(MC_CG_DATAPORT, reg);
3376
3377         return 0;
3378 }
3379
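/*
 * Start or stop CAC collection in the SMC.  CAC is never enabled for UVD
 * states, and long-term averaging is enabled opportunistically and dropped
 * if the SMC rejects it.
 */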
3380 static int ni_enable_smc_cac(struct radeon_device *rdev,
3381                              struct radeon_ps *radeon_new_state,
3382                              bool enable)
3383 {
3384         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3385         int ret = 0;
3386         PPSMC_Result smc_result;
3387
3388         if (ni_pi->enable_cac) {
3389                 if (enable) {
3390                         if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
3391                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
3392
3393                                 if (ni_pi->support_cac_long_term_average) {
3394                                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
3395                                         if (PPSMC_Result_OK != smc_result)
3396                                                 ni_pi->support_cac_long_term_average = false;
3397                                 }
3398
3399                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
3400                                 if (PPSMC_Result_OK != smc_result)
3401                                         ret = -EINVAL;
3402
3403                                 ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
3404                         }
3405                 } else if (ni_pi->cac_enabled) {
3406                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
3407
3408                         ni_pi->cac_enabled = false;
3409
3410                         if (ni_pi->support_cac_long_term_average) {
3411                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
3412                                 if (PPSMC_Result_OK != smc_result)
3413                                         ni_pi->support_cac_long_term_average = false;
3414                         }
3415                 }
3416         }
3417
3418         return ret;
3419 }
3420
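/*
 * Forward a PCIe gen1/gen2 performance request to the platform firmware via
 * ACPI, registering the device with the platform on first use; compiled out
 * when CONFIG_ACPI is not set.
 */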
3421 static int ni_pcie_performance_request(struct radeon_device *rdev,
3422                                        u8 perf_req, bool advertise)
3423 {
3424         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3425
3426 #if defined(CONFIG_ACPI)
3427         if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3428             (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
3429                 if (eg_pi->pcie_performance_request_registered == false)
3430                         radeon_acpi_pcie_notify_device_ready(rdev);
3431                 eg_pi->pcie_performance_request_registered = true;
3432                 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3433         } else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3434                    eg_pi->pcie_performance_request_registered) {
3435                 eg_pi->pcie_performance_request_registered = false;
3436                 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3437         }
3438 #endif
3439         return 0;
3440 }
3441
3442 static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3443 {
3444         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3445         u32 tmp;
3446
3447         tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3448
3449         if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3450             (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3451                 pi->pcie_gen2 = true;
3452         else
3453                 pi->pcie_gen2 = false;
3454
3455         if (!pi->pcie_gen2)
3456                 ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
3457
3458         return 0;
3459 }
3460
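/*
 * Enable or disable hardware-controlled (dynamic) PCIe gen2 speed switching
 * in the BIF; this only takes effect when the link partner has advertised
 * gen2 support.
 */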
3461 static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
3462                                             bool enable)
3463 {
3464         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3465         u32 tmp, bif;
3466
3467         tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3468
3469         if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3470             (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3471                 if (enable) {
3472                         if (!pi->boot_in_gen2) {
3473                                 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3474                                 bif |= CG_CLIENT_REQ(0xd);
3475                                 WREG32(CG_BIF_REQ_AND_RSP, bif);
3476                         }
3477                         tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3478                         tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
3479                         tmp |= LC_GEN2_EN_STRAP;
3480
3481                         tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3482                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3483                         udelay(10);
3484                         tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3485                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3486                 } else {
3487                         if (!pi->boot_in_gen2) {
3488                                 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3489                                 bif |= CG_CLIENT_REQ(0xd);
3490                                 WREG32(CG_BIF_REQ_AND_RSP, bif);
3491
3492                                 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3493                                 tmp &= ~LC_GEN2_EN_STRAP;
3494                         }
3495                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3496                 }
3497         }
3498 }
3499
3500 static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3501                                         bool enable)
3502 {
3503         ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3504
3505         if (enable)
3506                 WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3507         else
3508                 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
3509 }
3510
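/*
 * Reprogram the UVD clocks around an engine clock switch: before the switch
 * when the new state's top SCLK is lower than the current one, afterwards
 * otherwise, and not at all if VCLK/DCLK are unchanged.
 */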
3511 void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3512                                            struct radeon_ps *new_ps,
3513                                            struct radeon_ps *old_ps)
3514 {
3515         struct ni_ps *new_state = ni_get_ps(new_ps);
3516         struct ni_ps *current_state = ni_get_ps(old_ps);
3517
3518         if ((new_ps->vclk == old_ps->vclk) &&
3519             (new_ps->dclk == old_ps->dclk))
3520                 return;
3521
3522         if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3523             current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3524                 return;
3525
3526         radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3527 }
3528
3529 void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3530                                           struct radeon_ps *new_ps,
3531                                           struct radeon_ps *old_ps)
3532 {
3533         struct ni_ps *new_state = ni_get_ps(new_ps);
3534         struct ni_ps *current_state = ni_get_ps(old_ps);
3535
3536         if ((new_ps->vclk == old_ps->vclk) &&
3537             (new_ps->dclk == old_ps->dclk))
3538                 return;
3539
3540         if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3541             current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3542                 return;
3543
3544         radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3545 }
3546
3547 void ni_dpm_setup_asic(struct radeon_device *rdev)
3548 {
3549         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3550
3551         ni_read_clock_registers(rdev);
3552         btc_read_arb_registers(rdev);
3553         rv770_get_memory_type(rdev);
3554         if (eg_pi->pcie_performance_request)
3555                 ni_advertise_gen2_capability(rdev);
3556         rv770_get_pcie_gen2_status(rdev);
3557         rv770_enable_acpi_pm(rdev);
3558 }
3559
3560 void ni_update_current_ps(struct radeon_device *rdev,
3561                           struct radeon_ps *rps)
3562 {
3563         struct ni_ps *new_ps = ni_get_ps(rps);
3564         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3565         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3566
3567         eg_pi->current_rps = *rps;
3568         ni_pi->current_ps = *new_ps;
3569         eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
3570 }
3571
3572 void ni_update_requested_ps(struct radeon_device *rdev,
3573                             struct radeon_ps *rps)
3574 {
3575         struct ni_ps *new_ps = ni_get_ps(rps);
3576         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3577         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3578
3579         eg_pi->requested_rps = *rps;
3580         ni_pi->requested_ps = *new_ps;
3581         eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
3582 }
3583
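/*
 * Full DPM bring-up: apply default clock gating, build the voltage and MC
 * register tables, upload the SMC firmware, state, CAC and TDP data, start
 * the SMC and enable thermal-interrupt based throttling.  Fails with -EINVAL
 * if DPM is already running.
 */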
3584 int ni_dpm_enable(struct radeon_device *rdev)
3585 {
3586         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3587         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3588         struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3589         int ret;
3590
3591         if (pi->gfx_clock_gating)
3592                 ni_cg_clockgating_default(rdev);
3593         if (btc_dpm_enabled(rdev))
3594                 return -EINVAL;
3595         if (pi->mg_clock_gating)
3596                 ni_mg_clockgating_default(rdev);
3597         if (eg_pi->ls_clock_gating)
3598                 ni_ls_clockgating_default(rdev);
3599         if (pi->voltage_control) {
3600                 rv770_enable_voltage_control(rdev, true);
3601                 ret = cypress_construct_voltage_tables(rdev);
3602                 if (ret) {
3603                         DRM_ERROR("cypress_construct_voltage_tables failed\n");
3604                         return ret;
3605                 }
3606         }
3607         if (eg_pi->dynamic_ac_timing) {
3608                 ret = ni_initialize_mc_reg_table(rdev);
3609                 if (ret)
3610                         eg_pi->dynamic_ac_timing = false;
3611         }
3612         if (pi->dynamic_ss)
3613                 cypress_enable_spread_spectrum(rdev, true);
3614         if (pi->thermal_protection)
3615                 rv770_enable_thermal_protection(rdev, true);
3616         rv770_setup_bsp(rdev);
3617         rv770_program_git(rdev);
3618         rv770_program_tp(rdev);
3619         rv770_program_tpp(rdev);
3620         rv770_program_sstp(rdev);
3621         cypress_enable_display_gap(rdev);
3622         rv770_program_vc(rdev);
3623         if (pi->dynamic_pcie_gen2)
3624                 ni_enable_dynamic_pcie_gen2(rdev, true);
3625         ret = rv770_upload_firmware(rdev);
3626         if (ret) {
3627                 DRM_ERROR("rv770_upload_firmware failed\n");
3628                 return ret;
3629         }
3630         ret = ni_process_firmware_header(rdev);
3631         if (ret) {
3632                 DRM_ERROR("ni_process_firmware_header failed\n");
3633                 return ret;
3634         }
3635         ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
3636         if (ret) {
3637                 DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
3638                 return ret;
3639         }
3640         ret = ni_init_smc_table(rdev);
3641         if (ret) {
3642                 DRM_ERROR("ni_init_smc_table failed\n");
3643                 return ret;
3644         }
3645         ret = ni_init_smc_spll_table(rdev);
3646         if (ret) {
3647                 DRM_ERROR("ni_init_smc_spll_table failed\n");
3648                 return ret;
3649         }
3650         ret = ni_init_arb_table_index(rdev);
3651         if (ret) {
3652                 DRM_ERROR("ni_init_arb_table_index failed\n");
3653                 return ret;
3654         }
3655         if (eg_pi->dynamic_ac_timing) {
3656                 ret = ni_populate_mc_reg_table(rdev, boot_ps);
3657                 if (ret) {
3658                         DRM_ERROR("ni_populate_mc_reg_table failed\n");
3659                         return ret;
3660                 }
3661         }
3662         ret = ni_initialize_smc_cac_tables(rdev);
3663         if (ret) {
3664                 DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
3665                 return ret;
3666         }
3667         ret = ni_initialize_hardware_cac_manager(rdev);
3668         if (ret) {
3669                 DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
3670                 return ret;
3671         }
3672         ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
3673         if (ret) {
3674                 DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
3675                 return ret;
3676         }
3677         ni_program_response_times(rdev);
3678         r7xx_start_smc(rdev);
3679         ret = cypress_notify_smc_display_change(rdev, false);
3680         if (ret) {
3681                 DRM_ERROR("cypress_notify_smc_display_change failed\n");
3682                 return ret;
3683         }
3684         cypress_enable_sclk_control(rdev, true);
3685         if (eg_pi->memory_transition)
3686                 cypress_enable_mclk_control(rdev, true);
3687         cypress_start_dpm(rdev);
3688         if (pi->gfx_clock_gating)
3689                 ni_gfx_clockgating_enable(rdev, true);
3690         if (pi->mg_clock_gating)
3691                 ni_mg_clockgating_enable(rdev, true);
3692         if (eg_pi->ls_clock_gating)
3693                 ni_ls_clockgating_enable(rdev, true);
3694
3695         if (rdev->irq.installed &&
3696             r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3697                 PPSMC_Result result;
3698
3699                 ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, 0xff * 1000);
3700                 if (ret)
3701                         return ret;
3702                 rdev->irq.dpm_thermal = true;
3703                 radeon_irq_set(rdev);
3704                 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
3705
3706                 if (result != PPSMC_Result_OK)
3707                         DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
3708         }
3709
3710         rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
3711
3712         ni_update_current_ps(rdev, boot_ps);
3713
3714         return 0;
3715 }
3716
3717 void ni_dpm_disable(struct radeon_device *rdev)
3718 {
3719         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3720         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3721         struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3722
3723         if (!btc_dpm_enabled(rdev))
3724                 return;
3725         rv770_clear_vc(rdev);
3726         if (pi->thermal_protection)
3727                 rv770_enable_thermal_protection(rdev, false);
3728         ni_enable_power_containment(rdev, boot_ps, false);
3729         ni_enable_smc_cac(rdev, boot_ps, false);
3730         cypress_enable_spread_spectrum(rdev, false);
3731         rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
3732         if (pi->dynamic_pcie_gen2)
3733                 ni_enable_dynamic_pcie_gen2(rdev, false);
3734
3735         if (rdev->irq.installed &&
3736             r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3737                 rdev->irq.dpm_thermal = false;
3738                 radeon_irq_set(rdev);
3739         }
3740
3741         if (pi->gfx_clock_gating)
3742                 ni_gfx_clockgating_enable(rdev, false);
3743         if (pi->mg_clock_gating)
3744                 ni_mg_clockgating_enable(rdev, false);
3745         if (eg_pi->ls_clock_gating)
3746                 ni_ls_clockgating_enable(rdev, false);
3747         ni_stop_dpm(rdev);
3748         btc_reset_to_default(rdev);
3749         ni_stop_smc(rdev);
3750         ni_force_switch_to_arb_f0(rdev);
3751
3752         ni_update_current_ps(rdev, boot_ps);
3753 }
3754
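/*
 * Refresh the SMC TDP limits for the requested state: restrict the allowed
 * performance levels, halt the SMC while the limits are rewritten, then
 * resume it and re-apply the software state.
 */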
3755 static int ni_power_control_set_level(struct radeon_device *rdev)
3756 {
3757         struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3758         int ret;
3759
3760         ret = ni_restrict_performance_levels_before_switch(rdev);
3761         if (ret)
3762                 return ret;
3763         ret = rv770_halt_smc(rdev);
3764         if (ret)
3765                 return ret;
3766         ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3767         if (ret)
3768                 return ret;
3769         ret = rv770_resume_smc(rdev);
3770         if (ret)
3771                 return ret;
3772         ret = rv770_set_sw_state(rdev);
3773         if (ret)
3774                 return ret;
3775
3776         return 0;
3777 }
3778
3779 int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3780 {
3781         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3782         struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3783         struct radeon_ps *new_ps = &requested_ps;
3784
3785         ni_update_requested_ps(rdev, new_ps);
3786
3787         ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
3788
3789         return 0;
3790 }
3791
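/*
 * Perform a power state switch: temporarily drop CAC and power containment,
 * halt the SMC, upload the new software state and MC registers, reprogram
 * the memory timings, then resume the SMC, switch states and re-enable CAC,
 * power containment and the TDP limits.
 */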
3792 int ni_dpm_set_power_state(struct radeon_device *rdev)
3793 {
3794         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3795         struct radeon_ps *new_ps = &eg_pi->requested_rps;
3796         struct radeon_ps *old_ps = &eg_pi->current_rps;
3797         int ret;
3798
3799         ret = ni_restrict_performance_levels_before_switch(rdev);
3800         if (ret) {
3801                 DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
3802                 return ret;
3803         }
3804         ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
3805         ret = ni_enable_power_containment(rdev, new_ps, false);
3806         if (ret) {
3807                 DRM_ERROR("ni_enable_power_containment failed\n");
3808                 return ret;
3809         }
3810         ret = ni_enable_smc_cac(rdev, new_ps, false);
3811         if (ret) {
3812                 DRM_ERROR("ni_enable_smc_cac failed\n");
3813                 return ret;
3814         }
3815         ret = rv770_halt_smc(rdev);
3816         if (ret) {
3817                 DRM_ERROR("rv770_halt_smc failed\n");
3818                 return ret;
3819         }
3820         if (eg_pi->smu_uvd_hs)
3821                 btc_notify_uvd_to_smc(rdev, new_ps);
3822         ret = ni_upload_sw_state(rdev, new_ps);
3823         if (ret) {
3824                 DRM_ERROR("ni_upload_sw_state failed\n");
3825                 return ret;
3826         }
3827         if (eg_pi->dynamic_ac_timing) {
3828                 ret = ni_upload_mc_reg_table(rdev, new_ps);
3829                 if (ret) {
3830                         DRM_ERROR("ni_upload_mc_reg_table failed\n");
3831                         return ret;
3832                 }
3833         }
3834         ret = ni_program_memory_timing_parameters(rdev, new_ps);
3835         if (ret) {
3836                 DRM_ERROR("ni_program_memory_timing_parameters failed\n");
3837                 return ret;
3838         }
3839         ret = rv770_resume_smc(rdev);
3840         if (ret) {
3841                 DRM_ERROR("rv770_resume_smc failed\n");
3842                 return ret;
3843         }
3844         ret = rv770_set_sw_state(rdev);
3845         if (ret) {
3846                 DRM_ERROR("rv770_set_sw_state failed\n");
3847                 return ret;
3848         }
3849         ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
3850         ret = ni_enable_smc_cac(rdev, new_ps, true);
3851         if (ret) {
3852                 DRM_ERROR("ni_enable_smc_cac failed\n");
3853                 return ret;
3854         }
3855         ret = ni_enable_power_containment(rdev, new_ps, true);
3856         if (ret) {
3857                 DRM_ERROR("ni_enable_power_containment failed\n");
3858                 return ret;
3859         }
3860
3861         /* update tdp */
3862         ret = ni_power_control_set_level(rdev);
3863         if (ret) {
3864                 DRM_ERROR("ni_power_control_set_level failed\n");
3865                 return ret;
3866         }
3867
3868         ret = ni_dpm_force_performance_level(rdev, RADEON_DPM_FORCED_LEVEL_AUTO);
3869         if (ret) {
3870                 DRM_ERROR("ni_dpm_force_performance_level failed\n");
3871                 return ret;
3872         }
3873
3874         return 0;
3875 }
3876
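/* Post-switch hook: the requested state becomes the current state. */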
3877 void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3878 {
3879         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3880         struct radeon_ps *new_ps = &eg_pi->requested_rps;
3881
3882         ni_update_current_ps(rdev, new_ps);
3883 }
3884
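/*
 * Prepare for an ASIC reset: restrict the performance levels and fall
 * back to the boot state.
 */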
3885 void ni_dpm_reset_asic(struct radeon_device *rdev)
3886 {
3887         ni_restrict_performance_levels_before_switch(rdev);
3888         rv770_set_boot_state(rdev);
3889 }
3890
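/*
 * Overlays for the ATOM PowerPlay tables: the BIOS may expose any of
 * several table revisions, and these unions let the parser address
 * whichever layout it finds.
 */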
3891 union power_info {
3892         struct _ATOM_POWERPLAY_INFO info;
3893         struct _ATOM_POWERPLAY_INFO_V2 info_2;
3894         struct _ATOM_POWERPLAY_INFO_V3 info_3;
3895         struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
3896         struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
3897         struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
3898 };
3899
3900 union pplib_clock_info {
3901         struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
3902         struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
3903         struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
3904         struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
3905 };
3906
3907 union pplib_power_state {
3908         struct _ATOM_PPLIB_STATE v1;
3909         struct _ATOM_PPLIB_STATE_V2 v2;
3910 };
3911
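/*
 * Parse the non-clock info for one state: classification flags, caps and
 * the UVD clocks.  Newer table revisions carry explicit VCLK/DCLK values;
 * older UVD states fall back to the RV770 defaults.  Boot and UVD states
 * are recorded for later use.
 */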
3912 static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
3913                                           struct radeon_ps *rps,
3914                                           struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
3915                                           u8 table_rev)
3916 {
3917         rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
3918         rps->class = le16_to_cpu(non_clock_info->usClassification);
3919         rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
3920
3921         if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
3922                 rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
3923                 rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
3924         } else if (r600_is_uvd_state(rps->class, rps->class2)) {
3925                 rps->vclk = RV770_DEFAULT_VCLK_FREQ;
3926                 rps->dclk = RV770_DEFAULT_DCLK_FREQ;
3927         } else {
3928                 rps->vclk = 0;
3929                 rps->dclk = 0;
3930         }
3931
3932         if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
3933                 rdev->pm.dpm.boot_ps = rps;
3934         if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
3935                 rdev->pm.dpm.uvd_ps = rps;
3936 }
3937
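/*
 * Fill in one performance level from an Evergreen-layout clock info entry:
 * engine/memory clocks, voltages and flags.  ACPI, ULV and boot states get
 * special handling, and the table-wide VDDC min/max and the AC performance
 * limits are tracked as the levels are parsed.
 */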
3938 static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
3939                                       struct radeon_ps *rps, int index,
3940                                       union pplib_clock_info *clock_info)
3941 {
3942         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3943         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3944         struct ni_ps *ps = ni_get_ps(rps);
3945         u16 vddc;
3946         struct rv7xx_pl *pl = &ps->performance_levels[index];
3947
3948         ps->performance_level_count = index + 1;
3949
3950         pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
3951         pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
3952         pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
3953         pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
3954
3955         pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
3956         pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
3957         pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
3958
3959         /* patch up vddc if necessary */
3960         if (pl->vddc == 0xff01) {
3961                 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
3962                         pl->vddc = vddc;
3963         }
3964
3965         if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
3966                 pi->acpi_vddc = pl->vddc;
3967                 eg_pi->acpi_vddci = pl->vddci;
3968                 if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
3969                         pi->acpi_pcie_gen2 = true;
3970                 else
3971                         pi->acpi_pcie_gen2 = false;
3972         }
3973
3974         if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
3975                 eg_pi->ulv.supported = true;
3976                 eg_pi->ulv.pl = pl;
3977         }
3978
3979         if (pi->min_vddc_in_table > pl->vddc)
3980                 pi->min_vddc_in_table = pl->vddc;
3981
3982         if (pi->max_vddc_in_table < pl->vddc)
3983                 pi->max_vddc_in_table = pl->vddc;
3984
3985         /* patch up boot state */
3986         if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
3987                 u16 vddc, vddci, mvdd;
3988                 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
3989                 pl->mclk = rdev->clock.default_mclk;
3990                 pl->sclk = rdev->clock.default_sclk;
3991                 pl->vddc = vddc;
3992                 pl->vddci = vddci;
3993         }
3994
3995         if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
3996             ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
3997                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
3998                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
3999                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
4000                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
4001         }
4002 }
4003
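/*
 * Walk the PPLib PowerPlay table in the video BIOS and build the driver's
 * radeon_ps array, with one ni_ps (up to ucStateEntrySize - 1 performance
 * levels) per state.
 */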
4004 static int ni_parse_power_table(struct radeon_device *rdev)
4005 {
4006         struct radeon_mode_info *mode_info = &rdev->mode_info;
4007         struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
4008         union pplib_power_state *power_state;
4009         int i, j;
4010         union pplib_clock_info *clock_info;
4011         union power_info *power_info;
4012         int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
4013         u16 data_offset;
4014         u8 frev, crev;
4015         struct ni_ps *ps;
4016
4017         if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
4018                                    &frev, &crev, &data_offset))
4019                 return -EINVAL;
4020         power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
4021
4022         rdev->pm.dpm.ps = kcalloc(power_info->pplib.ucNumStates,
4023                                   sizeof(struct radeon_ps), GFP_KERNEL);
4024         if (!rdev->pm.dpm.ps)
4025                 return -ENOMEM;
4026         rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
4027         rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
4028         rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);
4029
4030         for (i = 0; i < power_info->pplib.ucNumStates; i++) {
4031                 power_state = (union pplib_power_state *)
4032                         (mode_info->atom_context->bios + data_offset +
4033                          le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4034                          i * power_info->pplib.ucStateEntrySize);
4035                 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4036                         (mode_info->atom_context->bios + data_offset +
4037                          le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4038                          (power_state->v1.ucNonClockStateIndex *
4039                           power_info->pplib.ucNonClockSize));
4040                 if (power_info->pplib.ucStateEntrySize - 1) {
4041                         u8 *idx;
4042                         ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
4043                         if (ps == NULL) {
4044                                 kfree(rdev->pm.dpm.ps);
4045                                 return -ENOMEM;
4046                         }
4047                         rdev->pm.dpm.ps[i].ps_priv = ps;
4048                         ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4049                                                          non_clock_info,
4050                                                          power_info->pplib.ucNonClockSize);
4051                         idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
4052                         for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4053                                 clock_info = (union pplib_clock_info *)
4054                                         (mode_info->atom_context->bios + data_offset +
4055                                          le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4056                                          (idx[j] * power_info->pplib.ucClockInfoSize));
4057                                 ni_parse_pplib_clock_info(rdev,
4058                                                           &rdev->pm.dpm.ps[i], j,
4059                                                           clock_info);
4060                         }
4061                 }
4062         }
4063         rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
4064         return 0;
4065 }
4066
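/*
 * One-time DPM setup for Cayman, invoked through the radeon dpm asic
 * callbacks: allocate the power-info structures, parse the BIOS power
 * tables, seed the display-clock/VDDC dependency table and the default
 * thresholds, and select the CAC weight table for the detected device.
 */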
4067 int ni_dpm_init(struct radeon_device *rdev)
4068 {
4069         struct rv7xx_power_info *pi;
4070         struct evergreen_power_info *eg_pi;
4071         struct ni_power_info *ni_pi;
4072         struct atom_clock_dividers dividers;
4073         int ret;
4074
4075         ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
4076         if (ni_pi == NULL)
4077                 return -ENOMEM;
4078         rdev->pm.dpm.priv = ni_pi;
4079         eg_pi = &ni_pi->eg;
4080         pi = &eg_pi->rv7xx;
4081
4082         rv770_get_max_vddc(rdev);
4083
4084         eg_pi->ulv.supported = false;
4085         pi->acpi_vddc = 0;
4086         eg_pi->acpi_vddci = 0;
4087         pi->min_vddc_in_table = 0;
4088         pi->max_vddc_in_table = 0;
4089
4090         ret = ni_parse_power_table(rdev);
4091         if (ret)
4092                 return ret;
4093         ret = r600_parse_extended_power_table(rdev);
4094         if (ret)
4095                 return ret;
4096
4097         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
4098                 kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
4099         if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
4100                 r600_free_extended_power_table(rdev);
4101                 return -ENOMEM;
4102         }
4103         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
4104         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
4105         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
4106         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
4107         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
4108         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
4109         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
4110         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
4111         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
4112
4113         ni_patch_dependency_tables_based_on_leakage(rdev);
4114
4115         if (rdev->pm.dpm.voltage_response_time == 0)
4116                 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
4117         if (rdev->pm.dpm.backbias_response_time == 0)
4118                 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
4119
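	/*
	 * Note: as written, the divider from the lookup is only used when
	 * radeon_atom_get_clock_dividers() returns an error; on success the
	 * default reference divider is used instead.
	 */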
4120         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
4121                                              0, false, &dividers);
4122         if (ret)
4123                 pi->ref_div = dividers.ref_div + 1;
4124         else
4125                 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
4126
4127         pi->rlp = RV770_RLP_DFLT;
4128         pi->rmp = RV770_RMP_DFLT;
4129         pi->lhp = RV770_LHP_DFLT;
4130         pi->lmp = RV770_LMP_DFLT;
4131
4132         eg_pi->ats[0].rlp = RV770_RLP_DFLT;
4133         eg_pi->ats[0].rmp = RV770_RMP_DFLT;
4134         eg_pi->ats[0].lhp = RV770_LHP_DFLT;
4135         eg_pi->ats[0].lmp = RV770_LMP_DFLT;
4136
4137         eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
4138         eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
4139         eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
4140         eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
4141
4142         eg_pi->smu_uvd_hs = true;
4143
4144         if (rdev->pdev->device == 0x6707) {
4145                 pi->mclk_strobe_mode_threshold = 55000;
4146                 pi->mclk_edc_enable_threshold = 55000;
4147                 eg_pi->mclk_edc_wr_enable_threshold = 55000;
4148         } else {
4149                 pi->mclk_strobe_mode_threshold = 40000;
4150                 pi->mclk_edc_enable_threshold = 40000;
4151                 eg_pi->mclk_edc_wr_enable_threshold = 40000;
4152         }
4153         ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
4154
4155         pi->voltage_control =
4156                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
4157
4158         pi->mvdd_control =
4159                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
4160
4161         eg_pi->vddci_control =
4162                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
4163
4164         rv770_get_engine_memory_ss(rdev);
4165
4166         pi->asi = RV770_ASI_DFLT;
4167         pi->pasi = CYPRESS_HASI_DFLT;
4168         pi->vrc = CYPRESS_VRC_DFLT;
4169
4170         pi->power_gating = false;
4171
4172         pi->gfx_clock_gating = true;
4173
4174         pi->mg_clock_gating = true;
4175         pi->mgcgtssm = true;
4176         eg_pi->ls_clock_gating = false;
4177         eg_pi->sclk_deep_sleep = false;
4178
4179         pi->dynamic_pcie_gen2 = true;
4180
4181         if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
4182                 pi->thermal_protection = true;
4183         else
4184                 pi->thermal_protection = false;
4185
4186         pi->display_gap = true;
4187
4188         pi->dcodt = true;
4189
4190         pi->ulps = true;
4191
4192         eg_pi->dynamic_ac_timing = true;
4193         eg_pi->abm = true;
4194         eg_pi->mcls = true;
4195         eg_pi->light_sleep = true;
4196         eg_pi->memory_transition = true;
4197 #if defined(CONFIG_ACPI)
4198         eg_pi->pcie_performance_request =
4199                 radeon_acpi_is_pcie_performance_request_supported(rdev);
4200 #else
4201         eg_pi->pcie_performance_request = false;
4202 #endif
4203
4204         eg_pi->dll_default_on = false;
4205
4206         eg_pi->sclk_deep_sleep = false;
4207
4208         pi->mclk_stutter_mode_threshold = 0;
4209
4210         pi->sram_end = SMC_RAM_END;
4211
4212         rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
4213         rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
4214         rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
4215         rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
4216         rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
4217         rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
4218         rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
4219         rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
4220
4221         ni_pi->cac_data.leakage_coefficients.at = 516;
4222         ni_pi->cac_data.leakage_coefficients.bt = 18;
4223         ni_pi->cac_data.leakage_coefficients.av = 51;
4224         ni_pi->cac_data.leakage_coefficients.bv = 2957;
4225
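	/*
	 * CAC weights are chosen by PCI device ID: Cayman XT, LE and PRO
	 * boards each get their own table, with the PRO weights as the
	 * fallback for unrecognized IDs.
	 */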
4226         switch (rdev->pdev->device) {
4227         case 0x6700:
4228         case 0x6701:
4229         case 0x6702:
4230         case 0x6703:
4231         case 0x6718:
4232                 ni_pi->cac_weights = &cac_weights_cayman_xt;
4233                 break;
4234         case 0x6705:
4235         case 0x6719:
4236         case 0x671D:
4237         case 0x671C:
4238         default:
4239                 ni_pi->cac_weights = &cac_weights_cayman_pro;
4240                 break;
4241         case 0x6704:
4242         case 0x6706:
4243         case 0x6707:
4244         case 0x6708:
4245         case 0x6709:
4246                 ni_pi->cac_weights = &cac_weights_cayman_le;
4247                 break;
4248         }
4249
4250         if (ni_pi->cac_weights->enable_power_containment_by_default) {
4251                 ni_pi->enable_power_containment = true;
4252                 ni_pi->enable_cac = true;
4253                 ni_pi->enable_sq_ramping = true;
4254         } else {
4255                 ni_pi->enable_power_containment = false;
4256                 ni_pi->enable_cac = false;
4257                 ni_pi->enable_sq_ramping = false;
4258         }
4259
4260         ni_pi->driver_calculate_cac_leakage = false;
4261         ni_pi->cac_configuration_required = true;
4262
4263         if (ni_pi->cac_configuration_required) {
4264                 ni_pi->support_cac_long_term_average = true;
4265                 ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
4266                 ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
4267         } else {
4268                 ni_pi->support_cac_long_term_average = false;
4269                 ni_pi->lta_window_size = 0;
4270                 ni_pi->lts_truncate = 0;
4271         }
4272
4273         ni_pi->use_power_boost_limit = true;
4274
4275         /* make sure dc limits are valid */
4276         if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) ||
4277             (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0))
4278                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc =
4279                         rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
4280
4281         return 0;
4282 }
4283
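/*
 * Undo ni_dpm_init(): free the per-state data, the power-info structures
 * and the extended power table.
 */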
4284 void ni_dpm_fini(struct radeon_device *rdev)
4285 {
4286         int i;
4287
4288         for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4289                 kfree(rdev->pm.dpm.ps[i].ps_priv);
4290         }
4291         kfree(rdev->pm.dpm.ps);
4292         kfree(rdev->pm.dpm.priv);
4293         kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4294         r600_free_extended_power_table(rdev);
4295 }
4296
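/*
 * Dump a power state to the kernel log: class and caps info, the UVD
 * clocks, and each performance level's clocks and voltages (plus the PCIe
 * gen on newer families).
 */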
4297 void ni_dpm_print_power_state(struct radeon_device *rdev,
4298                               struct radeon_ps *rps)
4299 {
4300         struct ni_ps *ps = ni_get_ps(rps);
4301         struct rv7xx_pl *pl;
4302         int i;
4303
4304         r600_dpm_print_class_info(rps->class, rps->class2);
4305         r600_dpm_print_cap_info(rps->caps);
4306         printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4307         for (i = 0; i < ps->performance_level_count; i++) {
4308                 pl = &ps->performance_levels[i];
4309                 if (rdev->family >= CHIP_TAHITI)
4310                         printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
4311                                i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
4312                 else
4313                         printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4314                                i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4315         }
4316         r600_dpm_print_ps_status(rdev, rps);
4317 }
4318
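/*
 * debugfs helper: report the currently selected performance level, read
 * back from TARGET_AND_CURRENT_PROFILE_INDEX, for the active power state.
 */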
4319 void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
4320                                                     struct seq_file *m)
4321 {
4322         struct radeon_ps *rps = rdev->pm.dpm.current_ps;
4323         struct ni_ps *ps = ni_get_ps(rps);
4324         struct rv7xx_pl *pl;
4325         u32 current_index =
4326                 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4327                 CURRENT_STATE_INDEX_SHIFT;
4328
4329         if (current_index >= ps->performance_level_count) {
4330                 seq_printf(m, "invalid dpm profile %u\n", current_index);
4331         } else {
4332                 pl = &ps->performance_levels[current_index];
4333                 seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4334                 seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4335                            current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4336         }
4337 }
4338
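/*
 * ni_dpm_get_sclk()/ni_dpm_get_mclk(): report the lowest or highest
 * engine/memory clock of the requested state for the core power
 * management code.
 */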
4339 u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4340 {
4341         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4342         struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4343
4344         if (low)
4345                 return requested_state->performance_levels[0].sclk;
4346         else
4347                 return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
4348 }
4349
4350 u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4351 {
4352         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4353         struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4354
4355         if (low)
4356                 return requested_state->performance_levels[0].mclk;
4357         else
4358                 return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
4359 }
4360