/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
//#include <linux/platform_device.h>
#include <linux/slab.h>
#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include <drm/radeon_drm.h>
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"
#include "radeon_ucode.h"

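/* register block offset of each DCE4 display controller, indexed by CRTC id */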
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};

#include "clearstate_evergreen.h"

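/* register list the RLC saves and restores around gating transitions on Sumo-class parts */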
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);

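/*
 * The golden register tables below hold {offset, mask, value} triplets,
 * applied in order by radeon_program_register_sequence().
 */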
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};

static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};

static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};

static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};

static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		break;
	}
}

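/* Unpack bank width/height, macro tile aspect and tile split from the
 * combined tiling flags into their EVERGREEN_ADDR_SURF_* register encodings.
 */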
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}

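/* Program a single UVD clock: fetch the post divider from the ATOM
 * clock-dividers table, write it, and poll (up to 100 * 10ms) until the
 * status register reports that the divider change has taken effect.
 */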
static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}

int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	int r = 0;
	u32 cg_scratch = RREG32(CG_SCRATCH1);

	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0xffff0000;
	cg_scratch |= vclk / 100; /* Mhz */

	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0x0000ffff;
	cg_scratch |= (dclk / 100) << 16; /* Mhz */

done:
	WREG32(CG_SCRATCH1, cg_scratch);

	return r;
}

int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r = 0; /* the UPLL divider helper below is stubbed out in this port */

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

//   r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
//                     16384, 0x03FFFFFF, 0, 128, 5,
//                     &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

//   r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
//   if (r)
//       return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

//   r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
//   if (r)
//       return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}

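/* MAX_READ_REQUEST_SIZE is encoded as 0=128, 1=256 ... 5=4096 bytes, with
 * 6 and 7 reserved; anything outside 256-4096 bytes is replaced with 512.
 */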
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
	v = ffs(readrq) - 8;
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
}

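/* Program the FMT block feeding this encoder: select truncation or
 * dithering down to the monitor's bit depth. LVDS/eDP (handled by atom)
 * and analog encoders are skipped.
 */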
void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}

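/* Helpers for dce4_wait_for_vblank: sample the CRTC vblank status bit, and
 * check whether the scanout position counter is still advancing by reading
 * the position register twice.
 */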
static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
{
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
		return true;
	else
		return false;
}

static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
{
	u32 pos1, pos2;

	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);

	if (pos1 != pos2)
		return true;
	else
		return false;
}

/**
 * dce4_wait_for_vblank - vblank wait asic callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}

/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to cleanup pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 *
 * Does the actual pageflip (evergreen+).
 * During vblank we take the crtc lock and wait for the update_pending
 * bit to go high, when it does, we release the lock, and allow the
 * double buffered update to take place.
 */
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high. */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
	}
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
}

/**
 * evergreen_page_flip_pending - check if page flip is still pending
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to check
 *
 * Returns the current update pending status.
 */
bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* Return current update_pending status: */
	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
}

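/* On Juniper the raw sensor value is corrected by a signed fused offset;
 * other Evergreen parts report ASIC_T as a signed value in half-degree
 * units, sign extended and clamped to [-256, 255] before conversion.
 */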
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}

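/* Sumo reports temperature with a fixed +49 degree bias. */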
int sumo_get_temp(struct radeon_device *rdev)
{
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
	int actual_temp = temp - 49;

	return actual_temp * 1000;
}

/**
 * sumo_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (sumo, trinity, SI).
 * Used for profile mode only.
 */
void sumo_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;

	/* low,mid sh/mh */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);

	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;

	/* high sh/mh */
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;

	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;
}

/**
 * btc_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (BTC, cayman).
 * Used for profile mode only.
 */
void btc_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
	/* starting with BTC, there is one state that is used for both
	 * MH and SH.  Difference is that we always use the high clock index for
	 * mclk.
	 */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	/* low sh */
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
	/* mid sh */
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
	/* high sh */
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
	/* low mh */
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
	/* mid mh */
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
	/* high mh */
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
}

/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}

1579
 
2997 Serge 1580
/**
1581
 * evergreen_pm_prepare - pre-power state change callback.
1582
 *
1583
 * @rdev: radeon_device pointer
1584
 *
1585
 * Prepare for a power state change (evergreen+).
1586
 */
1990 serge 1587
void evergreen_pm_prepare(struct radeon_device *rdev)
1588
{
1589
	struct drm_device *ddev = rdev->ddev;
1590
	struct drm_crtc *crtc;
1591
	struct radeon_crtc *radeon_crtc;
1592
	u32 tmp;
1593
 
1594
	/* disable any active CRTCs */
1595
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1596
		radeon_crtc = to_radeon_crtc(crtc);
1597
		if (radeon_crtc->enabled) {
1598
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1599
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1600
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1601
		}
1602
	}
1603
}
1604
 
2997 Serge 1605
/**
1606
 * evergreen_pm_finish - post-power state change callback.
1607
 *
1608
 * @rdev: radeon_device pointer
1609
 *
1610
 * Clean up after a power state change (evergreen+).
1611
 */
1990 serge 1612
void evergreen_pm_finish(struct radeon_device *rdev)
1613
{
1614
	struct drm_device *ddev = rdev->ddev;
1615
	struct drm_crtc *crtc;
1616
	struct radeon_crtc *radeon_crtc;
1617
	u32 tmp;
1618
 
1619
	/* enable any active CRTCs */
1620
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1621
		radeon_crtc = to_radeon_crtc(crtc);
1622
		if (radeon_crtc->enabled) {
1623
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1624
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1625
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1626
		}
1627
	}
1628
}

/**
 * evergreen_hpd_sense - hpd sense callback.
 *
 * @rdev: radeon_device pointer
 * @hpd: hpd (hotplug detect) pin
 *
 * Checks if a digital monitor is connected (evergreen+).
 * Returns true if connected, false if not connected.
 */
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	switch (hpd) {
	case RADEON_HPD_1:
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_2:
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_3:
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_4:
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_5:
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_6:
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	default:
		break;
	}

	return connected;
}

/**
 * evergreen_hpd_set_polarity - hpd set polarity callback.
 *
 * @rdev: radeon_device pointer
 * @hpd: hpd (hotplug detect) pin
 *
 * Set the polarity of the hpd pin (evergreen+).
 */
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = evergreen_hpd_sense(rdev, hpd);

	switch (hpd) {
	case RADEON_HPD_1:
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_2:
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_3:
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_4:
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_5:
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_6:
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
		break;
	default:
		break;
	}
}

/**
 * evergreen_hpd_init - hpd setup callback.
 *
 * @rdev: radeon_device pointer
 *
 * Setup the hpd pins used by the card (evergreen+).
 * Enable the pin, set the polarity, and enable the hpd interrupts.
 */
void evergreen_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned enabled = 0;
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);

		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
			/* don't try to enable hpd on eDP or LVDS; this avoids
			 * breaking the aux dp channel on imac and helps (but
			 * does not completely fix)
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
			 * It also avoids interrupt storms during dpms.
			 */
			continue;
		}
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, tmp);
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, tmp);
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, tmp);
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, tmp);
			break;
		default:
			break;
		}
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
		enabled |= 1 << radeon_connector->hpd.hpd;
	}
//   radeon_irq_kms_enable_hpd(rdev, enabled);
}

/**
 * evergreen_hpd_fini - hpd tear down callback.
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the hpd pins used by the card (evergreen+).
 * Disable the hpd interrupts.
 */
void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned disabled = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, 0);
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			break;
		default:
			break;
		}
		disabled |= 1 << radeon_connector->hpd.hpd;
	}
//   radeon_irq_kms_disable_hpd(rdev, disabled);
}

/* watermark setup */

static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
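
/* Illustrative note (added; not from the original source): with both crtcs
 * of a pair enabled, each gets preset 0/4 (half of the line buffer), i.e.
 * 3840 * 2 entries on DCE4 or 4096 * 2 on DCE5; a crtc whose partner is
 * disabled gets preset 2/6 (the whole buffer), i.e. 7680 * 2 or 8192 * 2.
 */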

u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
{
	u32 tmp = RREG32(MC_SHARED_CHMAP);

	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		return 1;
	case 1:
		return 2;
	case 2:
		return 4;
	case 3:
		return 8;
	}
}

struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;   /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};

static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate DRAM Bandwidth and the part allocated to display. */
	fixed20_12 dram_efficiency; /* 0.7 */
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	dram_efficiency.full = dfixed_const(7);
	dram_efficiency.full = dfixed_div(dram_efficiency, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);

	return dfixed_trunc(bandwidth);
}
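
/* Worked example (added, illustrative): for wm->yclk = 1000000 (a 1 GHz
 * effective DRAM clock, in kHz) and wm->dram_channels = 2:
 *   yclk / 1000                 = 1000 MHz
 *   dram_channels * 4 bytes     = 8 bytes per clock
 *   8 * 1000 * 0.7 (efficiency) = 5600 MB/s of raw DRAM bandwidth
 * The dfixed_* helpers are the 20.12 fixed-point ops from drm_fixed.h.
 */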

static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
{
	/* Calculate DRAM Bandwidth and the part allocated to display. */
	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	disp_dram_allocation.full = dfixed_const(3); /* XXX worst-case value 0.3 */
	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the display Data return Bandwidth */
	fixed20_12 return_efficiency; /* 0.8 */
	fixed20_12 sclk, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	sclk.full = dfixed_const(wm->sclk);
	sclk.full = dfixed_div(sclk, a);
	a.full = dfixed_const(10);
	return_efficiency.full = dfixed_const(8);
	return_efficiency.full = dfixed_div(return_efficiency, a);
	a.full = dfixed_const(32);
	bandwidth.full = dfixed_mul(a, sclk);
	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the DMIF Request Bandwidth */
	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
	fixed20_12 disp_clk, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	disp_clk.full = dfixed_const(wm->disp_clk);
	disp_clk.full = dfixed_div(disp_clk, a);
	a.full = dfixed_const(10);
	disp_clk_request_efficiency.full = dfixed_const(8);
	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
	a.full = dfixed_const(32);
	bandwidth.full = dfixed_mul(a, disp_clk);
	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);

	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
}

static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the display mode Average Bandwidth
	 * DisplayMode should contain the source and destination dimensions,
	 * timing, etc.
	 */
	fixed20_12 bpp;
	fixed20_12 line_time;
	fixed20_12 src_width;
	fixed20_12 bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
	line_time.full = dfixed_div(line_time, a);
	bpp.full = dfixed_const(wm->bytes_per_pixel);
	src_width.full = dfixed_const(wm->src_width);
	bandwidth.full = dfixed_mul(src_width, bpp);
	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
	bandwidth.full = dfixed_div(bandwidth, line_time);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);
}
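
/* Worked example (added, illustrative; two heads): with 5600 MB/s available,
 * worst_chunk_return_time = 512 * 8 * 1000 / 5600 ~= 731 ns and
 * cursor_line_pair_return_time = 128 * 4 * 1000 / 5600 ~= 91 ns, so
 * other_heads_data_return_time = 3 * 731 + 2 * 91 ~= 2375 ns.  For a
 * 148.5 MHz pixel clock, dc_latency = 40000000 / 148500 ~= 269 ns, giving
 * a base latency of roughly 2000 + 2375 + 269 ~= 4644 ns before any line
 * fill penalty is added.
 */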

static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
{
	if (evergreen_average_bandwidth(wm) <=
	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
		return true;
	else
		return false;
}

static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
{
	if (evergreen_average_bandwidth(wm) <=
	    (evergreen_available_bandwidth(wm) / wm->num_heads))
		return true;
	else
		return false;
}

static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
{
	u32 lb_partitions = wm->lb_size / wm->src_width;
	u32 line_time = wm->active_time + wm->blank_time;
	u32 latency_tolerant_lines;
	u32 latency_hiding;
	fixed20_12 a;

	a.full = dfixed_const(1);
	if (wm->vsc.full > a.full)
		latency_tolerant_lines = 1;
	else {
		if (lb_partitions <= (wm->vtaps + 1))
			latency_tolerant_lines = 1;
		else
			latency_tolerant_lines = 2;
	}

	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);

	if (evergreen_latency_watermark(wm) <= latency_hiding)
		return true;
	else
		return false;
}

static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}

/**
 * evergreen_bandwidth_update - update display watermarks callback.
 *
 * @rdev: radeon_device pointer
 *
 * Update the display watermarks based on the requested mode(s)
 * (evergreen+).
 */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	struct drm_display_mode *mode0 = NULL;
	struct drm_display_mode *mode1 = NULL;
	u32 num_heads = 0, lb_size;
	int i;

	radeon_update_display_priority(rdev);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i]->base.enabled)
			num_heads++;
	}
	for (i = 0; i < rdev->num_crtc; i += 2) {
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
	}
}
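
/* Note (added): crtcs are walked in pairs (i, i+1) above because each line
 * buffer is shared by two controllers; each crtc's allocation is computed
 * against its partner's mode so the split presets stay consistent.
 */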

/**
 * evergreen_mc_wait_for_idle - wait for MC idle callback.
 *
 * @rdev: radeon_device pointer
 *
 * Wait for the MC (memory controller) to be idle.
 * (evergreen+).
 * Returns 0 if the MC is idle, -1 if not.
 */
int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* check the MC busy bits in SRBM_STATUS */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}
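
/* Typical call pattern (added sketch, mirroring evergreen_mc_program() below):
 *
 *	if (evergreen_mc_wait_for_idle(rdev))
 *		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
 */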

/*
 * GART
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the response */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}

static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
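
/* Illustrative note (added): the >> 12 shifts above program 4 KiB page
 * frame numbers; e.g. a GART table pinned at VRAM offset 0x00400000 yields
 * VM_CONTEXT0_PAGE_TABLE_BASE_ADDR = 0x400.
 */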

static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}

static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}

static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}

void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}

void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}

void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
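
/* Worked example (added, illustrative): for 256 MiB of VRAM starting at 0,
 * vram_start = 0x00000000 and vram_end = 0x0FFFFFFF, so
 * MC_VM_FB_LOCATION = ((0x0FFFFFFF >> 24) & 0xFFFF) << 16 = 0x000F0000:
 * the upper half-word holds the end, the lower the start, both in
 * 16 MiB units.
 */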

/*
 * CP.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
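
/* Illustrative note (added): PACKET3(PACKET3_INDIRECT_BUFFER, 2) encodes a
 * count of 2, i.e. three payload dwords follow the header: IB address bits
 * 31:2 (dword aligned), IB address bits 39:32, and the IB length in dwords.
 * The trailing +4 in the next_rptr computations above accounts for this
 * four-dword packet.
 */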
1963 serge 2820
 
1986 serge 2821
 
1963 serge 2822
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
1430 serge 2823
{
1963 serge 2824
	const __be32 *fw_data;
2825
	int i;
2826
 
2827
	if (!rdev->me_fw || !rdev->pfp_fw)
2828
		return -EINVAL;
2829
 
2830
	r700_cp_stop(rdev);
2831
	WREG32(CP_RB_CNTL,
2832
#ifdef __BIG_ENDIAN
2833
	       BUF_SWAP_32BIT |
2834
#endif
2835
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2836
 
2837
	fw_data = (const __be32 *)rdev->pfp_fw->data;
2838
	WREG32(CP_PFP_UCODE_ADDR, 0);
2839
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2840
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2841
	WREG32(CP_PFP_UCODE_ADDR, 0);
2842
 
2843
	fw_data = (const __be32 *)rdev->me_fw->data;
2844
	WREG32(CP_ME_RAM_WADDR, 0);
2845
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2846
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2847
 
2848
	WREG32(CP_PFP_UCODE_ADDR, 0);
2849
	WREG32(CP_ME_RAM_WADDR, 0);
2850
	WREG32(CP_ME_RAM_RADDR, 0);
2851
	return 0;
1430 serge 2852
}
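
/* Illustrative note (added): the microcode images are stored as big-endian
 * dwords, so be32_to_cpup() converts each one to host order before it is
 * written to the ucode register; the final writes reset the ucode
 * address/read/write pointers to 0 so the CP starts fetching from the
 * beginning.
 */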

static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}

static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
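
/* Worked example (added, illustrative): for a 1 MiB ring, ring_size / 8 =
 * 131072, so rb_bufsz = order_base_2(131072) = 17.  With a 4096-byte GPU
 * page, order_base_2(4096 / 8) = 9 lands in the block-size field at bit 8,
 * so tmp = (9 << 8) | 17 before any endian flags are OR'ed in.
 */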

/*
 * Core functions
 */
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	u32 gb_addr_config;
	u32 mc_shared_chmap, mc_arb_ramcfg;
	u32 sx_debug_1;
	u32 smx_dc_ctl0;
	u32 sq_config;
	u32 sq_lds_resource_mgmt;
	u32 sq_gpr_resource_mgmt_1;
	u32 sq_gpr_resource_mgmt_2;
	u32 sq_gpr_resource_mgmt_3;
	u32 sq_thread_resource_mgmt;
	u32 sq_thread_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_1;
	u32 sq_stack_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_3;
	u32 vgt_cache_invalidation;
	u32 hdp_host_path_cntl, tmp;
	u32 disabled_rb_mask;
	int i, j, num_shader_engines, ps_thread_count;

	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_JUNIPER:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_REDWOOD:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CEDAR:
	default:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_PALM:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		if (rdev->pdev->device == 0x9648)
			rdev->config.evergreen.max_simds = 3;
		else if ((rdev->pdev->device == 0x9647) ||
			 (rdev->pdev->device == 0x964a))
			rdev->config.evergreen.max_simds = 4;
		else
			rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO2:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
1963 serge 3173
		break;
3174
	case CHIP_BARTS:
3175
		rdev->config.evergreen.num_ses = 2;
3176
		rdev->config.evergreen.max_pipes = 4;
3177
		rdev->config.evergreen.max_tile_pipes = 8;
3178
		rdev->config.evergreen.max_simds = 7;
3179
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3180
		rdev->config.evergreen.max_gprs = 256;
3181
		rdev->config.evergreen.max_threads = 248;
3182
		rdev->config.evergreen.max_gs_threads = 32;
3183
		rdev->config.evergreen.max_stack_entries = 512;
3184
		rdev->config.evergreen.sx_num_of_sets = 4;
3185
		rdev->config.evergreen.sx_max_export_size = 256;
3186
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3187
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3188
		rdev->config.evergreen.max_hw_contexts = 8;
3189
		rdev->config.evergreen.sq_num_cf_insts = 2;
3190
 
3191
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3192
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3193
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2997 Serge 3194
		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
1963 serge 3195
		break;
3196
	case CHIP_TURKS:
3197
		rdev->config.evergreen.num_ses = 1;
3198
		rdev->config.evergreen.max_pipes = 4;
3199
		rdev->config.evergreen.max_tile_pipes = 4;
3200
		rdev->config.evergreen.max_simds = 6;
3201
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3202
		rdev->config.evergreen.max_gprs = 256;
3203
		rdev->config.evergreen.max_threads = 248;
3204
		rdev->config.evergreen.max_gs_threads = 32;
3205
		rdev->config.evergreen.max_stack_entries = 256;
3206
		rdev->config.evergreen.sx_num_of_sets = 4;
3207
		rdev->config.evergreen.sx_max_export_size = 256;
3208
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3209
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3210
		rdev->config.evergreen.max_hw_contexts = 8;
3211
		rdev->config.evergreen.sq_num_cf_insts = 2;
3212
 
3213
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3214
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3215
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2997 Serge 3216
		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
1963 serge 3217
		break;
3218
	case CHIP_CAICOS:
3219
		rdev->config.evergreen.num_ses = 1;
3192 Serge 3220
		rdev->config.evergreen.max_pipes = 2;
1963 serge 3221
		rdev->config.evergreen.max_tile_pipes = 2;
3222
		rdev->config.evergreen.max_simds = 2;
3223
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3224
		rdev->config.evergreen.max_gprs = 256;
3225
		rdev->config.evergreen.max_threads = 192;
3226
		rdev->config.evergreen.max_gs_threads = 16;
3227
		rdev->config.evergreen.max_stack_entries = 256;
3228
		rdev->config.evergreen.sx_num_of_sets = 4;
3229
		rdev->config.evergreen.sx_max_export_size = 128;
3230
		rdev->config.evergreen.sx_max_export_pos_size = 32;
3231
		rdev->config.evergreen.sx_max_export_smx_size = 96;
3232
		rdev->config.evergreen.max_hw_contexts = 4;
3233
		rdev->config.evergreen.sq_num_cf_insts = 1;
3234
 
3235
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3236
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3237
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2997 Serge 3238
		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
1963 serge 3239
		break;
3240
	}

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

	evergreen_fix_pci_max_read_req_size(rdev);

	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
	else
		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	/* setup tiling info dword.  gb_addr_config is not adequate since it does
	 * not have bank info, so create a custom tiling dword.
	 * bits 3:0   num_pipes
	 * bits 7:4   num_banks
	 * bits 11:8  group_size
	 * bits 15:12 row_size
	 */
	rdev->config.evergreen.tile_config = 0;
	switch (rdev->config.evergreen.max_tile_pipes) {
	case 1:
	default:
		rdev->config.evergreen.tile_config |= (0 << 0);
		break;
	case 2:
		rdev->config.evergreen.tile_config |= (1 << 0);
		break;
	case 4:
		rdev->config.evergreen.tile_config |= (2 << 0);
		break;
	case 8:
		rdev->config.evergreen.tile_config |= (3 << 0);
		break;
	}
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
	if (rdev->flags & RADEON_IS_IGP)
		rdev->config.evergreen.tile_config |= 1 << 4;
	else {
		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
		case 0: /* four banks */
			rdev->config.evergreen.tile_config |= 0 << 4;
			break;
		case 1: /* eight banks */
			rdev->config.evergreen.tile_config |= 1 << 4;
			break;
		case 2: /* sixteen banks */
		default:
			rdev->config.evergreen.tile_config |= 2 << 4;
			break;
		}
	}
	rdev->config.evergreen.tile_config |= 0 << 8;
	rdev->config.evergreen.tile_config |=
		((gb_addr_config & 0x30000000) >> 28) << 12;
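	/* e.g. a Barts board (8 tile pipes) with eight banks encodes as 0x13
	 * in the low byte; bits 15:12 take the ROW_SIZE field of
	 * gb_addr_config (bits 29:28).
	 */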

	num_shader_engines = ((gb_addr_config & NUM_SHADER_ENGINES(3)) >> 12) + 1;

	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
		u32 efuse_straps_4;
		u32 efuse_straps_3;

		efuse_straps_4 = RREG32_RCU(0x204);
		efuse_straps_3 = RREG32_RCU(0x203);
		tmp = (((efuse_straps_4 & 0xf) << 4) |
		      ((efuse_straps_3 & 0xf0000000) >> 28));
	} else {
		tmp = 0;
		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
			u32 rb_disable_bitmap;

			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
			tmp <<= 4;
			tmp |= rb_disable_bitmap;
		}
	}
	/* enabled rb are just the ones not disabled :) */
	disabled_rb_mask = tmp;
	tmp = 0;
	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
		tmp |= (1 << i);
	/* if all the backends are disabled, fix it up here */
	if ((disabled_rb_mask & tmp) == tmp) {
		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
			disabled_rb_mask &= ~(1 << i);
	}

	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
		u32 simd_disable_bitmap;

		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
		tmp <<= 16;
		tmp |= simd_disable_bitmap;
	}
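	/* tmp now marks every unusable SIMD: lanes fused off in
	 * CC_GC_SHADER_PIPE_CONFIG plus every bit at or above max_simds.
	 * e.g. one SE with max_simds = 10 and nothing fused off leaves
	 * bits 9:0 clear, so hweight32(~tmp) yields 10 active SIMDs.
	 */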
	rdev->config.evergreen.active_simds = hweight32(~tmp);

	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);

	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
	WREG32(DMA_TILING_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);

	if ((rdev->config.evergreen.max_backends == 1) &&
	    (rdev->flags & RADEON_IS_IGP)) {
		if ((disabled_rb_mask & 3) == 1) {
			/* RB0 disabled, RB1 enabled */
			tmp = 0x11111111;
		} else {
			/* RB1 disabled, RB0 enabled */
			tmp = 0x00000000;
		}
	} else {
		tmp = gb_addr_config & NUM_PIPES_MASK;
		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
	}
	WREG32(GB_BACKEND_MAP, tmp);

	WREG32(CGTS_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_TCC_DISABLE, 0);

	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));

	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
			     SYNC_GRADIENT |
			     SYNC_WALKER |
			     SYNC_ALIGNER));

	sx_debug_1 = RREG32(SX_DEBUG_1);
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
	WREG32(SX_DEBUG_1, sx_debug_1);

	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);

	if (rdev->family <= CHIP_SUMO2)
		WREG32(SMX_SAR_CTL0, 0x00010000);

	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);
	WREG32(SPI_CONFIG_CNTL, 0);
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
				  FETCH_FIFO_HIWATER(0x4) |
				  DONE_FIFO_HIWATER(0xe0) |
				  ALU_UPDATE_FIFO_HIWATER(0x8)));

	sq_config = RREG32(SQ_CONFIG);
	sq_config &= ~(PS_PRIO(3) |
		       VS_PRIO(3) |
		       GS_PRIO(3) |
		       ES_PRIO(3));
	sq_config |= (VC_ENABLE |
		      EXPORT_SRC_C |
		      PS_PRIO(0) |
		      VS_PRIO(1) |
		      GS_PRIO(2) |
		      ES_PRIO(3));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;
		break;
	default:
		break;
	}

	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 12 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
		ps_thread_count = 96;
		break;
	default:
		ps_thread_count = 128;
		break;
	}

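	/* PS gets its own thread budget; the remaining threads are split six
	 * ways and rounded down to a multiple of 8 for each of the other
	 * stages, e.g. max_threads = 248 with ps_thread_count = 128 gives
	 * ((248 - 128) / 6 / 8) * 8 = 16 threads per stage.
	 */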
	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
		break;
	default:
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
		break;
	}
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
	WREG32(VGT_OUT_DEALLOC_CNTL, 16);

	WREG32(CB_PERF_CTR0_SEL_0, 0);
	WREG32(CB_PERF_CTR0_SEL_1, 0);
	WREG32(CB_PERF_CTR1_SEL_0, 0);
	WREG32(CB_PERF_CTR1_SEL_1, 0);
	WREG32(CB_PERF_CTR2_SEL_0, 0);
	WREG32(CB_PERF_CTR2_SEL_1, 0);
	WREG32(CB_PERF_CTR3_SEL_0, 0);
	WREG32(CB_PERF_CTR3_SEL_1, 0);

	/* clear render buffer base addresses */
	WREG32(CB_COLOR0_BASE, 0);
	WREG32(CB_COLOR1_BASE, 0);
	WREG32(CB_COLOR2_BASE, 0);
	WREG32(CB_COLOR3_BASE, 0);
	WREG32(CB_COLOR4_BASE, 0);
	WREG32(CB_COLOR5_BASE, 0);
	WREG32(CB_COLOR6_BASE, 0);
	WREG32(CB_COLOR7_BASE, 0);
	WREG32(CB_COLOR8_BASE, 0);
	WREG32(CB_COLOR9_BASE, 0);
	WREG32(CB_COLOR10_BASE, 0);
	WREG32(CB_COLOR11_BASE, 0);

	/* set the shader const cache sizes to 0 */
	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
		WREG32(i, 0);
	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
		WREG32(i, 0);

	tmp = RREG32(HDP_MISC_CNTL);
	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
	WREG32(HDP_MISC_CNTL, tmp);

	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	udelay(50);

}

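/**
 * evergreen_mc_init - initialize the memory controller info
 *
 * @rdev: radeon_device pointer
 *
 * Derives the VRAM bus width from the channel size and channel count,
 * reads the aperture and VRAM sizes, and places the VRAM and GTT
 * apertures in the GPU's address space (evergreen).
 * Always returns 0.
 */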
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
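	/* e.g. two 64-bit channels -> a 128-bit memory interface */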
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}

void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}

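/**
 * evergreen_is_display_hung - check if a display controller is hung
 *
 * @rdev: radeon_device pointer
 *
 * Samples the HV counter of every enabled CRTC ten times, roughly
 * 100 us apart.  A CRTC whose counter never advances is considered hung.
 * Returns true if any enabled CRTC appears hung, false otherwise.
 */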
bool evergreen_is_display_hung(struct radeon_device *rdev)
{
	u32 crtc_hung = 0;
	u32 crtc_status[6];
	u32 i, j, tmp;

	for (i = 0; i < rdev->num_crtc; i++) {
		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
			crtc_hung |= (1 << i);
		}
	}

	for (j = 0; j < 10; j++) {
		for (i = 0; i < rdev->num_crtc; i++) {
			if (crtc_hung & (1 << i)) {
				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
				if (tmp != crtc_status[i])
					crtc_hung &= ~(1 << i);
			}
		}
		if (crtc_hung == 0)
			return false;
		udelay(100);
	}

	return true;
}

u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's most likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}

static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}

	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}

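/**
 * evergreen_gpu_pci_config_reset - reset the asic via the pci config space
 *
 * @rdev: radeon_device pointer
 *
 * Halts the CP, DMA and RLC engines, switches mclk/sclk to bypass,
 * disables bus mastering and MC access, triggers a reset through the
 * PCI config space, then waits for CONFIG_MEMSIZE to become readable
 * again as the sign that the asic is back.
 */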
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}

int evergreen_asic_reset(struct radeon_device *rdev)
{
	u32 reset_mask;

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, true);

	/* try soft reset */
	evergreen_gpu_soft_reset(rdev, reset_mask);

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	/* try pci config reset */
	if (reset_mask && radeon_hard_reset)
		evergreen_gpu_pci_config_reset(rdev);

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (!reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, false);

	return 0;
}

/**
 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
 *
 * @rdev: radeon_device pointer
 * @ring: radeon_ring structure holding ring information
 *
 * Check if the GFX engine is locked up.
 * Returns true if the engine appears to be locked up, false if not.
 */
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
{
	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (!(reset_mask & (RADEON_RESET_GFX |
			    RADEON_RESET_COMPUTE |
			    RADEON_RESET_CP))) {
		radeon_ring_lockup_update(rdev, ring);
		return false;
	}
	return radeon_ring_test_lockup(rdev, ring);
}

/*
 * RLC
 */
#define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
#define RLC_CLEAR_STATE_END_MARKER          0x00000001

void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}

#define CP_ME_TABLE_SIZE    96

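/**
 * sumo_rlc_init - set up the RLC driver-side buffers
 *
 * @rdev: radeon_device pointer
 *
 * Allocates, pins and fills the RLC save/restore buffer, the clear
 * state buffer and, when a cp_table_size is set, the CP power-gating
 * table, using the family-specific layouts handled below.
 * Returns 0 on success, negative error code on failure (partially
 * created buffers are torn down again via sumo_rlc_fini()).
 */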
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
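			/* e.g. two offsets 0x8000 and 0x8004 would pack as
			 * ((0x8004 >> 2) << 16) | (0x8000 >> 2) = 0x20012000
			 */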
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
	}

	return 0;
}

static void evergreen_rlc_start(struct radeon_device *rdev)
{
	u32 mask = RLC_ENABLE;

	if (rdev->flags & RADEON_IS_IGP) {
		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
	}

	WREG32(RLC_CNTL, mask);
}

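/**
 * evergreen_rlc_resume - load the RLC microcode and start the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Stops the RLC, applies the family-specific setup (including the
 * save/restore and clear state base addresses on IGPs), uploads the
 * big-endian microcode words and restarts the RLC.
 * Returns 0 on success, -EINVAL if no RLC firmware has been loaded.
 */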
int evergreen_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->flags & RADEON_IS_IGP) {
		if (rdev->family == CHIP_ARUBA) {
			u32 always_on_bitmap =
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
			/* find out the number of active simds */
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
			tmp = hweight32(~tmp);
			if (tmp == rdev->config.cayman.max_simds_per_se) {
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
			}
		} else {
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		}
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	} else {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	WREG32(RLC_UCODE_ADDR, 0);

	evergreen_rlc_start(rdev);

	return 0;
}

1963 serge 4299
/* Interrupts */
4300
 
4301
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4302
{
2997 Serge 4303
	if (crtc >= rdev->num_crtc)
3031 serge 4304
		return 0;
2997 Serge 4305
	else
4306
		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
1963 serge 4307
}
4308
 
4309
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4310
{
4311
	u32 tmp;
4312
 
2997 Serge 4313
	if (rdev->family >= CHIP_CAYMAN) {
4314
		cayman_cp_int_cntl_setup(rdev, 0,
4315
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4316
		cayman_cp_int_cntl_setup(rdev, 1, 0);
4317
		cayman_cp_int_cntl_setup(rdev, 2, 0);
3192 Serge 4318
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4319
		WREG32(CAYMAN_DMA1_CNTL, tmp);
2997 Serge 4320
	} else
3031 serge 4321
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3192 Serge 4322
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4323
	WREG32(DMA_CNTL, tmp);
1963 serge 4324
	WREG32(GRBM_INT_CNTL, 0);
4325
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4326
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
2005 serge 4327
	if (rdev->num_crtc >= 4) {
3031 serge 4328
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4329
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2005 serge 4330
	}
4331
	if (rdev->num_crtc >= 6) {
3031 serge 4332
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4333
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1963 serge 4334
	}
4335
 
4336
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4337
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
2005 serge 4338
	if (rdev->num_crtc >= 4) {
3031 serge 4339
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4340
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2005 serge 4341
	}
4342
	if (rdev->num_crtc >= 6) {
3031 serge 4343
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4344
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1963 serge 4345
	}
4346
 
5078 serge 4347
	/* only one DAC on DCE5 */
4348
	if (!ASIC_IS_DCE5(rdev))
3031 serge 4349
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
1963 serge 4350
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4351
 
4352
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4353
	WREG32(DC_HPD1_INT_CONTROL, tmp);
4354
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4355
	WREG32(DC_HPD2_INT_CONTROL, tmp);
4356
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4357
	WREG32(DC_HPD3_INT_CONTROL, tmp);
4358
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4359
	WREG32(DC_HPD4_INT_CONTROL, tmp);
4360
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4361
	WREG32(DC_HPD5_INT_CONTROL, tmp);
4362
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4363
	WREG32(DC_HPD6_INT_CONTROL, tmp);
4364
 
4365
}
2005 serge 4366
 
4367
int evergreen_irq_set(struct radeon_device *rdev)
4368
{
4369
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
2997 Serge 4370
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
2005 serge 4371
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4372
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4373
	u32 grbm_int_cntl = 0;
2997 Serge 4374
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
3192 Serge 4375
	u32 dma_cntl, dma_cntl1 = 0;
5078 serge 4376
	u32 thermal_int = 0;
2005 serge 4377
 
4378
	if (!rdev->irq.installed) {
4379
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4380
		return -EINVAL;
4381
	}
4382
	/* don't enable anything if the ih is disabled */
4383
	if (!rdev->ih.enabled) {
4384
		r600_disable_interrupts(rdev);
4385
		/* force the active interrupt state to all disabled */
4386
		evergreen_disable_interrupt_state(rdev);
4387
		return 0;
4388
	}
4389
 
4390
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
4391
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
4392
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
4393
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
4394
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
4395
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
5078 serge 4396
	if (rdev->family == CHIP_ARUBA)
4397
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4398
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4399
	else
4400
		thermal_int = RREG32(CG_THERMAL_INT) &
4401
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
2005 serge 4402
 
2997 Serge 4403
	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4404
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4405
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4406
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4407
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4408
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4409
 
3192 Serge 4410
	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4411
 
2997 Serge 4412
	if (rdev->family >= CHIP_CAYMAN) {
4413
		/* enable CP interrupts on all rings */
4414
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4415
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4416
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4417
		}
4418
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4419
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4420
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4421
		}
4422
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4423
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4424
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4425
		}
4426
	} else {
4427
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4428
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
3031 serge 4429
			cp_int_cntl |= RB_INT_ENABLE;
4430
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4431
		}
2005 serge 4432
	}
2997 Serge 4433
 
3192 Serge 4434
	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4435
		DRM_DEBUG("r600_irq_set: sw int dma\n");
4436
		dma_cntl |= TRAP_ENABLE;
4437
	}
4438
 
4439
	if (rdev->family >= CHIP_CAYMAN) {
4440
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4441
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4442
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
4443
			dma_cntl1 |= TRAP_ENABLE;
4444
		}
4445
	}
4446
 
5078 serge 4447
	if (rdev->irq.dpm_thermal) {
4448
		DRM_DEBUG("dpm thermal\n");
4449
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4450
	}
4451
 
2005 serge 4452
	if (rdev->irq.crtc_vblank_int[0] ||
2997 Serge 4453
	    atomic_read(&rdev->irq.pflip[0])) {
2005 serge 4454
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4455
		crtc1 |= VBLANK_INT_MASK;
4456
	}
4457
	if (rdev->irq.crtc_vblank_int[1] ||
2997 Serge 4458
	    atomic_read(&rdev->irq.pflip[1])) {
2005 serge 4459
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4460
		crtc2 |= VBLANK_INT_MASK;
4461
	}
4462
	if (rdev->irq.crtc_vblank_int[2] ||
2997 Serge 4463
	    atomic_read(&rdev->irq.pflip[2])) {
2005 serge 4464
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4465
		crtc3 |= VBLANK_INT_MASK;
4466
	}
4467
	if (rdev->irq.crtc_vblank_int[3] ||
2997 Serge 4468
	    atomic_read(&rdev->irq.pflip[3])) {
2005 serge 4469
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4470
		crtc4 |= VBLANK_INT_MASK;
4471
	}
4472
	if (rdev->irq.crtc_vblank_int[4] ||
2997 Serge 4473
	    atomic_read(&rdev->irq.pflip[4])) {
2005 serge 4474
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4475
		crtc5 |= VBLANK_INT_MASK;
4476
	}
4477
	if (rdev->irq.crtc_vblank_int[5] ||
2997 Serge 4478
	    atomic_read(&rdev->irq.pflip[5])) {
2005 serge 4479
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4480
		crtc6 |= VBLANK_INT_MASK;
4481
	}
4482
	if (rdev->irq.hpd[0]) {
4483
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4484
		hpd1 |= DC_HPDx_INT_EN;
4485
	}
4486
	if (rdev->irq.hpd[1]) {
4487
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4488
		hpd2 |= DC_HPDx_INT_EN;
4489
	}
4490
	if (rdev->irq.hpd[2]) {
4491
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4492
		hpd3 |= DC_HPDx_INT_EN;
4493
	}
4494
	if (rdev->irq.hpd[3]) {
4495
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4496
		hpd4 |= DC_HPDx_INT_EN;
4497
	}
4498
	if (rdev->irq.hpd[4]) {
4499
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4500
		hpd5 |= DC_HPDx_INT_EN;
4501
	}
4502
	if (rdev->irq.hpd[5]) {
4503
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4504
		hpd6 |= DC_HPDx_INT_EN;
4505
	}
2997 Serge 4506
	if (rdev->irq.afmt[0]) {
4507
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4508
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
2005 serge 4509
	}
2997 Serge 4510
	if (rdev->irq.afmt[1]) {
4511
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4512
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4513
	}
4514
	if (rdev->irq.afmt[2]) {
4515
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4516
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4517
	}
4518
	if (rdev->irq.afmt[3]) {
4519
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4520
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4521
	}
4522
	if (rdev->irq.afmt[4]) {
4523
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4524
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4525
	}
4526
	if (rdev->irq.afmt[5]) {
4527
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4528
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4529
	}
2005 serge 4530
 
2997 Serge 4531
	if (rdev->family >= CHIP_CAYMAN) {
4532
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4533
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4534
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4535
	} else
3031 serge 4536
		WREG32(CP_INT_CNTL, cp_int_cntl);
3192 Serge 4537
 
4538
	WREG32(DMA_CNTL, dma_cntl);
4539
 
4540
	if (rdev->family >= CHIP_CAYMAN)
4541
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4542
 
2005 serge 4543
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4544
 
4545
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4546
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4547
	if (rdev->num_crtc >= 4) {
4548
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4549
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4550
	}
4551
	if (rdev->num_crtc >= 6) {
4552
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4553
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4554
	}
4555
 
5078 serge 4556
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4557
	       GRPH_PFLIP_INT_MASK);
4558
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4559
	       GRPH_PFLIP_INT_MASK);
2005 serge 4560
	if (rdev->num_crtc >= 4) {
5078 serge 4561
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4562
		       GRPH_PFLIP_INT_MASK);
4563
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4564
		       GRPH_PFLIP_INT_MASK);
2005 serge 4565
	}
4566
	if (rdev->num_crtc >= 6) {
5078 serge 4567
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4568
		       GRPH_PFLIP_INT_MASK);
4569
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4570
		       GRPH_PFLIP_INT_MASK);
2005 serge 4571
	}
4572
 
4573
	WREG32(DC_HPD1_INT_CONTROL, hpd1);
4574
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
4575
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
4576
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
4577
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
4578
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
5078 serge 4579
	if (rdev->family == CHIP_ARUBA)
4580
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4581
	else
4582
		WREG32(CG_THERMAL_INT, thermal_int);
2005 serge 4583
 
2997 Serge 4584
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4585
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4586
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4587
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4588
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4589
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4590
 
2005 serge 4591
	return 0;
4592
}
static void evergreen_irq_ack(struct radeon_device *rdev)
{
	u32 tmp;

	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	if (rdev->num_crtc >= 4) {
		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	}
	if (rdev->num_crtc >= 6) {
		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
	}

	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);

	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);

	if (rdev->num_crtc >= 4) {
		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
	}

	if (rdev->num_crtc >= 6) {
		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
	}

	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
	}
}
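
/**
 * evergreen_irq_disable - disable interrupt generation
 * @rdev: radeon_device pointer
 *
 * Descriptive comment added for clarity: masks all interrupt
 * sources, waits briefly for in-flight interrupts, acks whatever is
 * still pending and then clears the interrupt enable state.
 */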
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
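
/**
 * evergreen_irq_suspend - disable interrupts for suspend
 * @rdev: radeon_device pointer
 *
 * Descriptive comment added for clarity: disables interrupt
 * generation and stops the RLC before the device is suspended.
 */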
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
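
/**
 * evergreen_get_ih_wptr - fetch the current IH ring write pointer
 * @rdev: radeon_device pointer
 *
 * Descriptive comment added for clarity: reads the write pointer
 * from the writeback page when writeback is enabled, otherwise from
 * the IH_RB_WPTR register.  On an overflow the read pointer is
 * moved past the last vector that was not overwritten (wptr + 16)
 * and the overflow flag is cleared before the masked write pointer
 * is returned.
 */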
static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
{
	u32 wptr, tmp;

	if (rdev->wb.enabled)
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
	else
		wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
		/* When a ring buffer overflow happens, start parsing interrupts
		 * from the last not overwritten vector (wptr + 16). Hopefully
		 * this should allow us to catch up.
		 */
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
			wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
		wptr &= ~RB_OVERFLOW;
	}
	return (wptr & rdev->ih.ptr_mask);
}
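
/**
 * evergreen_irq_process - walk the IH ring and dispatch interrupts
 * @rdev: radeon_device pointer
 *
 * Descriptive comment added for clarity: each IH ring entry is 16
 * bytes (four dwords); dword 0 carries the 8-bit source id and
 * dword 1 the 28-bit source data, hence the 0xff/0xfffffff masks
 * and the "rptr += 16" step in the loop below.  Vectors are
 * consumed until rptr catches up with wptr; if new entries arrived
 * in the meantime, processing restarts.  The atomic_xchg on
 * rdev->ih.lock keeps concurrent callers out of the loop.
 */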
int evergreen_irq_process(struct radeon_device *rdev)
{
	u32 wptr;
	u32 rptr;
	u32 src_id, src_data;
	u32 ring_index;
	bool queue_hotplug = false;
	bool queue_hdmi = false;
	bool queue_thermal = false;
	u32 status, addr;

	if (!rdev->ih.enabled || rdev->shutdown)
		return IRQ_NONE;

	wptr = evergreen_get_ih_wptr(rdev);

restart_ih:
	/* is somebody else already processing irqs? */
	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;

	rptr = rdev->ih.rptr;
	DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);

	/* Order reading of wptr vs. reading of IH ring data */
	rmb();

	/* display interrupts */
	evergreen_irq_ack(rdev);

	while (rptr != wptr) {
		/* wptr/rptr are in bytes! */
		ring_index = rptr / 4;
		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;

		switch (src_id) {
		case 1: /* D1 vblank/vline */
			switch (src_data) {
			case 0: /* D1 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[0]) {
				//		drm_handle_vblank(rdev->ddev, 0);
						rdev->pm.vblank_sync = true;
				//		wake_up(&rdev->irq.vblank_queue);
					}
				//	if (rdev->irq.pflip[0])
				//		radeon_crtc_handle_flip(rdev, 0);
					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D1 vblank\n");
				}
				break;
			case 1: /* D1 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D1 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 2: /* D2 vblank/vline */
			switch (src_data) {
			case 0: /* D2 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[1]) {
				//		drm_handle_vblank(rdev->ddev, 1);
						rdev->pm.vblank_sync = true;
				//		wake_up(&rdev->irq.vblank_queue);
					}
			//		if (rdev->irq.pflip[1])
			//			radeon_crtc_handle_flip(rdev, 1);
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D2 vblank\n");
				}
				break;
			case 1: /* D2 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D2 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 3: /* D3 vblank/vline */
			switch (src_data) {
			case 0: /* D3 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[2]) {
				//		drm_handle_vblank(rdev->ddev, 2);
						rdev->pm.vblank_sync = true;
				//		wake_up(&rdev->irq.vblank_queue);
					}
				//	if (rdev->irq.pflip[2])
				//		radeon_crtc_handle_flip(rdev, 2);
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D3 vblank\n");
				}
				break;
			case 1: /* D3 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D3 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 4: /* D4 vblank/vline */
			switch (src_data) {
			case 0: /* D4 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[3]) {
					//	drm_handle_vblank(rdev->ddev, 3);
						rdev->pm.vblank_sync = true;
					//	wake_up(&rdev->irq.vblank_queue);
					}
		//			if (rdev->irq.pflip[3])
		//				radeon_crtc_handle_flip(rdev, 3);
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D4 vblank\n");
				}
				break;
			case 1: /* D4 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D4 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 5: /* D5 vblank/vline */
			switch (src_data) {
			case 0: /* D5 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[4]) {
//						drm_handle_vblank(rdev->ddev, 4);
						rdev->pm.vblank_sync = true;
//						wake_up(&rdev->irq.vblank_queue);
					}
//					if (rdev->irq.pflip[4])
//						radeon_crtc_handle_flip(rdev, 4);
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D5 vblank\n");
				}
				break;
			case 1: /* D5 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D5 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 6: /* D6 vblank/vline */
			switch (src_data) {
			case 0: /* D6 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[5]) {
				//		drm_handle_vblank(rdev->ddev, 5);
						rdev->pm.vblank_sync = true;
				//		wake_up(&rdev->irq.vblank_queue);
					}
			//		if (rdev->irq.pflip[5])
			//			radeon_crtc_handle_flip(rdev, 5);
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D6 vblank\n");
				}
				break;
			case 1: /* D6 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D6 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 8: /* D1 page flip */
		case 10: /* D2 page flip */
		case 12: /* D3 page flip */
		case 14: /* D4 page flip */
		case 16: /* D5 page flip */
		case 18: /* D6 page flip */
			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
			break;
		case 42: /* HPD hotplug */
			switch (src_data) {
			case 0:
				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD1\n");
				}
				break;
			case 1:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD2\n");
				}
				break;
			case 2:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD3\n");
				}
				break;
			case 3:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD4\n");
				}
				break;
			case 4:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD5\n");
				}
				break;
			case 5:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD6\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 44: /* hdmi */
			switch (src_data) {
			case 0:
				if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
					rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
					queue_hdmi = true;
					DRM_DEBUG("IH: HDMI0\n");
				}
				break;
			case 1:
				if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
					rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
					queue_hdmi = true;
					DRM_DEBUG("IH: HDMI1\n");
				}
				break;
			case 2:
				if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
					rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
					queue_hdmi = true;
					DRM_DEBUG("IH: HDMI2\n");
				}
				break;
			case 3:
				if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
					rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
					queue_hdmi = true;
					DRM_DEBUG("IH: HDMI3\n");
				}
				break;
			case 4:
				if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
					rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
					queue_hdmi = true;
					DRM_DEBUG("IH: HDMI4\n");
				}
				break;
			case 5:
				if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
					rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
					queue_hdmi = true;
					DRM_DEBUG("IH: HDMI5\n");
				}
				break;
			default:
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 124: /* UVD */
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
			break;
		case 146:
		case 147:
			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
			/* reset addr and status */
			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
			if (addr == 0x0 && status == 0x0)
				break;
			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
				addr);
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
				status);
			cayman_vm_decode_fault(rdev, status, addr);
			break;
		case 176: /* CP_INT in ring buffer */
		case 177: /* CP_INT in IB1 */
		case 178: /* CP_INT in IB2 */
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 181: /* CP EOP event */
			DRM_DEBUG("IH: CP EOP\n");
			if (rdev->family >= CHIP_CAYMAN) {
				switch (src_data) {
				case 0:
					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
					break;
				case 1:
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
					break;
				case 2:
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
					break;
				}
			} else
				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 224: /* DMA trap event */
			DRM_DEBUG("IH: DMA trap\n");
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
			break;
		case 230: /* thermal low to high */
			DRM_DEBUG("IH: thermal low to high\n");
			rdev->pm.dpm.thermal.high_to_low = false;
			queue_thermal = true;
			break;
		case 231: /* thermal high to low */
			DRM_DEBUG("IH: thermal high to low\n");
			rdev->pm.dpm.thermal.high_to_low = true;
			queue_thermal = true;
			break;
		case 233: /* GUI IDLE */
			DRM_DEBUG("IH: GUI idle\n");
			break;
		case 244: /* DMA trap event */
			if (rdev->family >= CHIP_CAYMAN) {
				DRM_DEBUG("IH: DMA1 trap\n");
				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
			}
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}

		/* wptr/rptr are in bytes! */
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
	}
	rdev->ih.rptr = rptr;
	WREG32(IH_RB_RPTR, rdev->ih.rptr);
	atomic_set(&rdev->ih.lock, 0);

	/* make sure wptr hasn't changed while processing */
	wptr = evergreen_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}
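
/**
 * evergreen_startup - bring the hardware up
 * @rdev: radeon_device pointer
 *
 * Descriptive comment added for clarity: programs the MC, enables
 * GART (or AGP), loads microcode, sets up the RLC, writeback, fence
 * and IRQ infrastructure, and finally brings up the CP and DMA
 * rings plus the IB pool.  Any failure is returned so the caller
 * can disable acceleration.
 */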
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

//   r = rv770_uvd_resume(rdev);
//   if (!r) {
//       r = radeon_fence_driver_start_ring(rdev,
//                          R600_RING_TYPE_UVD_INDEX);
//       if (r)
//           dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
//   }

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
//		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	return 0;
}
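
/**
 * evergreen_init - asic specific driver and hw init
 * @rdev: radeon_device pointer
 *
 * Descriptive comment added for clarity: one-time init path that
 * reads and posts the (ATOM) BIOS, initializes clocks, the memory
 * controller, fence driver and rings, then calls
 * evergreen_startup().  Returns 0 on success, a negative error
 * code on failure.
 */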
/* Plan is to move initialization in that function and use
 * helper functions so that radeon_device_init pretty much
 * does nothing more than calling asic specific functions. This
 * should also allow us to remove a bunch of callback functions
 * like vram_info.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

//   r = radeon_uvd_init(rdev);
//   if (!r) {
//       rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
//       r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
//                  4096);
//   }

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
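
/**
 * evergreen_pcie_gen2_enable - switch the PCIE link to gen 2 speeds
 * @rdev: radeon_device pointer
 *
 * Descriptive comment added for clarity: bails out for IGP,
 * non-PCIE and x2 parts, for buses that support neither 5.0 nor
 * 8.0 GT/s, and when gen 2 is already active; otherwise retrains
 * the link through the LC_SPEED_CNTL / LC_LINK_WIDTH_CNTL port
 * registers.  Can be disabled with radeon.pcie_gen2=0.
 */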
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
	    (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
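
/**
 * evergreen_program_aspm - set up PCIE ASPM (L0s/L1) power states
 * @rdev: radeon_device pointer
 *
 * Descriptive comment added for clarity: programs the PIF PHY
 * pairing, PLL power-down and LC_CNTL inactivity timers according
 * to the chip family.  L0s stays disabled on the families listed
 * in the switch below; ASPM can be disabled entirely with
 * radeon.aspm=0.
 */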
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}