Subversion Repositories Kolibri OS

Rev

Rev 3192 | Rev 5078 | Go to most recent revision | Details | Compare with Previous | Last modification | View Log | RSS feed

Rev Author Line No. Line
1430 serge 1
/*
2
 * Copyright 2010 Advanced Micro Devices, Inc.
3
 *
4
 * Permission is hereby granted, free of charge, to any person obtaining a
5
 * copy of this software and associated documentation files (the "Software"),
6
 * to deal in the Software without restriction, including without limitation
7
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
 * and/or sell copies of the Software, and to permit persons to whom the
9
 * Software is furnished to do so, subject to the following conditions:
10
 *
11
 * The above copyright notice and this permission notice shall be included in
12
 * all copies or substantial portions of the Software.
13
 *
14
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20
 * OTHER DEALINGS IN THE SOFTWARE.
21
 *
22
 * Authors: Alex Deucher
23
 */
24
#include 
25
//#include 
1963 serge 26
#include 
2997 Serge 27
#include 
1430 serge 28
#include "radeon.h"
1963 serge 29
#include "radeon_asic.h"
2997 Serge 30
#include 
1963 serge 31
#include "evergreend.h"
1430 serge 32
#include "atom.h"
33
#include "avivod.h"
34
#include "evergreen_reg.h"
1986 serge 35
#include "evergreen_blit_shaders.h"
1430 serge 36
 
1963 serge 37
/* Sizes of the CP PFP and PM4 (ME) microcode images for Evergreen-class
 * hardware — presumably in dwords, matching the radeon ucode loader;
 * confirm against the firmware upload code. */
#define EVERGREEN_PFP_UCODE_SIZE 1120
#define EVERGREEN_PM4_UCODE_SIZE 1376
39
 
2997 Serge 40
/* MMIO register-block offsets of the six display controllers,
 * indexable by CRTC id (0..5). */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
49
 
1430 serge 50
static void evergreen_gpu_init(struct radeon_device *rdev);
51
void evergreen_fini(struct radeon_device *rdev);
2997 Serge 52
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
53
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
54
				     int ring, u32 cp_int_cntl);
1430 serge 55
 
3764 Serge 56
/* Golden register table, flat triples — presumably {MMIO offset, mask,
 * value} — consumed by radeon_program_register_sequence().  Used for
 * CYPRESS/HEMLOCK, JUNIPER and REDWOOD (see evergreen_init_golden_registers()). */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5cc, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
101
 
102
/* Second golden register table shared by all Evergreen ASICs handled in
 * evergreen_init_golden_registers(); with 0xffffffff masks these
 * registers are simply cleared to 0. */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
119
 
120
/* MGCG (clock-gating) init sequence for CYPRESS/HEMLOCK — {offset, mask,
 * value} triples programmed in order by radeon_program_register_sequence().
 * The repeated 0x802c writes look like bank/index selects framing the two
 * identical 0x915c..0x929c runs — TODO confirm against register docs. */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
272
 
273
/* MGCG (clock-gating) init sequence for REDWOOD — same layout as
 * cypress_mgcg_init but with a single (shorter) 0x915c..0x9204 run. */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
344
 
345
/* Golden register table for CEDAR — mirrors evergreen_golden_registers
 * with Cedar-specific values (fewer pipe registers, 0x8cf0/0x88d4 differ). */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5cc, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
387
 
388
/* MGCG (clock-gating) init sequence for CEDAR — shortest of the mgcg
 * tables; same {offset, mask, value} triple format. */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
441
 
442
/* MGCG (clock-gating) init sequence for JUNIPER — note the tail of the
 * common preamble (0x977c..0x30cc) is programmed at the end here rather
 * than before the 0x915c run as in the other mgcg tables. */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
539
 
540
/* Golden register table for SUMO/SUMO2 (SuperSumo APUs) — applied for
 * both; SUMO2 additionally gets sumo_golden_registers on top. */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5cc, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
614
 
615
/* Extra golden registers applied only on SUMO2, after
 * supersumo_golden_registers (see evergreen_init_golden_registers()). */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
623
 
624
/* Golden register table for PALM (Wrestler APU). */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5cc, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
676
 
677
/* Golden register table for BARTS (Northern Islands) — note the narrower
 * masks here, so only selected bit-fields of each register are updated. */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
725
 
726
/* Golden register table for TURKS (Northern Islands). */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
776
 
777
/* Golden register table for CAICOS (Northern Islands). */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
827
 
828
static void evergreen_init_golden_registers(struct radeon_device *rdev)
829
{
830
	switch (rdev->family) {
831
	case CHIP_CYPRESS:
832
	case CHIP_HEMLOCK:
833
		radeon_program_register_sequence(rdev,
834
						 evergreen_golden_registers,
835
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
836
		radeon_program_register_sequence(rdev,
837
						 evergreen_golden_registers2,
838
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
839
		radeon_program_register_sequence(rdev,
840
						 cypress_mgcg_init,
841
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
842
		break;
843
	case CHIP_JUNIPER:
844
		radeon_program_register_sequence(rdev,
845
						 evergreen_golden_registers,
846
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
847
		radeon_program_register_sequence(rdev,
848
						 evergreen_golden_registers2,
849
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
850
		radeon_program_register_sequence(rdev,
851
						 juniper_mgcg_init,
852
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
853
		break;
854
	case CHIP_REDWOOD:
855
		radeon_program_register_sequence(rdev,
856
						 evergreen_golden_registers,
857
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
858
		radeon_program_register_sequence(rdev,
859
						 evergreen_golden_registers2,
860
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
861
		radeon_program_register_sequence(rdev,
862
						 redwood_mgcg_init,
863
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
864
		break;
865
	case CHIP_CEDAR:
866
		radeon_program_register_sequence(rdev,
867
						 cedar_golden_registers,
868
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
869
		radeon_program_register_sequence(rdev,
870
						 evergreen_golden_registers2,
871
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
872
		radeon_program_register_sequence(rdev,
873
						 cedar_mgcg_init,
874
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
875
		break;
876
	case CHIP_PALM:
877
		radeon_program_register_sequence(rdev,
878
						 wrestler_golden_registers,
879
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
880
		break;
881
	case CHIP_SUMO:
882
		radeon_program_register_sequence(rdev,
883
						 supersumo_golden_registers,
884
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
885
		break;
886
	case CHIP_SUMO2:
887
		radeon_program_register_sequence(rdev,
888
						 supersumo_golden_registers,
889
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
890
		radeon_program_register_sequence(rdev,
891
						 sumo_golden_registers,
892
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
893
		break;
894
	case CHIP_BARTS:
895
		radeon_program_register_sequence(rdev,
896
						 barts_golden_registers,
897
						 (const u32)ARRAY_SIZE(barts_golden_registers));
898
		break;
899
	case CHIP_TURKS:
900
		radeon_program_register_sequence(rdev,
901
						 turks_golden_registers,
902
						 (const u32)ARRAY_SIZE(turks_golden_registers));
903
		break;
904
	case CHIP_CAICOS:
905
		radeon_program_register_sequence(rdev,
906
						 caicos_golden_registers,
907
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
908
		break;
909
	default:
910
		break;
911
	}
912
}
913
 
2997 Serge 914
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
915
			     unsigned *bankh, unsigned *mtaspect,
916
			     unsigned *tile_split)
917
{
918
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
919
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
920
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
921
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
922
	switch (*bankw) {
923
	default:
924
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
925
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
926
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
927
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
928
	}
929
	switch (*bankh) {
930
	default:
931
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
932
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
933
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
934
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
935
	}
936
	switch (*mtaspect) {
937
	default:
938
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
939
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
940
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
941
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
942
	}
943
}
1990 serge 944
 
3764 Serge 945
/*
 * sumo_set_uvd_clock - program one UVD clock via its CNTL/STATUS pair.
 *
 * Queries the atom clock dividers for @clock, writes the post divider to
 * @cntl_reg, then polls @status_reg (up to 100 * 10 ms) for DCLK_STATUS.
 * Returns 0 on success, a negative error from the divider query, or
 * -ETIMEDOUT if the status bit never latched.
 *
 * NOTE(review): the scraped source read "&divide;rs" here — HTML-entity
 * mojibake of "&dividers"; restored to pass the dividers struct by address.
 */
static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}
968
 
969
/*
 * sumo_set_uvd_clocks - set both UVD clocks and record them in CG_SCRATCH1.
 *
 * Programs vclk then dclk; the scratch register keeps vclk in MHz in the
 * low 16 bits and dclk in MHz in the high 16 bits.  CG_SCRATCH1 is written
 * back on every exit path, even when a clock failed to program, exactly as
 * the original goto-based cleanup did.
 */
int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	u32 scratch = RREG32(CG_SCRATCH1);
	int ret;

	ret = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (!ret) {
		/* low word: vclk in MHz */
		scratch = (scratch & 0xffff0000) | (vclk / 100);

		ret = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
		if (!ret) {
			/* high word: dclk in MHz */
			scratch = (scratch & 0x0000ffff) | ((dclk / 100) << 16);
		}
	}

	WREG32(CG_SCRATCH1, scratch);

	return ret;
}
991
 
992
/*
 * evergreen_set_uvd_clocks - program the UVD PLL for the requested clocks.
 *
 * Bypasses VCLK/DCLK with BCLK, reprograms the UPLL dividers and switches
 * back to normal mode.  Passing vclk == 0 or dclk == 0 leaves the PLL in
 * bypass and puts it to sleep.
 *
 * The divider calculation and UPLL control requests are stubbed out in this
 * port (commented calls below), so fb_div/vclk_div/dclk_div stay 0.
 * Returns 0, or an error once the stubbed calls are restored.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	/* BUGFIX: r was declared uninitialized while the call that would set
	 * it is commented out; the "if (r)" below then read an indeterminate
	 * value (undefined behavior).  Initialize to 0 so the stubbed path is
	 * well defined.
	 */
	int r = 0;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

//   r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
//                     16384, 0x03FFFFFF, 0, 128, 5,
//                     &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

//   r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
//   if (r)
//       return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

//   r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
//   if (r)
//       return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1080
 
2997 Serge 1081
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1082
{
1083
	u16 ctl, v;
1084
	int err;
1990 serge 1085
 
2997 Serge 1086
	err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
1087
	if (err)
1088
		return;
1089
 
1090
	v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1091
 
1092
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1093
	 * to avoid hangs or perfomance issues
1094
	 */
1095
	if ((v == 0) || (v == 6) || (v == 7)) {
1096
		ctl &= ~PCI_EXP_DEVCTL_READRQ;
1097
		ctl |= (2 << 12);
1098
		pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
1099
	}
1100
}
1101
 
3764 Serge 1102
static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1103
{
1104
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1105
		return true;
1106
	else
1107
		return false;
1108
}
1109
 
1110
static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1111
{
1112
	u32 pos1, pos2;
1113
 
1114
	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1115
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1116
 
1117
	if (pos1 != pos2)
1118
		return true;
1119
	else
1120
		return false;
1121
}
1122
 
2997 Serge 1123
/**
1124
 * dce4_wait_for_vblank - vblank wait asic callback.
1125
 *
1126
 * @rdev: radeon_device pointer
1127
 * @crtc: crtc to wait for vblank on
1128
 *
1129
 * Wait for vblank on the requested crtc (evergreen+).
1130
 */
1131
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1132
{
3764 Serge 1133
	unsigned i = 0;
2997 Serge 1134
 
1135
	if (crtc >= rdev->num_crtc)
1136
		return;
1137
 
3764 Serge 1138
	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1139
		return;
1140
 
1141
	/* depending on when we hit vblank, we may be close to active; if so,
1142
	 * wait for another frame.
1143
	 */
1144
	while (dce4_is_in_vblank(rdev, crtc)) {
1145
		if (i++ % 100 == 0) {
1146
			if (!dce4_is_counter_moving(rdev, crtc))
2997 Serge 1147
				break;
1148
		}
3764 Serge 1149
	}
1150
 
1151
	while (!dce4_is_in_vblank(rdev, crtc)) {
1152
		if (i++ % 100 == 0) {
1153
			if (!dce4_is_counter_moving(rdev, crtc))
2997 Serge 1154
				break;
1155
		}
1156
	}
1157
}
1158
 
1159
 
1160
/**
1161
 * evergreen_page_flip - pageflip callback.
1162
 *
1163
 * @rdev: radeon_device pointer
1164
 * @crtc_id: crtc to cleanup pageflip on
1165
 * @crtc_base: new address of the crtc (GPU MC address)
1166
 *
1167
 * Does the actual pageflip (evergreen+).
1168
 * During vblank we take the crtc lock and wait for the update_pending
1169
 * bit to go high, when it does, we release the lock, and allow the
1170
 * double buffered update to take place.
1171
 * Returns the current update pending status.
1172
 */
1990 serge 1173
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock */
	/* The lock must be held while both surface addresses are rewritten
	 * so the hardware latches them atomically at the next vblank. */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high. */
	/* Bounded busy-wait (rdev->usec_timeout iterations of 1 us); if the
	 * bit never rises we fall through and unlock anyway. */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
	}
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}
1209
 
1210
/* get temperature in millidegrees */
1211
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper reports a raw ADC value plus a signed 9-bit offset
		 * stored in CG_THERMAL_CTRL. */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* bit 8 of toffset is its sign bit (two's complement in 9 bits) */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* clamp out-of-range readings, otherwise sign-extend the
		 * 9-bit field by hand (|= ~0x1ff fills the upper bits when
		 * bit 8 is set). */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* reading is in half-degrees: convert to millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1248
 
1249
int sumo_get_temp(struct radeon_device *rdev)
1250
{
1251
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1252
	int actual_temp = temp - 49;
1253
 
1254
	return actual_temp * 1000;
1255
}
1256
 
2997 Serge 1257
/**
1258
 * sumo_pm_init_profile - Initialize power profiles callback.
1259
 *
1260
 * @rdev: radeon_device pointer
1261
 *
1262
 * Initialize the power states used in profile mode
1263
 * (sumo, trinity, SI).
1264
 * Used for profile mode only.
1265
 */
1266
void sumo_pm_init_profile(struct radeon_device *rdev)
1267
{
1268
	int idx;
1269
 
1270
	/* default */
1271
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1272
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1273
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1274
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1275
 
1276
	/* low,mid sh/mh */
1277
	if (rdev->flags & RADEON_IS_MOBILITY)
1278
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1279
	else
1280
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1281
 
1282
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1283
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1284
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1285
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1286
 
1287
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1288
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1289
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1290
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1291
 
1292
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1293
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1294
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1295
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1296
 
1297
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1298
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1299
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1300
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1301
 
1302
	/* high sh/mh */
1303
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1304
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1305
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1306
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1307
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1308
		rdev->pm.power_state[idx].num_clock_modes - 1;
1309
 
1310
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1311
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1312
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1313
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1314
		rdev->pm.power_state[idx].num_clock_modes - 1;
1315
}
1316
 
1317
/**
3764 Serge 1318
 * btc_pm_init_profile - Initialize power profiles callback.
1319
 *
1320
 * @rdev: radeon_device pointer
1321
 *
1322
 * Initialize the power states used in profile mode
1323
 * (BTC, cayman).
1324
 * Used for profile mode only.
1325
 */
1326
void btc_pm_init_profile(struct radeon_device *rdev)
1327
{
1328
	int idx;
1329
 
1330
	/* default */
1331
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1332
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1333
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1334
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1335
	/* starting with BTC, there is one state that is used for both
1336
	 * MH and SH.  Difference is that we always use the high clock index for
1337
	 * mclk.
1338
	 */
1339
	if (rdev->flags & RADEON_IS_MOBILITY)
1340
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1341
	else
1342
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1343
	/* low sh */
1344
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1345
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1346
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1347
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1348
	/* mid sh */
1349
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1350
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1351
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1352
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1353
	/* high sh */
1354
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1355
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1356
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1357
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1358
	/* low mh */
1359
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1360
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1361
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1362
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1363
	/* mid mh */
1364
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1365
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1366
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1367
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1368
	/* high mh */
1369
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1370
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1371
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1372
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1373
}
1374
 
1375
/**
2997 Serge 1376
 * evergreen_pm_misc - set additional pm hw parameters callback.
1377
 *
1378
 * @rdev: radeon_device pointer
1379
 *
1380
 * Set non-clock parameters associated with a power state
1381
 * (voltage, etc.) (evergreen+).
1382
 */
1990 serge 1383
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff01 is a flag rather than an actual voltage */
		if (voltage->voltage == 0xff01)
			return;
		/* only program vddc when it differs from the current setting */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		/* NOTE: this redirects the voltage pointer to the HIGH_MH
		 * clock mode's voltage entry, so the vddci checks below act
		 * on that entry, not the originally requested one. */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff01 is a flag rather than an actual voltage */
		if (voltage->vddci == 0xff01)
			return;
		/* only program vddci when it differs from the current setting */
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1422
 
2997 Serge 1423
/**
1424
 * evergreen_pm_prepare - pre-power state change callback.
1425
 *
1426
 * @rdev: radeon_device pointer
1427
 *
1428
 * Prepare for a power state change (evergreen+).
1429
 */
1990 serge 1430
void evergreen_pm_prepare(struct radeon_device *rdev)
1431
{
1432
	struct drm_device *ddev = rdev->ddev;
1433
	struct drm_crtc *crtc;
1434
	struct radeon_crtc *radeon_crtc;
1435
	u32 tmp;
1436
 
1437
	/* disable any active CRTCs */
1438
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1439
		radeon_crtc = to_radeon_crtc(crtc);
1440
		if (radeon_crtc->enabled) {
1441
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1442
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1443
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1444
		}
1445
	}
1446
}
1447
 
2997 Serge 1448
/**
1449
 * evergreen_pm_finish - post-power state change callback.
1450
 *
1451
 * @rdev: radeon_device pointer
1452
 *
1453
 * Clean up after a power state change (evergreen+).
1454
 */
1990 serge 1455
void evergreen_pm_finish(struct radeon_device *rdev)
1456
{
1457
	struct drm_device *ddev = rdev->ddev;
1458
	struct drm_crtc *crtc;
1459
	struct radeon_crtc *radeon_crtc;
1460
	u32 tmp;
1461
 
1462
	/* enable any active CRTCs */
1463
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1464
		radeon_crtc = to_radeon_crtc(crtc);
1465
		if (radeon_crtc->enabled) {
1466
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1467
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1468
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1469
		}
1470
	}
1471
}
1472
 
2997 Serge 1473
/**
1474
 * evergreen_hpd_sense - hpd sense callback.
1475
 *
1476
 * @rdev: radeon_device pointer
1477
 * @hpd: hpd (hotplug detect) pin
1478
 *
1479
 * Checks if a digital monitor is connected (evergreen+).
1480
 * Returns true if connected, false if not connected.
1481
 */
1430 serge 1482
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1483
{
1484
	bool connected = false;
1963 serge 1485
 
1486
	switch (hpd) {
1487
	case RADEON_HPD_1:
1488
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1489
			connected = true;
1490
		break;
1491
	case RADEON_HPD_2:
1492
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1493
			connected = true;
1494
		break;
1495
	case RADEON_HPD_3:
1496
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1497
			connected = true;
1498
		break;
1499
	case RADEON_HPD_4:
1500
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1501
			connected = true;
1502
		break;
1503
	case RADEON_HPD_5:
1504
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1505
			connected = true;
1506
		break;
1507
	case RADEON_HPD_6:
1508
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1509
			connected = true;
1510
			break;
1511
	default:
1512
		break;
1513
	}
1514
 
1430 serge 1515
	return connected;
1516
}
1517
 
2997 Serge 1518
/**
1519
 * evergreen_hpd_set_polarity - hpd set polarity callback.
1520
 *
1521
 * @rdev: radeon_device pointer
1522
 * @hpd: hpd (hotplug detect) pin
1523
 *
1524
 * Set the polarity of the hpd pin (evergreen+).
1525
 */
1430 serge 1526
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1527
				enum radeon_hpd_id hpd)
1528
{
1963 serge 1529
	u32 tmp;
1530
	bool connected = evergreen_hpd_sense(rdev, hpd);
1531
 
1532
	switch (hpd) {
1533
	case RADEON_HPD_1:
1534
		tmp = RREG32(DC_HPD1_INT_CONTROL);
1535
		if (connected)
1536
			tmp &= ~DC_HPDx_INT_POLARITY;
1537
		else
1538
			tmp |= DC_HPDx_INT_POLARITY;
1539
		WREG32(DC_HPD1_INT_CONTROL, tmp);
1540
		break;
1541
	case RADEON_HPD_2:
1542
		tmp = RREG32(DC_HPD2_INT_CONTROL);
1543
		if (connected)
1544
			tmp &= ~DC_HPDx_INT_POLARITY;
1545
		else
1546
			tmp |= DC_HPDx_INT_POLARITY;
1547
		WREG32(DC_HPD2_INT_CONTROL, tmp);
1548
		break;
1549
	case RADEON_HPD_3:
1550
		tmp = RREG32(DC_HPD3_INT_CONTROL);
1551
		if (connected)
1552
			tmp &= ~DC_HPDx_INT_POLARITY;
1553
		else
1554
			tmp |= DC_HPDx_INT_POLARITY;
1555
		WREG32(DC_HPD3_INT_CONTROL, tmp);
1556
		break;
1557
	case RADEON_HPD_4:
1558
		tmp = RREG32(DC_HPD4_INT_CONTROL);
1559
		if (connected)
1560
			tmp &= ~DC_HPDx_INT_POLARITY;
1561
		else
1562
			tmp |= DC_HPDx_INT_POLARITY;
1563
		WREG32(DC_HPD4_INT_CONTROL, tmp);
1564
		break;
1565
	case RADEON_HPD_5:
1566
		tmp = RREG32(DC_HPD5_INT_CONTROL);
1567
		if (connected)
1568
			tmp &= ~DC_HPDx_INT_POLARITY;
1569
		else
1570
			tmp |= DC_HPDx_INT_POLARITY;
1571
		WREG32(DC_HPD5_INT_CONTROL, tmp);
1572
			break;
1573
	case RADEON_HPD_6:
1574
		tmp = RREG32(DC_HPD6_INT_CONTROL);
1575
		if (connected)
1576
			tmp &= ~DC_HPDx_INT_POLARITY;
1577
		else
1578
			tmp |= DC_HPDx_INT_POLARITY;
1579
		WREG32(DC_HPD6_INT_CONTROL, tmp);
1580
		break;
1581
	default:
1582
		break;
1583
	}
1430 serge 1584
}
1585
 
2997 Serge 1586
/**
1587
 * evergreen_hpd_init - hpd setup callback.
1588
 *
1589
 * @rdev: radeon_device pointer
1590
 *
1591
 * Setup the hpd pins used by the card (evergreen+).
1592
 * Enable the pin, set the polarity, and enable the hpd interrupts.
1593
 */
1430 serge 1594
void evergreen_hpd_init(struct radeon_device *rdev)
1595
{
1963 serge 1596
	struct drm_device *dev = rdev->ddev;
1597
	struct drm_connector *connector;
2997 Serge 1598
	unsigned enabled = 0;
1963 serge 1599
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1600
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1601
 
1602
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1603
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
3764 Serge 1604
 
1605
		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1606
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1607
			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1608
			 * aux dp channel on imac and help (but not completely fix)
1609
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1610
			 * also avoid interrupt storms during dpms.
1611
			 */
1612
			continue;
1613
		}
1963 serge 1614
		switch (radeon_connector->hpd.hpd) {
1615
		case RADEON_HPD_1:
1616
			WREG32(DC_HPD1_CONTROL, tmp);
1617
			break;
1618
		case RADEON_HPD_2:
1619
			WREG32(DC_HPD2_CONTROL, tmp);
1620
			break;
1621
		case RADEON_HPD_3:
1622
			WREG32(DC_HPD3_CONTROL, tmp);
1623
			break;
1624
		case RADEON_HPD_4:
1625
			WREG32(DC_HPD4_CONTROL, tmp);
1626
			break;
1627
		case RADEON_HPD_5:
1628
			WREG32(DC_HPD5_CONTROL, tmp);
1629
			break;
1630
		case RADEON_HPD_6:
1631
			WREG32(DC_HPD6_CONTROL, tmp);
1632
			break;
1633
		default:
1634
			break;
1635
		}
2997 Serge 1636
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1637
		enabled |= 1 << radeon_connector->hpd.hpd;
1963 serge 1638
	}
2997 Serge 1639
//   radeon_irq_kms_enable_hpd(rdev, enabled);
1430 serge 1640
}
1641
 
2997 Serge 1642
/**
1643
 * evergreen_hpd_fini - hpd tear down callback.
1644
 *
1645
 * @rdev: radeon_device pointer
1646
 *
1647
 * Tear down the hpd pins used by the card (evergreen+).
1648
 * Disable the hpd interrupts.
1649
 */
1963 serge 1650
void evergreen_hpd_fini(struct radeon_device *rdev)
1651
{
1652
	struct drm_device *dev = rdev->ddev;
1653
	struct drm_connector *connector;
2997 Serge 1654
	unsigned disabled = 0;
1430 serge 1655
 
1963 serge 1656
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1657
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1658
		switch (radeon_connector->hpd.hpd) {
1659
		case RADEON_HPD_1:
1660
			WREG32(DC_HPD1_CONTROL, 0);
1661
			break;
1662
		case RADEON_HPD_2:
1663
			WREG32(DC_HPD2_CONTROL, 0);
1664
			break;
1665
		case RADEON_HPD_3:
1666
			WREG32(DC_HPD3_CONTROL, 0);
1667
			break;
1668
		case RADEON_HPD_4:
1669
			WREG32(DC_HPD4_CONTROL, 0);
1670
			break;
1671
		case RADEON_HPD_5:
1672
			WREG32(DC_HPD5_CONTROL, 0);
1673
			break;
1674
		case RADEON_HPD_6:
1675
			WREG32(DC_HPD6_CONTROL, 0);
1676
			break;
1677
		default:
1678
			break;
1679
		}
2997 Serge 1680
		disabled |= 1 << radeon_connector->hpd.hpd;
1963 serge 1681
	}
2997 Serge 1682
//   radeon_irq_kms_disable_hpd(rdev, disabled);
1430 serge 1683
}
1684
 
1986 serge 1685
/* watermark setup */
1963 serge 1686
 
1986 serge 1687
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1688
					struct radeon_crtc *radeon_crtc,
1689
					struct drm_display_mode *mode,
1690
					struct drm_display_mode *other_mode)
1691
{
1692
	u32 tmp;
1693
	/*
1694
	 * Line Buffer Setup
1695
	 * There are 3 line buffers, each one shared by 2 display controllers.
1696
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1697
	 * the display controllers.  The paritioning is done via one of four
1698
	 * preset allocations specified in bits 2:0:
1699
	 * first display controller
1700
	 *  0 - first half of lb (3840 * 2)
1701
	 *  1 - first 3/4 of lb (5760 * 2)
1702
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
1703
	 *  3 - first 1/4 of lb (1920 * 2)
1704
	 * second display controller
1705
	 *  4 - second half of lb (3840 * 2)
1706
	 *  5 - second 3/4 of lb (5760 * 2)
1707
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
1708
	 *  7 - last 1/4 of lb (1920 * 2)
1709
	 */
1710
	/* this can get tricky if we have two large displays on a paired group
1711
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
1712
	 * non-linked crtcs for maximum line buffer allocation.
1713
	 */
1714
	if (radeon_crtc->base.enabled && mode) {
1715
		if (other_mode)
1716
			tmp = 0; /* 1/2 */
1717
		else
1718
			tmp = 2; /* whole */
1719
	} else
1720
		tmp = 0;
1963 serge 1721
 
1986 serge 1722
	/* second controller of the pair uses second half of the lb */
1723
	if (radeon_crtc->crtc_id % 2)
1724
		tmp += 4;
1725
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1726
 
1727
	if (radeon_crtc->base.enabled && mode) {
1728
		switch (tmp) {
1729
		case 0:
1730
		case 4:
1731
		default:
1732
			if (ASIC_IS_DCE5(rdev))
1733
				return 4096 * 2;
1734
			else
1735
				return 3840 * 2;
1736
		case 1:
1737
		case 5:
1738
			if (ASIC_IS_DCE5(rdev))
1739
				return 6144 * 2;
1740
			else
1741
				return 5760 * 2;
1742
		case 2:
1743
		case 6:
1744
			if (ASIC_IS_DCE5(rdev))
1745
				return 8192 * 2;
1746
			else
1747
				return 7680 * 2;
1748
		case 3:
1749
		case 7:
1750
			if (ASIC_IS_DCE5(rdev))
1751
				return 2048 * 2;
1752
			else
1753
				return 1920 * 2;
1754
		}
1755
	}
1756
 
1757
	/* controller not enabled, so no lb used */
1758
	return 0;
1759
}
1760
 
2997 Serge 1761
u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1986 serge 1762
{
1763
	u32 tmp = RREG32(MC_SHARED_CHMAP);
1764
 
1765
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1766
	case 0:
1767
	default:
1768
		return 1;
1769
	case 1:
1770
		return 2;
1771
	case 2:
1772
		return 4;
1773
	case 3:
1774
		return 8;
1775
	}
1776
}
1777
 
1778
/* Inputs for the evergreen display watermark calculations below. */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1793
 
1794
static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1795
{
1796
	/* Calculate DRAM Bandwidth and the part allocated to display. */
1797
	fixed20_12 dram_efficiency; /* 0.7 */
1798
	fixed20_12 yclk, dram_channels, bandwidth;
1799
	fixed20_12 a;
1800
 
1801
	a.full = dfixed_const(1000);
1802
	yclk.full = dfixed_const(wm->yclk);
1803
	yclk.full = dfixed_div(yclk, a);
1804
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1805
	a.full = dfixed_const(10);
1806
	dram_efficiency.full = dfixed_const(7);
1807
	dram_efficiency.full = dfixed_div(dram_efficiency, a);
1808
	bandwidth.full = dfixed_mul(dram_channels, yclk);
1809
	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1810
 
1811
	return dfixed_trunc(bandwidth);
1812
}
1813
 
1814
static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1815
{
1816
	/* Calculate DRAM Bandwidth and the part allocated to display. */
1817
	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1818
	fixed20_12 yclk, dram_channels, bandwidth;
1819
	fixed20_12 a;
1820
 
1821
	a.full = dfixed_const(1000);
1822
	yclk.full = dfixed_const(wm->yclk);
1823
	yclk.full = dfixed_div(yclk, a);
1824
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1825
	a.full = dfixed_const(10);
1826
	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1827
	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1828
	bandwidth.full = dfixed_mul(dram_channels, yclk);
1829
	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1830
 
1831
	return dfixed_trunc(bandwidth);
1832
}
1833
 
1834
static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1835
{
1836
	/* Calculate the display Data return Bandwidth */
1837
	fixed20_12 return_efficiency; /* 0.8 */
1838
	fixed20_12 sclk, bandwidth;
1839
	fixed20_12 a;
1840
 
1841
	a.full = dfixed_const(1000);
1842
	sclk.full = dfixed_const(wm->sclk);
1843
	sclk.full = dfixed_div(sclk, a);
1844
	a.full = dfixed_const(10);
1845
	return_efficiency.full = dfixed_const(8);
1846
	return_efficiency.full = dfixed_div(return_efficiency, a);
1847
	a.full = dfixed_const(32);
1848
	bandwidth.full = dfixed_mul(a, sclk);
1849
	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1850
 
1851
	return dfixed_trunc(bandwidth);
1852
}
1853
 
1854
static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1855
{
1856
	/* Calculate the DMIF Request Bandwidth */
1857
	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1858
	fixed20_12 disp_clk, bandwidth;
1859
	fixed20_12 a;
1860
 
1861
	a.full = dfixed_const(1000);
1862
	disp_clk.full = dfixed_const(wm->disp_clk);
1863
	disp_clk.full = dfixed_div(disp_clk, a);
1864
	a.full = dfixed_const(10);
1865
	disp_clk_request_efficiency.full = dfixed_const(8);
1866
	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1867
	a.full = dfixed_const(32);
1868
	bandwidth.full = dfixed_mul(a, disp_clk);
1869
	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1870
 
1871
	return dfixed_trunc(bandwidth);
1872
}
1873
 
1874
/* Peak bandwidth the display may use: the display can temporarily
 * exceed its average allocation, but is still bounded by the smallest
 * of the DRAM, data-return and DMIF-request bandwidths. */
static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
{
	u32 bw = evergreen_dram_bandwidth(wm);

	bw = min(bw, evergreen_data_return_bandwidth(wm));
	bw = min(bw, evergreen_dmif_request_bandwidth(wm));

	return bw;
}
1883
 
1884
static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
1885
{
1886
	/* Calculate the display mode Average Bandwidth
1887
	 * DisplayMode should contain the source and destination dimensions,
1888
	 * timing, etc.
1889
	 */
1890
	fixed20_12 bpp;
1891
	fixed20_12 line_time;
1892
	fixed20_12 src_width;
1893
	fixed20_12 bandwidth;
1894
	fixed20_12 a;
1895
 
1896
	a.full = dfixed_const(1000);
1897
	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1898
	line_time.full = dfixed_div(line_time, a);
1899
	bpp.full = dfixed_const(wm->bytes_per_pixel);
1900
	src_width.full = dfixed_const(wm->src_width);
1901
	bandwidth.full = dfixed_mul(src_width, bpp);
1902
	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1903
	bandwidth.full = dfixed_div(bandwidth, line_time);
1904
 
1905
	return dfixed_trunc(bandwidth);
1906
}
1907
 
1908
/* Compute the latency watermark (in ns) for one head: the worst-case
 * time the display can tolerate before data must return from memory.
 * Combines MC latency, chunk/cursor return time for the other heads,
 * DC pipe latency and, if the line buffer cannot fill a line within
 * active time, the extra line-fill time. */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* time to return one 512-dword chunk / one 128x4 cursor line pair */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	/* no active heads: nothing to hide latency for */
	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling / many scaler taps / interlace need up to
	 * 4 source lines per destination line, otherwise 2 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* a = this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* b = bandwidth the display clock can consume (MHz * bytes/pixel) */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line-buffer fill rate is limited by the slower of the two */
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill the worst-case number of source lines at that rate */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the LB fills within the active period, the base latency is
	 * enough; otherwise add the shortfall */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
1960
 
1961
static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1962
{
1963
	if (evergreen_average_bandwidth(wm) <=
1964
	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
1965
		return true;
1966
	else
1967
		return false;
1968
};
1969
 
1970
static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
1971
{
1972
	if (evergreen_average_bandwidth(wm) <=
1973
	    (evergreen_available_bandwidth(wm) / wm->num_heads))
1974
		return true;
1975
	else
1976
		return false;
1977
};
1978
 
1979
static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
1980
{
1981
	u32 lb_partitions = wm->lb_size / wm->src_width;
1982
	u32 line_time = wm->active_time + wm->blank_time;
1983
	u32 latency_tolerant_lines;
1984
	u32 latency_hiding;
1985
	fixed20_12 a;
1986
 
1987
	a.full = dfixed_const(1);
1988
	if (wm->vsc.full > a.full)
1989
		latency_tolerant_lines = 1;
1990
	else {
1991
		if (lb_partitions <= (wm->vtaps + 1))
1992
			latency_tolerant_lines = 1;
1993
		else
1994
			latency_tolerant_lines = 2;
1995
	}
1996
 
1997
	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
1998
 
1999
	if (evergreen_latency_watermark(wm) <= latency_hiding)
2000
		return true;
2001
	else
2002
		return false;
2003
}
2004
 
2005
/* Compute and program the DCE display watermarks and priority marks for
 * one CRTC.  Derives the wm parameters from the current mode and power
 * state, computes latency watermarks A and B, optionally forces display
 * priority high when bandwidth checks fail, then writes the watermark
 * and priority registers. */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;

		wm.yclk = rdev->pm.current_mclk * 10;
		wm.sclk = rdev->pm.current_sclk * 10;
		wm.disp_clk = mode->clock;
		wm.src_width = mode->crtc_hdisplay;
		wm.active_time = mode->crtc_hdisplay * pixel_period;
		wm.blank_time = line_time - wm.active_time;
		wm.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm.interlaced = true;
		wm.vsc = radeon_crtc->vsc;
		wm.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm.vtaps = 2;
		wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm.lb_size = lb_size;
		wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
		wm.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
		/* set for low clocks */
		/* wm.yclk = low clk; wm.sclk = low clk */
		/* NOTE(review): the low clocks are never actually written into
		 * wm, so watermark B is computed with the same parameters as
		 * watermark A — confirm whether that is intentional. */
		latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
		    !evergreen_check_latency_hiding(&wm) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A = wm_a * disp_clk(MHz) * hsc / 16, truncated */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B, same computation with watermark B */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

}
2112
 
2997 Serge 2113
/**
 * evergreen_bandwidth_update - update display watermarks callback.
 *
 * @rdev: radeon_device pointer
 *
 * Update the display watermarks based on the requested mode(s)
 * (evergreen+).  Counts the enabled heads, then walks the CRTCs in
 * pairs (each pair shares a line buffer), splitting the line buffer
 * between the two heads and programming each head's watermarks.
 */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	struct drm_display_mode *mode0 = NULL;
	struct drm_display_mode *mode1 = NULL;
	u32 num_heads = 0, lb_size;
	int i;

	radeon_update_display_priority(rdev);

	/* count active heads */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i]->base.enabled)
			num_heads++;
	}
	/* program each CRTC pair; lb_size is that head's share of the
	 * pair's shared line buffer */
	for (i = 0; i < rdev->num_crtc; i += 2) {
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
	}
}
2143
 
2997 Serge 2144
/**
2145
 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2146
 *
2147
 * @rdev: radeon_device pointer
2148
 *
2149
 * Wait for the MC (memory controller) to be idle.
2150
 * (evergreen+).
2151
 * Returns 0 if the MC is idle, -1 if not.
2152
 */
1963 serge 2153
int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
1430 serge 2154
{
2155
	unsigned i;
2156
	u32 tmp;
2157
 
2158
	for (i = 0; i < rdev->usec_timeout; i++) {
2159
		/* read MC_STATUS */
2160
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2161
		if (!tmp)
2162
			return 0;
2163
		udelay(1);
2164
	}
2165
	return -1;
2166
}
2167
 
2168
/*
2169
 * GART
2170
 */
1963 serge 2171
/* Flush the VM L1/L2 TLBs after GART table updates: flush the HDP
 * cache, issue a VM context 0 flush request and poll for completion. */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush the HDP cache so table writes reach memory first */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		/* response type 2 indicates the flush failed */
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		/* any other non-zero response means the flush completed */
		if (tmp) {
			return;
		}
		udelay(1);
	}
}
2193
 
2997 Serge 2194
/* Enable the PCIE GART: pin the page table in VRAM, configure the VM
 * L2 cache and L1 TLBs, point VM context 0 at the GART range and flush
 * the TLBs.  Returns 0 on success or a negative error code. */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	/* IGPs (fusion) use the FUS_* register addresses for the MD TLBs */
	if (rdev->flags & RADEON_IS_IGP) {
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these chips have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* map VM context 0 over the GTT aperture, faults go to the
	 * dummy page */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2252
 
2997 Serge 2253
/* Disable the PCIE GART: turn off both VM contexts, put the L2 cache
 * and L1 TLBs into a disabled configuration and unpin the GART table
 * from VRAM. */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2277
 
2997 Serge 2278
/* Tear down the PCIE GART: disable the VM/TLB hardware, free the GART
 * table object in VRAM, then release the common GART state. */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2284
 
2285
 
2997 Serge 2286
/* Configure the VM hardware for AGP operation: enable the L2 cache and
 * L1 TLBs for system-aperture access but leave both VM contexts
 * disabled (no page-table translation). */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no translation contexts for AGP */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2311
 
1963 serge 2312
/* Quiesce the display hardware before reprogramming the memory
 * controller: save VGA state, blank/disable every active CRTC, wait
 * for the MC to go idle, black out the MC and lock the double-buffered
 * registers.  State needed by evergreen_mc_resume() is stored in @save. */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

	/* disable VGA render */
	WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE(review): crtc_enabled[i] is forced false here, so
			 * the lock loop below and the unlock/re-enable paths in
			 * evergreen_mc_resume() are effectively skipped for this
			 * CRTC — confirm this local patch is intentional. */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2397
 
1963 serge 2398
/* Undo evergreen_mc_stop() after the memory controller has been
 * reprogrammed: repoint the CRTC/VGA surfaces at the (possibly moved)
 * VRAM base, unlock double-buffered registers, lift the MC blackout
 * and unblank the CRTCs that were saved as enabled. */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the surface address update to take effect */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	/* unblank the CRTCs that were active before the stop */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
	/* Unlock vga access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2485
 
1963 serge 2486
/* Program the memory-controller address configuration: stop the
 * display, set up the system aperture / FB location / AGP aperture
 * registers, then resume the display.  Wrapped by mc_stop/mc_resume so
 * nothing accesses VRAM while the apertures move. */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must cover both VRAM and the AGP window */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: top 16 bits = end, bottom 16 bits = start (>>24) */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* disable the AGP aperture (BOT > TOP) */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2562
 
2563
/*
2564
 * CP.
2565
 */
1986 serge 2566
/* Schedule an indirect buffer on the GFX ring: switch the CP into
 * DX10/11 mode, optionally emit a next-rptr write (to the rptr save
 * register or the writeback buffer), then emit the INDIRECT_BUFFER
 * packet pointing at the IB. */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this packet + 4 for the IB packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this packet + 4 for the IB packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
1963 serge 2599
 
1986 serge 2600
 
1963 serge 2601
/* Upload the PFP and ME microcode images (already fetched into
 * rdev->pfp_fw / rdev->me_fw) into the CP ucode RAMs.  The CP must be
 * stopped first.  Returns 0 on success, -EINVAL if firmware is missing. */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* firmware images are big-endian dword streams */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset the ucode RAM pointers */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2632
 
1963 serge 2633
/*
 * Un-halt the command processor and prime it: emit the ME_INITIALIZE
 * packet, then replay the golden "clear state" context plus a few
 * default register values on the GFX ring.
 * Returns 0 on success or the radeon_ring_lock() error code.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* ME_INITIALIZE: header + 6 payload dwords = 7 ring dwords */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	/* release the CP from halt (all engines) */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* golden state dwords + 19 dwords of packets emitted below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	/* raw SET_CONTEXT_REG packet starting at reg offset 0x316 */
	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring);

	return 0;
}
2698
 
2997 Serge 2699
/*
 * Soft-reset the command processor and bring the GFX ring buffer back up:
 * program ring size/pointers and writeback addresses, restart the CP via
 * evergreen_cp_start(), then verify the ring with a ring test.
 * Returns 0 on success or the ring-test error code.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* read back to post the write */
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size (log2 of ring size in dwords) */
	rb_bufsz = drm_order(ring->ring_size / 8);
	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: CP must not DMA the rptr to memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);	/* drop RB_RPTR_WR_ENA again */

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	ring->rptr = RREG32(CP_RB_RPTR);

	/* NOTE(review): evergreen_cp_start()'s return value is ignored here;
	 * a failure would only surface via the ring test below — confirm
	 * whether that is intentional. */
	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
2767
 
2768
/*
2769
 * Core functions
2770
 */
2771
/*
 * One-time 3D-engine setup for Evergreen/NI-class ASICs: fill in the
 * per-family shader configuration in rdev->config.evergreen, derive the
 * tiling config and render-backend map, and program the SQ/SX/PA/VGT
 * hardware defaults.  All values are written via MMIO (WREG32).
 */
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	u32 gb_addr_config;
	u32 mc_shared_chmap, mc_arb_ramcfg;
	u32 sx_debug_1;
	u32 smx_dc_ctl0;
	u32 sq_config;
	u32 sq_lds_resource_mgmt;
	u32 sq_gpr_resource_mgmt_1;
	u32 sq_gpr_resource_mgmt_2;
	u32 sq_gpr_resource_mgmt_3;
	u32 sq_thread_resource_mgmt;
	u32 sq_thread_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_1;
	u32 sq_stack_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_3;
	u32 vgt_cache_invalidation;
	u32 hdp_host_path_cntl, tmp;
	u32 disabled_rb_mask;
	int i, j, num_shader_engines, ps_thread_count;

	/* Per-family shader core limits and golden GB_ADDR_CONFIG value. */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_JUNIPER:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_REDWOOD:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CEDAR:
	default:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_PALM:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		/* SIMD count varies with the specific Sumo PCI device id */
		if (rdev->pdev->device == 0x9648)
			rdev->config.evergreen.max_simds = 3;
		else if ((rdev->pdev->device == 0x9647) ||
			 (rdev->pdev->device == 0x964a))
			rdev->config.evergreen.max_simds = 4;
		else
			rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO2:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_BARTS:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 7;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_TURKS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 6;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CAICOS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
		break;
	}

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

	evergreen_fix_pci_max_read_req_size(rdev);

	/* NOTE(review): mc_shared_chmap is read but not used below — TODO
	 * confirm whether the read is needed for a side effect. */
	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
	else
		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	/* setup tiling info dword.  gb_addr_config is not adequate since it does
	 * not have bank info, so create a custom tiling dword.
	 * bits 3:0   num_pipes
	 * bits 7:4   num_banks
	 * bits 11:8  group_size
	 * bits 15:12 row_size
	 */
	rdev->config.evergreen.tile_config = 0;
	switch (rdev->config.evergreen.max_tile_pipes) {
	case 1:
	default:
		rdev->config.evergreen.tile_config |= (0 << 0);
		break;
	case 2:
		rdev->config.evergreen.tile_config |= (1 << 0);
		break;
	case 4:
		rdev->config.evergreen.tile_config |= (2 << 0);
		break;
	case 8:
		rdev->config.evergreen.tile_config |= (3 << 0);
		break;
	}
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
	if (rdev->flags & RADEON_IS_IGP)
		rdev->config.evergreen.tile_config |= 1 << 4;
	else {
		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
		case 0: /* four banks */
			rdev->config.evergreen.tile_config |= 0 << 4;
			break;
		case 1: /* eight banks */
			rdev->config.evergreen.tile_config |= 1 << 4;
			break;
		case 2: /* sixteen banks */
		default:
			rdev->config.evergreen.tile_config |= 2 << 4;
			break;
		}
	}
	rdev->config.evergreen.tile_config |= 0 << 8;
	rdev->config.evergreen.tile_config |=
		((gb_addr_config & 0x30000000) >> 28) << 12;

	/* NOTE(review): '>>' binds tighter than '&', so this evaluates
	 * gb_addr_config & (NUM_SHADER_ENGINES(3) >> 12).  num_shader_engines
	 * is not referenced again in this function — confirm against upstream
	 * before changing. */
	num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;

	/* Determine the disabled render-backend mask: from efuse straps on
	 * discrete Evergreen, from CC_RB_BACKEND_DISABLE per-SE otherwise. */
	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
		u32 efuse_straps_4;
		u32 efuse_straps_3;

		WREG32(RCU_IND_INDEX, 0x204);
		efuse_straps_4 = RREG32(RCU_IND_DATA);
		WREG32(RCU_IND_INDEX, 0x203);
		efuse_straps_3 = RREG32(RCU_IND_DATA);
		tmp = (((efuse_straps_4 & 0xf) << 4) |
		      ((efuse_straps_3 & 0xf0000000) >> 28));
	} else {
		tmp = 0;
		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
			u32 rb_disable_bitmap;

			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
			tmp <<= 4;
			tmp |= rb_disable_bitmap;
		}
	}
	/* enabled rb are just the one not disabled :) */
	disabled_rb_mask = tmp;
	tmp = 0;
	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
		tmp |= (1 << i);
	/* if all the backends are disabled, fix it up here */
	if ((disabled_rb_mask & tmp) == tmp) {
		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
			disabled_rb_mask &= ~(1 << i);
	}

	/* restore broadcast addressing for subsequent register writes */
	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);

	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
	WREG32(DMA_TILING_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);

	/* compute the render-backend map; single-backend IGPs are special-cased */
	if ((rdev->config.evergreen.max_backends == 1) &&
	    (rdev->flags & RADEON_IS_IGP)) {
		if ((disabled_rb_mask & 3) == 1) {
			/* RB0 disabled, RB1 enabled */
			tmp = 0x11111111;
		} else {
			/* RB1 disabled, RB0 enabled */
			tmp = 0x00000000;
		}
	} else {
		tmp = gb_addr_config & NUM_PIPES_MASK;
		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
	}
	WREG32(GB_BACKEND_MAP, tmp);

	WREG32(CGTS_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_TCC_DISABLE, 0);

	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));

	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
			     SYNC_GRADIENT |
			     SYNC_WALKER |
			     SYNC_ALIGNER));

	sx_debug_1 = RREG32(SX_DEBUG_1);
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
	WREG32(SX_DEBUG_1, sx_debug_1);

	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);

	if (rdev->family <= CHIP_SUMO2)
		WREG32(SMX_SAR_CTL0, 0x00010000);

	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);
	WREG32(SPI_CONFIG_CNTL, 0);
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
				  FETCH_FIFO_HIWATER(0x4) |
				  DONE_FIFO_HIWATER(0xe0) |
				  ALU_UPDATE_FIFO_HIWATER(0x8)));

	/* shader type priorities: PS highest, ES lowest */
	sq_config = RREG32(SQ_CONFIG);
	sq_config &= ~(PS_PRIO(3) |
		       VS_PRIO(3) |
		       GS_PRIO(3) |
		       ES_PRIO(3));
	sq_config |= (VC_ENABLE |
		      EXPORT_SRC_C |
		      PS_PRIO(0) |
		      VS_PRIO(1) |
		      GS_PRIO(2) |
		      ES_PRIO(3));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;
		break;
	default:
		break;
	}

	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

	/* split the GPR file (minus 4*2 clause temps) across shader types */
	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
		ps_thread_count = 96;
		break;
	default:
		ps_thread_count = 128;
		break;
	}

	/* remaining threads are split evenly (in multiples of 8) over 6 types */
	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
		break;
	default:
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
		break;
	}
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
	WREG32(VGT_OUT_DEALLOC_CNTL, 16);

	/* disable all CB performance counters */
	WREG32(CB_PERF_CTR0_SEL_0, 0);
	WREG32(CB_PERF_CTR0_SEL_1, 0);
	WREG32(CB_PERF_CTR1_SEL_0, 0);
	WREG32(CB_PERF_CTR1_SEL_1, 0);
	WREG32(CB_PERF_CTR2_SEL_0, 0);
	WREG32(CB_PERF_CTR2_SEL_1, 0);
	WREG32(CB_PERF_CTR3_SEL_0, 0);
	WREG32(CB_PERF_CTR3_SEL_1, 0);

	/* clear render buffer base addresses */
	WREG32(CB_COLOR0_BASE, 0);
	WREG32(CB_COLOR1_BASE, 0);
	WREG32(CB_COLOR2_BASE, 0);
	WREG32(CB_COLOR3_BASE, 0);
	WREG32(CB_COLOR4_BASE, 0);
	WREG32(CB_COLOR5_BASE, 0);
	WREG32(CB_COLOR6_BASE, 0);
	WREG32(CB_COLOR7_BASE, 0);
	WREG32(CB_COLOR8_BASE, 0);
	WREG32(CB_COLOR9_BASE, 0);
	WREG32(CB_COLOR10_BASE, 0);
	WREG32(CB_COLOR11_BASE, 0);

	/* set the shader const cache sizes to 0 */
	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
		WREG32(i, 0);
	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
		WREG32(i, 0);

	tmp = RREG32(HDP_MISC_CNTL);
	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
	WREG32(HDP_MISC_CNTL, tmp);

	/* read-modify-write to flush HDP host path setup */
	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	udelay(50);
}
3336
 
3337
/*
 * Probe the memory controller: determine VRAM channel width/count,
 * aperture base/size and total VRAM size, then place VRAM and GTT in
 * the GPU address space and refresh bandwidth info.
 * Always returns 0.
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	/* Fusion IGPs expose the ARB RAMCFG at a different register */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	/* channel size in bits, decoded from RAMCFG */
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
3395
 
3764 Serge 3396
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
1430 serge 3397
{
3764 Serge 3398
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
1963 serge 3399
		RREG32(GRBM_STATUS));
3764 Serge 3400
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
1963 serge 3401
		RREG32(GRBM_STATUS_SE0));
3764 Serge 3402
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
1963 serge 3403
		RREG32(GRBM_STATUS_SE1));
3764 Serge 3404
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
1963 serge 3405
		RREG32(SRBM_STATUS));
3764 Serge 3406
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3407
		RREG32(SRBM_STATUS2));
2997 Serge 3408
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3409
		RREG32(CP_STALLED_STAT1));
3410
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3411
		RREG32(CP_STALLED_STAT2));
3412
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3413
		RREG32(CP_BUSY_STAT));
3414
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3415
		RREG32(CP_STAT));
3764 Serge 3416
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3417
		RREG32(DMA_STATUS_REG));
3418
	if (rdev->family >= CHIP_CAYMAN) {
3419
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3420
			 RREG32(DMA_STATUS_REG + 0x800));
3421
	}
3422
}
3423
 
3424
bool evergreen_is_display_hung(struct radeon_device *rdev)
3425
{
3426
	u32 crtc_hung = 0;
3427
	u32 crtc_status[6];
3428
	u32 i, j, tmp;
3429
 
3430
	for (i = 0; i < rdev->num_crtc; i++) {
3431
		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3432
			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3433
			crtc_hung |= (1 << i);
3434
		}
3435
	}
3436
 
3437
	for (j = 0; j < 10; j++) {
3438
		for (i = 0; i < rdev->num_crtc; i++) {
3439
			if (crtc_hung & (1 << i)) {
3440
				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3441
				if (tmp != crtc_status[i])
3442
					crtc_hung &= ~(1 << i);
3443
			}
3444
		}
3445
		if (crtc_hung == 0)
3446
			return false;
3447
		udelay(100);
3448
	}
3449
 
3450
	return true;
3451
}
3452
 
3453
static u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3454
{
3455
	u32 reset_mask = 0;
3456
	u32 tmp;
3457
 
3458
	/* GRBM_STATUS */
3459
	tmp = RREG32(GRBM_STATUS);
3460
	if (tmp & (PA_BUSY | SC_BUSY |
3461
		   SH_BUSY | SX_BUSY |
3462
		   TA_BUSY | VGT_BUSY |
3463
		   DB_BUSY | CB_BUSY |
3464
		   SPI_BUSY | VGT_BUSY_NO_DMA))
3465
		reset_mask |= RADEON_RESET_GFX;
3466
 
3467
	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3468
		   CP_BUSY | CP_COHERENCY_BUSY))
3469
		reset_mask |= RADEON_RESET_CP;
3470
 
3471
	if (tmp & GRBM_EE_BUSY)
3472
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3473
 
3474
	/* DMA_STATUS_REG */
3475
	tmp = RREG32(DMA_STATUS_REG);
3476
	if (!(tmp & DMA_IDLE))
3477
		reset_mask |= RADEON_RESET_DMA;
3478
 
3479
	/* SRBM_STATUS2 */
3480
	tmp = RREG32(SRBM_STATUS2);
3481
	if (tmp & DMA_BUSY)
3482
		reset_mask |= RADEON_RESET_DMA;
3483
 
3484
	/* SRBM_STATUS */
3485
	tmp = RREG32(SRBM_STATUS);
3486
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3487
		reset_mask |= RADEON_RESET_RLC;
3488
 
3489
	if (tmp & IH_BUSY)
3490
		reset_mask |= RADEON_RESET_IH;
3491
 
3492
	if (tmp & SEM_BUSY)
3493
		reset_mask |= RADEON_RESET_SEM;
3494
 
3495
	if (tmp & GRBM_RQ_PENDING)
3496
		reset_mask |= RADEON_RESET_GRBM;
3497
 
3498
	if (tmp & VMC_BUSY)
3499
		reset_mask |= RADEON_RESET_VMC;
3500
 
3501
	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3502
		   MCC_BUSY | MCD_BUSY))
3503
		reset_mask |= RADEON_RESET_MC;
3504
 
3505
	if (evergreen_is_display_hung(rdev))
3506
		reset_mask |= RADEON_RESET_DISPLAY;
3507
 
3508
	/* VM_L2_STATUS */
3509
	tmp = RREG32(VM_L2_STATUS);
3510
	if (tmp & L2_BUSY)
3511
		reset_mask |= RADEON_RESET_VMC;
3512
 
3513
	/* Skip MC reset as it's mostly likely not hung, just busy */
3514
	if (reset_mask & RADEON_RESET_MC) {
3515
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3516
		reset_mask &= ~RADEON_RESET_MC;
3517
	}
3518
 
3519
	return reset_mask;
3520
}
3521
 
3522
/**
 * evergreen_gpu_soft_reset - soft-reset the requested GPU blocks
 * @rdev: radeon device
 * @reset_mask: RADEON_RESET_* flags selecting which blocks to reset
 *
 * Halts the CP (and DMA if requested), stops the memory controller,
 * pulses the corresponding GRBM/SRBM soft-reset bits, then resumes the
 * MC.  No-op when @reset_mask is zero.  Status registers are dumped
 * before and after for diagnostics.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 reg;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		reg = RREG32(DMA_RB_CNTL);
		reg &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, reg);
	}

	udelay(50);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the RADEON_RESET_* mask into GRBM/SRBM reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* the MC is shared with the display on IGPs; never reset it there */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		/* pulse the reset bits: set, settle, then clear */
		reg = RREG32(GRBM_SOFT_RESET);
		reg |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", reg);
		WREG32(GRBM_SOFT_RESET, reg);
		reg = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		reg &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, reg);
		reg = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		reg = RREG32(SRBM_SOFT_RESET);
		reg |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", reg);
		WREG32(SRBM_SOFT_RESET, reg);
		reg = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		reg &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, reg);
		reg = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3635
 
1963 serge 3636
int evergreen_asic_reset(struct radeon_device *rdev)
3637
{
3764 Serge 3638
	u32 reset_mask;
3639
 
3640
	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3641
 
3642
	if (reset_mask)
3643
		r600_set_bios_scratch_engine_hung(rdev, true);
3644
 
3645
	evergreen_gpu_soft_reset(rdev, reset_mask);
3646
 
3647
	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3648
 
3649
	if (!reset_mask)
3650
		r600_set_bios_scratch_engine_hung(rdev, false);
3651
 
3652
	return 0;
1963 serge 3653
}
3654
 
3764 Serge 3655
/**
3656
 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3657
 *
3658
 * @rdev: radeon_device pointer
3659
 * @ring: radeon_ring structure holding ring information
3660
 *
3661
 * Check if the GFX engine is locked up.
3662
 * Returns true if the engine appears to be locked up, false if not.
3663
 */
3664
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3665
{
3666
	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3667
 
3668
	if (!(reset_mask & (RADEON_RESET_GFX |
3669
					       RADEON_RESET_COMPUTE |
3670
			    RADEON_RESET_CP))) {
3671
		radeon_ring_lockup_update(ring);
3672
		return false;
3673
	}
3674
	/* force CP activities */
3675
	radeon_ring_force_activity(rdev, ring);
3676
	return radeon_ring_test_lockup(rdev, ring);
3677
}
3678
 
3679
/**
3680
 * evergreen_dma_is_lockup - Check if the DMA engine is locked up
3681
 *
3682
 * @rdev: radeon_device pointer
3683
 * @ring: radeon_ring structure holding ring information
3684
 *
3685
 * Check if the async DMA engine is locked up.
3686
 * Returns true if the engine appears to be locked up, false if not.
3687
 */
3688
bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3689
{
3690
	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3691
 
3692
	if (!(reset_mask & RADEON_RESET_DMA)) {
3693
		radeon_ring_lockup_update(ring);
3694
		return false;
3695
	}
3696
	/* force ring activities */
3697
	radeon_ring_force_activity(rdev, ring);
3698
	return radeon_ring_test_lockup(rdev, ring);
3699
}
3700
 
1963 serge 3701
/* Interrupts */
3702
 
3703
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
3704
{
2997 Serge 3705
	if (crtc >= rdev->num_crtc)
3031 serge 3706
		return 0;
2997 Serge 3707
	else
3708
		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
1963 serge 3709
}
3710
 
3711
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
3712
{
3713
	u32 tmp;
3714
 
2997 Serge 3715
	if (rdev->family >= CHIP_CAYMAN) {
3716
		cayman_cp_int_cntl_setup(rdev, 0,
3717
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3718
		cayman_cp_int_cntl_setup(rdev, 1, 0);
3719
		cayman_cp_int_cntl_setup(rdev, 2, 0);
3192 Serge 3720
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
3721
		WREG32(CAYMAN_DMA1_CNTL, tmp);
2997 Serge 3722
	} else
3031 serge 3723
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3192 Serge 3724
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3725
	WREG32(DMA_CNTL, tmp);
1963 serge 3726
	WREG32(GRBM_INT_CNTL, 0);
3727
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
3728
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
2005 serge 3729
	if (rdev->num_crtc >= 4) {
3031 serge 3730
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
3731
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2005 serge 3732
	}
3733
	if (rdev->num_crtc >= 6) {
3031 serge 3734
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
3735
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1963 serge 3736
	}
3737
 
3738
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
3739
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
2005 serge 3740
	if (rdev->num_crtc >= 4) {
3031 serge 3741
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
3742
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2005 serge 3743
	}
3744
	if (rdev->num_crtc >= 6) {
3031 serge 3745
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
3746
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1963 serge 3747
	}
3748
 
2997 Serge 3749
	/* only one DAC on DCE6 */
3750
	if (!ASIC_IS_DCE6(rdev))
3031 serge 3751
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
1963 serge 3752
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
3753
 
3754
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3755
	WREG32(DC_HPD1_INT_CONTROL, tmp);
3756
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3757
	WREG32(DC_HPD2_INT_CONTROL, tmp);
3758
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3759
	WREG32(DC_HPD3_INT_CONTROL, tmp);
3760
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3761
	WREG32(DC_HPD4_INT_CONTROL, tmp);
3762
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3763
	WREG32(DC_HPD5_INT_CONTROL, tmp);
3764
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3765
	WREG32(DC_HPD6_INT_CONTROL, tmp);
3766
 
3767
}
2005 serge 3768
 
3769
int evergreen_irq_set(struct radeon_device *rdev)
3770
{
3771
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
2997 Serge 3772
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
2005 serge 3773
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
3774
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
3775
	u32 grbm_int_cntl = 0;
3776
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
2997 Serge 3777
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
3192 Serge 3778
	u32 dma_cntl, dma_cntl1 = 0;
2005 serge 3779
 
3780
	if (!rdev->irq.installed) {
3781
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
3782
		return -EINVAL;
3783
	}
3784
	/* don't enable anything if the ih is disabled */
3785
	if (!rdev->ih.enabled) {
3786
		r600_disable_interrupts(rdev);
3787
		/* force the active interrupt state to all disabled */
3788
		evergreen_disable_interrupt_state(rdev);
3789
		return 0;
3790
	}
3791
 
3792
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
3793
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
3794
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
3795
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
3796
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
3797
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
3798
 
2997 Serge 3799
	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3800
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3801
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3802
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3803
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3804
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3805
 
3192 Serge 3806
	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3807
 
2997 Serge 3808
	if (rdev->family >= CHIP_CAYMAN) {
3809
		/* enable CP interrupts on all rings */
3810
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
3811
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
3812
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
3813
		}
3814
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
3815
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
3816
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
3817
		}
3818
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
3819
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
3820
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
3821
		}
3822
	} else {
3823
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
3824
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
3031 serge 3825
			cp_int_cntl |= RB_INT_ENABLE;
3826
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
3827
		}
2005 serge 3828
	}
2997 Serge 3829
 
3192 Serge 3830
	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
3831
		DRM_DEBUG("r600_irq_set: sw int dma\n");
3832
		dma_cntl |= TRAP_ENABLE;
3833
	}
3834
 
3835
	if (rdev->family >= CHIP_CAYMAN) {
3836
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
3837
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
3838
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
3839
			dma_cntl1 |= TRAP_ENABLE;
3840
		}
3841
	}
3842
 
2005 serge 3843
	if (rdev->irq.crtc_vblank_int[0] ||
2997 Serge 3844
	    atomic_read(&rdev->irq.pflip[0])) {
2005 serge 3845
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
3846
		crtc1 |= VBLANK_INT_MASK;
3847
	}
3848
	if (rdev->irq.crtc_vblank_int[1] ||
2997 Serge 3849
	    atomic_read(&rdev->irq.pflip[1])) {
2005 serge 3850
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
3851
		crtc2 |= VBLANK_INT_MASK;
3852
	}
3853
	if (rdev->irq.crtc_vblank_int[2] ||
2997 Serge 3854
	    atomic_read(&rdev->irq.pflip[2])) {
2005 serge 3855
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
3856
		crtc3 |= VBLANK_INT_MASK;
3857
	}
3858
	if (rdev->irq.crtc_vblank_int[3] ||
2997 Serge 3859
	    atomic_read(&rdev->irq.pflip[3])) {
2005 serge 3860
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
3861
		crtc4 |= VBLANK_INT_MASK;
3862
	}
3863
	if (rdev->irq.crtc_vblank_int[4] ||
2997 Serge 3864
	    atomic_read(&rdev->irq.pflip[4])) {
2005 serge 3865
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
3866
		crtc5 |= VBLANK_INT_MASK;
3867
	}
3868
	if (rdev->irq.crtc_vblank_int[5] ||
2997 Serge 3869
	    atomic_read(&rdev->irq.pflip[5])) {
2005 serge 3870
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
3871
		crtc6 |= VBLANK_INT_MASK;
3872
	}
3873
	if (rdev->irq.hpd[0]) {
3874
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
3875
		hpd1 |= DC_HPDx_INT_EN;
3876
	}
3877
	if (rdev->irq.hpd[1]) {
3878
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
3879
		hpd2 |= DC_HPDx_INT_EN;
3880
	}
3881
	if (rdev->irq.hpd[2]) {
3882
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
3883
		hpd3 |= DC_HPDx_INT_EN;
3884
	}
3885
	if (rdev->irq.hpd[3]) {
3886
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
3887
		hpd4 |= DC_HPDx_INT_EN;
3888
	}
3889
	if (rdev->irq.hpd[4]) {
3890
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
3891
		hpd5 |= DC_HPDx_INT_EN;
3892
	}
3893
	if (rdev->irq.hpd[5]) {
3894
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
3895
		hpd6 |= DC_HPDx_INT_EN;
3896
	}
2997 Serge 3897
	if (rdev->irq.afmt[0]) {
3898
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
3899
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
2005 serge 3900
	}
2997 Serge 3901
	if (rdev->irq.afmt[1]) {
3902
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
3903
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3904
	}
3905
	if (rdev->irq.afmt[2]) {
3906
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
3907
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3908
	}
3909
	if (rdev->irq.afmt[3]) {
3910
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
3911
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3912
	}
3913
	if (rdev->irq.afmt[4]) {
3914
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
3915
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3916
	}
3917
	if (rdev->irq.afmt[5]) {
3918
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
3919
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3920
	}
2005 serge 3921
 
2997 Serge 3922
	if (rdev->family >= CHIP_CAYMAN) {
3923
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
3924
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
3925
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
3926
	} else
3031 serge 3927
		WREG32(CP_INT_CNTL, cp_int_cntl);
3192 Serge 3928
 
3929
	WREG32(DMA_CNTL, dma_cntl);
3930
 
3931
	if (rdev->family >= CHIP_CAYMAN)
3932
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
3933
 
2005 serge 3934
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
3935
 
3936
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
3937
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
3938
	if (rdev->num_crtc >= 4) {
3939
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
3940
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
3941
	}
3942
	if (rdev->num_crtc >= 6) {
3943
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
3944
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
3945
	}
3946
 
3947
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
3948
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
3949
	if (rdev->num_crtc >= 4) {
3950
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
3951
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
3952
	}
3953
	if (rdev->num_crtc >= 6) {
3954
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
3955
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
3956
	}
3957
 
3958
	WREG32(DC_HPD1_INT_CONTROL, hpd1);
3959
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
3960
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
3961
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
3962
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
3963
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
3964
 
2997 Serge 3965
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
3966
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
3967
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
3968
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
3969
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
3970
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
3971
 
2005 serge 3972
	return 0;
3973
}
3974
 
2997 Serge 3975
static void evergreen_irq_ack(struct radeon_device *rdev)
2005 serge 3976
{
3977
	u32 tmp;
3978
 
3979
	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
3980
	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
3981
	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
3982
	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
3983
	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
3984
	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
3985
	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
3986
	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
3987
	if (rdev->num_crtc >= 4) {
3988
		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
3989
		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
3990
	}
3991
	if (rdev->num_crtc >= 6) {
3992
		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
3993
		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
3994
	}
3995
 
2997 Serge 3996
	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
3997
	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
3998
	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
3999
	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4000
	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4001
	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4002
 
2005 serge 4003
	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4004
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4005
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4006
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4007
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4008
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4009
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4010
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4011
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4012
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4013
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4014
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4015
 
4016
	if (rdev->num_crtc >= 4) {
4017
		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4018
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4019
		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4020
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4021
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4022
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4023
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4024
			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4025
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4026
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4027
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4028
			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4029
	}
4030
 
4031
	if (rdev->num_crtc >= 6) {
4032
		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4033
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4034
		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4035
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4036
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4037
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4038
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4039
			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4040
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4041
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4042
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4043
			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4044
	}
4045
 
4046
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4047
		tmp = RREG32(DC_HPD1_INT_CONTROL);
4048
		tmp |= DC_HPDx_INT_ACK;
4049
		WREG32(DC_HPD1_INT_CONTROL, tmp);
4050
	}
4051
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4052
		tmp = RREG32(DC_HPD2_INT_CONTROL);
4053
		tmp |= DC_HPDx_INT_ACK;
4054
		WREG32(DC_HPD2_INT_CONTROL, tmp);
4055
	}
4056
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4057
		tmp = RREG32(DC_HPD3_INT_CONTROL);
4058
		tmp |= DC_HPDx_INT_ACK;
4059
		WREG32(DC_HPD3_INT_CONTROL, tmp);
4060
	}
4061
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4062
		tmp = RREG32(DC_HPD4_INT_CONTROL);
4063
		tmp |= DC_HPDx_INT_ACK;
4064
		WREG32(DC_HPD4_INT_CONTROL, tmp);
4065
	}
4066
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4067
		tmp = RREG32(DC_HPD5_INT_CONTROL);
4068
		tmp |= DC_HPDx_INT_ACK;
4069
		WREG32(DC_HPD5_INT_CONTROL, tmp);
4070
	}
4071
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4072
		tmp = RREG32(DC_HPD5_INT_CONTROL);
4073
		tmp |= DC_HPDx_INT_ACK;
4074
		WREG32(DC_HPD6_INT_CONTROL, tmp);
4075
	}
2997 Serge 4076
	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4077
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4078
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4079
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4080
	}
4081
	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4082
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4083
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4084
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4085
	}
4086
	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4087
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4088
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4089
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4090
	}
4091
	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4092
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4093
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4094
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4095
	}
4096
	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4097
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4098
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4099
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4100
	}
4101
	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4102
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4103
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4104
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4105
	}
2005 serge 4106
}
2997 Serge 4107
 
4108
/* Fully quiesce interrupts: mask sources, give in-flight interrupts a
 * moment to land, ack anything pending, then force all enables off. */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4116
 
4117
/* Suspend path: disable all interrupt delivery and stop the RLC. */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4122
 
4123
static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4124
{
2005 serge 4125
	u32 wptr, tmp;
4126
 
4127
	if (rdev->wb.enabled)
4128
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4129
	else
4130
		wptr = RREG32(IH_RB_WPTR);
4131
 
4132
	if (wptr & RB_OVERFLOW) {
4133
		/* When a ring buffer overflow happen start parsing interrupt
4134
		 * from the last not overwritten vector (wptr + 16). Hopefully
4135
		 * this should allow us to catchup.
4136
		 */
4137
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4138
			wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4139
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4140
		tmp = RREG32(IH_RB_CNTL);
4141
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
4142
		WREG32(IH_RB_CNTL, tmp);
4143
	}
4144
	return (wptr & rdev->ih.ptr_mask);
4145
}
4146
 
4147
int evergreen_irq_process(struct radeon_device *rdev)
4148
{
4149
	u32 wptr;
4150
	u32 rptr;
4151
	u32 src_id, src_data;
4152
	u32 ring_index;
4153
	bool queue_hotplug = false;
2997 Serge 4154
	bool queue_hdmi = false;
2005 serge 4155
 
4156
	if (!rdev->ih.enabled || rdev->shutdown)
4157
		return IRQ_NONE;
4158
 
4159
	wptr = evergreen_get_ih_wptr(rdev);
2997 Serge 4160
 
4161
restart_ih:
4162
	/* is somebody else already processing irqs? */
4163
	if (atomic_xchg(&rdev->ih.lock, 1))
4164
		return IRQ_NONE;
4165
 
2005 serge 4166
	rptr = rdev->ih.rptr;
4167
	DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4168
 
2175 serge 4169
	/* Order reading of wptr vs. reading of IH ring data */
4170
	rmb();
4171
 
2005 serge 4172
	/* display interrupts */
4173
	evergreen_irq_ack(rdev);
4174
 
4175
	while (rptr != wptr) {
4176
		/* wptr/rptr are in bytes! */
4177
		ring_index = rptr / 4;
4178
		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4179
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4180
 
4181
		switch (src_id) {
4182
		case 1: /* D1 vblank/vline */
4183
			switch (src_data) {
4184
			case 0: /* D1 vblank */
4185
				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4186
					if (rdev->irq.crtc_vblank_int[0]) {
4187
				//		drm_handle_vblank(rdev->ddev, 0);
4188
						rdev->pm.vblank_sync = true;
4189
				//		wake_up(&rdev->irq.vblank_queue);
4190
					}
4191
				//	if (rdev->irq.pflip[0])
4192
				//		radeon_crtc_handle_flip(rdev, 0);
4193
					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4194
					DRM_DEBUG("IH: D1 vblank\n");
4195
				}
4196
				break;
4197
			case 1: /* D1 vline */
4198
				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4199
					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4200
					DRM_DEBUG("IH: D1 vline\n");
4201
				}
4202
				break;
4203
			default:
4204
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4205
				break;
4206
			}
4207
			break;
4208
		case 2: /* D2 vblank/vline */
4209
			switch (src_data) {
4210
			case 0: /* D2 vblank */
4211
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4212
					if (rdev->irq.crtc_vblank_int[1]) {
4213
				//		drm_handle_vblank(rdev->ddev, 1);
4214
						rdev->pm.vblank_sync = true;
4215
				//		wake_up(&rdev->irq.vblank_queue);
4216
					}
4217
			//		if (rdev->irq.pflip[1])
4218
			//			radeon_crtc_handle_flip(rdev, 1);
4219
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4220
					DRM_DEBUG("IH: D2 vblank\n");
4221
				}
4222
				break;
4223
			case 1: /* D2 vline */
4224
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4225
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4226
					DRM_DEBUG("IH: D2 vline\n");
4227
				}
4228
				break;
4229
			default:
4230
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4231
				break;
4232
			}
4233
			break;
4234
		case 3: /* D3 vblank/vline */
4235
			switch (src_data) {
4236
			case 0: /* D3 vblank */
4237
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4238
					if (rdev->irq.crtc_vblank_int[2]) {
4239
				//		drm_handle_vblank(rdev->ddev, 2);
4240
						rdev->pm.vblank_sync = true;
4241
				//		wake_up(&rdev->irq.vblank_queue);
4242
					}
4243
				//	if (rdev->irq.pflip[2])
4244
				//		radeon_crtc_handle_flip(rdev, 2);
4245
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4246
					DRM_DEBUG("IH: D3 vblank\n");
4247
				}
4248
				break;
4249
			case 1: /* D3 vline */
4250
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4251
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4252
					DRM_DEBUG("IH: D3 vline\n");
4253
				}
4254
				break;
4255
			default:
4256
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4257
				break;
4258
			}
4259
			break;
4260
		case 4: /* D4 vblank/vline */
4261
			switch (src_data) {
4262
			case 0: /* D4 vblank */
4263
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4264
					if (rdev->irq.crtc_vblank_int[3]) {
4265
					//	drm_handle_vblank(rdev->ddev, 3);
4266
						rdev->pm.vblank_sync = true;
4267
					//	wake_up(&rdev->irq.vblank_queue);
4268
					}
4269
		//			if (rdev->irq.pflip[3])
4270
		//				radeon_crtc_handle_flip(rdev, 3);
4271
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4272
					DRM_DEBUG("IH: D4 vblank\n");
4273
				}
4274
				break;
4275
			case 1: /* D4 vline */
4276
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4277
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4278
					DRM_DEBUG("IH: D4 vline\n");
4279
				}
4280
				break;
4281
			default:
4282
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4283
				break;
4284
			}
4285
			break;
4286
		case 5: /* D5 vblank/vline */
4287
			switch (src_data) {
4288
			case 0: /* D5 vblank */
4289
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4290
					if (rdev->irq.crtc_vblank_int[4]) {
4291
//						drm_handle_vblank(rdev->ddev, 4);
4292
						rdev->pm.vblank_sync = true;
4293
//						wake_up(&rdev->irq.vblank_queue);
4294
					}
4295
//					if (rdev->irq.pflip[4])
4296
//						radeon_crtc_handle_flip(rdev, 4);
4297
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4298
					DRM_DEBUG("IH: D5 vblank\n");
4299
				}
4300
				break;
4301
			case 1: /* D5 vline */
4302
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4303
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4304
					DRM_DEBUG("IH: D5 vline\n");
4305
				}
4306
				break;
4307
			default:
4308
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4309
				break;
4310
			}
4311
			break;
4312
		case 6: /* D6 vblank/vline */
4313
			switch (src_data) {
4314
			case 0: /* D6 vblank */
4315
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4316
					if (rdev->irq.crtc_vblank_int[5]) {
4317
				//		drm_handle_vblank(rdev->ddev, 5);
4318
						rdev->pm.vblank_sync = true;
4319
				//		wake_up(&rdev->irq.vblank_queue);
4320
					}
4321
			//		if (rdev->irq.pflip[5])
4322
			//			radeon_crtc_handle_flip(rdev, 5);
4323
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4324
					DRM_DEBUG("IH: D6 vblank\n");
4325
				}
4326
				break;
4327
			case 1: /* D6 vline */
4328
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4329
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4330
					DRM_DEBUG("IH: D6 vline\n");
4331
				}
4332
				break;
4333
			default:
4334
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4335
				break;
4336
			}
4337
			break;
4338
		case 42: /* HPD hotplug */
4339
			switch (src_data) {
4340
			case 0:
4341
				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4342
					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4343
					queue_hotplug = true;
4344
					DRM_DEBUG("IH: HPD1\n");
4345
				}
4346
				break;
4347
			case 1:
4348
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4349
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4350
					queue_hotplug = true;
4351
					DRM_DEBUG("IH: HPD2\n");
4352
				}
4353
				break;
4354
			case 2:
4355
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4356
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4357
					queue_hotplug = true;
4358
					DRM_DEBUG("IH: HPD3\n");
4359
				}
4360
				break;
4361
			case 3:
4362
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4363
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4364
					queue_hotplug = true;
4365
					DRM_DEBUG("IH: HPD4\n");
4366
				}
4367
				break;
4368
			case 4:
4369
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4370
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4371
					queue_hotplug = true;
4372
					DRM_DEBUG("IH: HPD5\n");
4373
				}
4374
				break;
4375
			case 5:
4376
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4377
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4378
					queue_hotplug = true;
4379
					DRM_DEBUG("IH: HPD6\n");
4380
				}
4381
				break;
4382
			default:
4383
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4384
				break;
4385
			}
4386
			break;
2997 Serge 4387
		case 44: /* hdmi */
4388
			switch (src_data) {
4389
			case 0:
4390
				if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4391
					rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4392
					queue_hdmi = true;
4393
					DRM_DEBUG("IH: HDMI0\n");
4394
				}
4395
				break;
4396
			case 1:
4397
				if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4398
					rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4399
					queue_hdmi = true;
4400
					DRM_DEBUG("IH: HDMI1\n");
4401
				}
4402
				break;
4403
			case 2:
4404
				if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4405
					rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4406
					queue_hdmi = true;
4407
					DRM_DEBUG("IH: HDMI2\n");
4408
				}
4409
				break;
4410
			case 3:
4411
				if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4412
					rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4413
					queue_hdmi = true;
4414
					DRM_DEBUG("IH: HDMI3\n");
4415
				}
4416
				break;
4417
			case 4:
4418
				if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4419
					rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4420
					queue_hdmi = true;
4421
					DRM_DEBUG("IH: HDMI4\n");
4422
				}
4423
				break;
4424
			case 5:
4425
				if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4426
					rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4427
					queue_hdmi = true;
4428
					DRM_DEBUG("IH: HDMI5\n");
4429
				}
4430
				break;
4431
			default:
4432
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4433
				break;
4434
			}
3764 Serge 4435
		case 124: /* UVD */
4436
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4437
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
2997 Serge 4438
			break;
3192 Serge 4439
		case 146:
4440
		case 147:
4441
			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4442
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
4443
				RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
4444
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4445
				RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
4446
			/* reset addr and status */
4447
			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4448
			break;
2005 serge 4449
		case 176: /* CP_INT in ring buffer */
4450
		case 177: /* CP_INT in IB1 */
4451
		case 178: /* CP_INT in IB2 */
4452
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
2997 Serge 4453
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
2005 serge 4454
			break;
4455
		case 181: /* CP EOP event */
4456
			DRM_DEBUG("IH: CP EOP\n");
2997 Serge 4457
			if (rdev->family >= CHIP_CAYMAN) {
4458
				switch (src_data) {
4459
				case 0:
4460
					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4461
					break;
4462
				case 1:
4463
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4464
					break;
4465
				case 2:
4466
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4467
					break;
4468
				}
4469
			} else
4470
				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
2005 serge 4471
			break;
3192 Serge 4472
		case 224: /* DMA trap event */
4473
			DRM_DEBUG("IH: DMA trap\n");
4474
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4475
			break;
2005 serge 4476
		case 233: /* GUI IDLE */
4477
			DRM_DEBUG("IH: GUI idle\n");
4478
			break;
3192 Serge 4479
		case 244: /* DMA trap event */
4480
			if (rdev->family >= CHIP_CAYMAN) {
4481
				DRM_DEBUG("IH: DMA1 trap\n");
4482
				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4483
			}
4484
			break;
2005 serge 4485
		default:
4486
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4487
			break;
4488
		}
4489
 
4490
		/* wptr/rptr are in bytes! */
4491
		rptr += 16;
4492
		rptr &= rdev->ih.ptr_mask;
4493
	}
2997 Serge 4494
	rdev->ih.rptr = rptr;
4495
	WREG32(IH_RB_RPTR, rdev->ih.rptr);
4496
	atomic_set(&rdev->ih.lock, 0);
4497
 
2005 serge 4498
	/* make sure wptr hasn't changed while processing */
4499
	wptr = evergreen_get_ih_wptr(rdev);
2997 Serge 4500
	if (wptr != rptr)
2005 serge 4501
		goto restart_ih;
2997 Serge 4502
 
2005 serge 4503
	return IRQ_HANDLED;
4504
}
4505
 
3192 Serge 4506
/**
 * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
 *
 * @rdev: radeon_device pointer
 * @fence: radeon fence object
 *
 * Add a DMA fence packet to the ring to write
 * the fence seq number and DMA trap packet to generate
 * an interrupt if needed (evergreen-SI).
 */
void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
				   struct radeon_fence *fence)
{
	struct radeon_ring *ring = &rdev->ring[fence->ring];
	u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
	/* write the fence */
	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
	radeon_ring_write(ring, addr & 0xfffffffc);	/* low 32 bits, dword-aligned */
	radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));	/* high address byte */
	radeon_ring_write(ring, fence->seq);
	/* generate an interrupt */
	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
	/* flush HDP */
	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
	/* 0xf byte-enable in bits 16-19, register dword offset in low bits */
	radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
	radeon_ring_write(ring, 1);
}
4533
 
4534
/**
 * evergreen_dma_ring_ib_execute - schedule an IB on the DMA engine
 *
 * @rdev: radeon_device pointer
 * @ib: IB object to schedule
 *
 * Schedule an IB in the DMA ring (evergreen).
 */
void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
				   struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];

	if (rdev->wb.enabled) {
		/* Predict where wptr will be after the padding + IB packet
		 * below so the writeback value reflects the final position.
		 * (4 dwords for the WRITE packet, advanced to the same
		 * "& 7 == 5" alignment the IB packet uses, plus its 3 dwords.) */
		u32 next_rptr = ring->wptr + 4;
		while ((next_rptr & 7) != 5)
			next_rptr++;
		next_rptr += 3;
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
		radeon_ring_write(ring, next_rptr);
	}

	/* The indirect buffer packet must end on an 8 DW boundary in the DMA ring.
	 * Pad as necessary with NOPs.
	 */
	while ((ring->wptr & 7) != 5)
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
	radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));	/* 32-byte aligned IB address */
	radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));

}
4568
 
4569
/**
 * evergreen_copy_dma - copy pages using the DMA engine
 *
 * @rdev: radeon_device pointer
 * @src_offset: src GPU address
 * @dst_offset: dst GPU address
 * @num_gpu_pages: number of GPU pages to xfer
 * @fence: radeon fence object
 *
 * Copy GPU paging using the DMA engine (evergreen-cayman).
 * Used by the radeon ttm implementation to move pages if
 * registered as the asic copy callback.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_copy_dma(struct radeon_device *rdev,
		       uint64_t src_offset, uint64_t dst_offset,
		       unsigned num_gpu_pages,
		       struct radeon_fence **fence)
{
	struct radeon_semaphore *sem = NULL;
	int ring_index = rdev->asic->copy.dma_ring_index;
	struct radeon_ring *ring = &rdev->ring[ring_index];
	u32 size_in_dw, cur_size_in_dw;
	int i, num_loops;
	int r = 0;

	r = radeon_semaphore_create(rdev, &sem);
	if (r) {
		DRM_ERROR("radeon: moving bo (%d).\n", r);
		return r;
	}

	/* each COPY packet moves at most 0xfffff dwords; split accordingly */
	size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
	num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
	/* 5 dwords per copy packet, plus room for sync/fence emission */
	r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
	if (r) {
		DRM_ERROR("radeon: moving bo (%d).\n", r);
		radeon_semaphore_free(rdev, &sem, NULL);
		return r;
	}

	/* wait for the previous fence's ring via semaphore if needed */
	if (radeon_fence_need_sync(*fence, ring->idx)) {
		radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
					    ring->idx);
		radeon_fence_note_sync(*fence, ring->idx);
	} else {
		radeon_semaphore_free(rdev, &sem, NULL);
	}

	for (i = 0; i < num_loops; i++) {
		cur_size_in_dw = size_in_dw;
		if (cur_size_in_dw > 0xFFFFF)
			cur_size_in_dw = 0xFFFFF;
		size_in_dw -= cur_size_in_dw;
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
		radeon_ring_write(ring, dst_offset & 0xfffffffc);
		radeon_ring_write(ring, src_offset & 0xfffffffc);
		radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
		radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
		src_offset += cur_size_in_dw * 4;
		dst_offset += cur_size_in_dw * 4;
	}

	r = radeon_fence_emit(rdev, fence, ring->idx);
	if (r) {
		radeon_ring_unlock_undo(rdev, ring);
		return r;
	}

	radeon_ring_unlock_commit(rdev, ring);
	radeon_semaphore_free(rdev, &sem, *fence);

	return r;
}
4642
 
1430 serge 4643
/* Bring the evergreen GPU up: load microcode, program the MC, enable
 * GART, init the blitter, writeback, fences, IRQs, and the CP/DMA/UVD
 * rings, then the IB pool.  Ordering of these steps is required by the
 * hardware; returns 0 on success or a negative error code. */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);

	/* DCE5 (NI) parts additionally need MC microcode */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* blitter failure is non-fatal: fall back to CPU copies */
	r = evergreen_blit_init(rdev);
	if (r) {
//       r600_blit_fini(rdev);
		rdev->asic->copy.copy = NULL;
		dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD resume is disabled in this port */
//   r = rv770_uvd_resume(rdev);
//   if (!r) {
//       r = radeon_fence_driver_start_ring(rdev,
//                          R600_RING_TYPE_UVD_INDEX);
//       if (r)
//           dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
//   }

//   if (r)
//       rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
//		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
			     0, 0xfffff, RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_RB_RPTR, DMA_RB_WPTR,
			     2, 0x3fffc, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* UVD ring is only initialized if something sized it (disabled above) */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size,
				     R600_WB_UVD_RPTR_OFFSET,
				     UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
				     0, 0xfffff, RADEON_CP_PACKET2);
		if (!r)
			r = r600_uvd_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	return 0;
}
4783
 
4784
 
4785
 
2997 Serge 4786
#if 0
1430 serge 4787
 
2005 serge 4788
/* NOTE: this function sits inside an '#if 0' region and is currently
 * compiled out in this port.  It performs a GPU blit copy of num_pages
 * pages from src_offset to dst_offset under the r600_blit mutex.
 * Returns 0 on success or a negative error code. */
int evergreen_copy_blit(struct radeon_device *rdev,
			uint64_t src_offset, uint64_t dst_offset,
			unsigned num_pages, struct radeon_fence *fence)
{
	int r;

	mutex_lock(&rdev->r600_blit.mutex);
	rdev->r600_blit.vb_ib = NULL;
	r = evergreen_blit_prepare_copy(rdev, num_pages * RADEON_GPU_PAGE_SIZE);
	if (r) {
		/* release the vertex-buffer IB if prepare allocated one */
		if (rdev->r600_blit.vb_ib)
			radeon_ib_free(rdev, &rdev->r600_blit.vb_ib);
		mutex_unlock(&rdev->r600_blit.mutex);
		return r;
	}
	evergreen_kms_blit_copy(rdev, src_offset, dst_offset, num_pages * RADEON_GPU_PAGE_SIZE);
	evergreen_blit_done_copy(rdev, fence);
	mutex_unlock(&rdev->r600_blit.mutex);
	return 0;
}
2997 Serge 4808
#endif
1430 serge 4809
 
4810
/* Plan is to move initialization in that function and use
4811
 * helper function so that radeon_device_init pretty much
4812
 * do nothing more than calling asic specific function. This
4813
 * should also allow to remove a bunch of callback function
4814
 * like vram_info.
4815
 */
4816
/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 *
 * One-time driver init for evergreen parts: BIOS/ATOM setup, asic
 * reset/post, clocks, fences, MC, memory manager, rings and GART,
 * then evergreen_startup().  Returns 0 or a negative error code. */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* size the GFX and DMA rings; objects are allocated later */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD init is disabled in this port */
//   r = radeon_uvd_init(rdev);
//   if (!r) {
//       rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
//       r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
//                  4096);
//   }

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not suffient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
4914
 
1986 serge 4915
 
2997 Serge 4916
/* Try to switch the PCIE link to gen 2 speeds.  Bails out early when the
 * feature is disabled via the radeon.pcie_gen2 module option, on IGP or
 * non-PCIE parts, on x2 cards, when the bus does not support 5.0/8.0 GT/s,
 * or when gen 2 is already active.  The register write sequence below is
 * order-sensitive. */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	/* only attempt the switch if the other side supports or ever sent gen2 */
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* pulse the failed-speed-change counter clear bit */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}