Subversion Repositories Kolibri OS

Rev

Rev 6104 | Go to most recent revision | Only display areas with differences | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 6104 Rev 6661
1
/*
1
/*
2
 * Copyright 2010 Advanced Micro Devices, Inc.
2
 * Copyright 2010 Advanced Micro Devices, Inc.
3
 *
3
 *
4
 * Permission is hereby granted, free of charge, to any person obtaining a
4
 * Permission is hereby granted, free of charge, to any person obtaining a
5
 * copy of this software and associated documentation files (the "Software"),
5
 * copy of this software and associated documentation files (the "Software"),
6
 * to deal in the Software without restriction, including without limitation
6
 * to deal in the Software without restriction, including without limitation
7
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
7
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
 * and/or sell copies of the Software, and to permit persons to whom the
8
 * and/or sell copies of the Software, and to permit persons to whom the
9
 * Software is furnished to do so, subject to the following conditions:
9
 * Software is furnished to do so, subject to the following conditions:
10
 *
10
 *
11
 * The above copyright notice and this permission notice shall be included in
11
 * The above copyright notice and this permission notice shall be included in
12
 * all copies or substantial portions of the Software.
12
 * all copies or substantial portions of the Software.
13
 *
13
 *
14
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
16
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
17
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
18
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
19
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20
 * OTHER DEALINGS IN THE SOFTWARE.
20
 * OTHER DEALINGS IN THE SOFTWARE.
21
 *
21
 *
22
 * Authors: Alex Deucher
22
 * Authors: Alex Deucher
23
 */
23
 */
24
#include 
24
#include 
25
#include 
25
#include 
26
#include 
26
#include 
27
#include "radeon.h"
27
#include "radeon.h"
28
#include "radeon_asic.h"
28
#include "radeon_asic.h"
29
#include "radeon_audio.h"
29
#include "radeon_audio.h"
30
#include 
30
#include 
31
#include "evergreend.h"
31
#include "evergreend.h"
32
#include "atom.h"
32
#include "atom.h"
33
#include "avivod.h"
33
#include "avivod.h"
34
#include "evergreen_reg.h"
34
#include "evergreen_reg.h"
35
#include "evergreen_blit_shaders.h"
35
#include "evergreen_blit_shaders.h"
36
#include "radeon_ucode.h"
36
#include "radeon_ucode.h"
37
 
37
 
38
/*
38
/*
39
 * Indirect registers accessor
39
 * Indirect registers accessor
40
 */
40
 */
41
/**
 * eg_cg_rreg - read a CG indirect register
 * @rdev: radeon device handle
 * @reg: CG-space register offset (only the low 16 bits are used)
 *
 * The CG block is reached through an index/data register pair, so the
 * ADDR write and DATA read must not be interleaved with another
 * accessor; cg_idx_lock serializes the pair.  Returns the register value.
 */
u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long irq_flags;
	u32 val;

	spin_lock_irqsave(&rdev->cg_idx_lock, irq_flags);
	WREG32(EVERGREEN_CG_IND_ADDR, reg & 0xffff);
	val = RREG32(EVERGREEN_CG_IND_DATA);
	spin_unlock_irqrestore(&rdev->cg_idx_lock, irq_flags);

	return val;
}
52
 
52
 
53
/**
 * eg_cg_wreg - write a CG indirect register
 * @rdev: radeon device handle
 * @reg: CG-space register offset (only the low 16 bits are used)
 * @v: value to write
 *
 * Mirrors eg_cg_rreg(): the ADDR/DATA pair is protected by cg_idx_lock
 * so concurrent indirect accesses cannot corrupt each other.
 */
void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long irq_flags;

	spin_lock_irqsave(&rdev->cg_idx_lock, irq_flags);
	WREG32(EVERGREEN_CG_IND_ADDR, reg & 0xffff);
	WREG32(EVERGREEN_CG_IND_DATA, v);
	spin_unlock_irqrestore(&rdev->cg_idx_lock, irq_flags);
}
62
 
62
 
63
/**
 * eg_pif_phy0_rreg - read a PIF PHY0 indirect register
 * @rdev: radeon device handle
 * @reg: PHY0-space register offset (only the low 16 bits are used)
 *
 * PHY0 and PHY1 accessors share the same pif_idx_lock, so an access to
 * either PHY index/data pair excludes the other.  Returns the value read.
 */
u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long irq_flags;
	u32 val;

	spin_lock_irqsave(&rdev->pif_idx_lock, irq_flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, reg & 0xffff);
	val = RREG32(EVERGREEN_PIF_PHY0_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, irq_flags);

	return val;
}
74
 
74
 
75
/**
 * eg_pif_phy0_wreg - write a PIF PHY0 indirect register
 * @rdev: radeon device handle
 * @reg: PHY0-space register offset (only the low 16 bits are used)
 * @v: value to write
 *
 * Serialized against all PHY0/PHY1 indirect accesses via pif_idx_lock.
 */
void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long irq_flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, irq_flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, reg & 0xffff);
	WREG32(EVERGREEN_PIF_PHY0_DATA, v);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, irq_flags);
}
84
 
84
 
85
/**
 * eg_pif_phy1_rreg - read a PIF PHY1 indirect register
 * @rdev: radeon device handle
 * @reg: PHY1-space register offset (only the low 16 bits are used)
 *
 * Same pattern as the PHY0 accessor, using the PHY1 index/data pair;
 * the shared pif_idx_lock keeps the pair atomic.  Returns the value read.
 */
u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long irq_flags;
	u32 val;

	spin_lock_irqsave(&rdev->pif_idx_lock, irq_flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, reg & 0xffff);
	val = RREG32(EVERGREEN_PIF_PHY1_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, irq_flags);

	return val;
}
96
 
96
 
97
/**
 * eg_pif_phy1_wreg - write a PIF PHY1 indirect register
 * @rdev: radeon device handle
 * @reg: PHY1-space register offset (only the low 16 bits are used)
 * @v: value to write
 *
 * Serialized against all PHY0/PHY1 indirect accesses via pif_idx_lock.
 */
void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long irq_flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, irq_flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, reg & 0xffff);
	WREG32(EVERGREEN_PIF_PHY1_DATA, v);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, irq_flags);
}
106
 
106
 
107
static const u32 crtc_offsets[6] =
107
static const u32 crtc_offsets[6] =
108
{
108
{
109
	EVERGREEN_CRTC0_REGISTER_OFFSET,
109
	EVERGREEN_CRTC0_REGISTER_OFFSET,
110
	EVERGREEN_CRTC1_REGISTER_OFFSET,
110
	EVERGREEN_CRTC1_REGISTER_OFFSET,
111
	EVERGREEN_CRTC2_REGISTER_OFFSET,
111
	EVERGREEN_CRTC2_REGISTER_OFFSET,
112
	EVERGREEN_CRTC3_REGISTER_OFFSET,
112
	EVERGREEN_CRTC3_REGISTER_OFFSET,
113
	EVERGREEN_CRTC4_REGISTER_OFFSET,
113
	EVERGREEN_CRTC4_REGISTER_OFFSET,
114
	EVERGREEN_CRTC5_REGISTER_OFFSET
114
	EVERGREEN_CRTC5_REGISTER_OFFSET
115
};
115
};
116
 
116
 
117
#include "clearstate_evergreen.h"
117
#include "clearstate_evergreen.h"
118
 
118
 
119
/*
 * Register offsets the RLC saves and restores around power events on
 * Sumo-class parts.  One offset per entry; order matters to the RLC.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
203
 
203
 
204
static void evergreen_gpu_init(struct radeon_device *rdev);
204
static void evergreen_gpu_init(struct radeon_device *rdev);
205
void evergreen_fini(struct radeon_device *rdev);
205
void evergreen_fini(struct radeon_device *rdev);
206
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
206
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
207
void evergreen_program_aspm(struct radeon_device *rdev);
207
void evergreen_program_aspm(struct radeon_device *rdev);
208
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
208
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
209
				     int ring, u32 cp_int_cntl);
209
				     int ring, u32 cp_int_cntl);
210
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
210
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
211
				   u32 status, u32 addr);
211
				   u32 status, u32 addr);
212
void cik_init_cp_pg_table(struct radeon_device *rdev);
212
void cik_init_cp_pg_table(struct radeon_device *rdev);
213
 
213
 
214
extern u32 si_get_csb_size(struct radeon_device *rdev);
214
extern u32 si_get_csb_size(struct radeon_device *rdev);
215
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
215
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
216
extern u32 cik_get_csb_size(struct radeon_device *rdev);
216
extern u32 cik_get_csb_size(struct radeon_device *rdev);
217
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
217
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
218
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
218
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
219
 
219
 
220
/*
 * Evergreen "golden" register settings.
 * Triplets of { offset, and-mask, or-value }: the masked bits of each
 * register are replaced with the given value at init time.
 */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
265
 
265
 
266
/*
 * Second bank of Evergreen golden registers, same { offset, mask, value }
 * triplet layout; every entry here fully clears its register.
 */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
283
 
283
 
284
/*
 * Cypress MGCG (medium-grain clock gating) init sequence.
 * Triplets of { offset, and-mask, or-value } applied in order; the
 * repeated 0x802c writes appear to select banks between the groups
 * of 0x91xx/0x92xx programming — NOTE(review): bank semantics not
 * visible here, confirm against the register spec.
 */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
436
 
436
 
437
/*
 * Redwood MGCG init sequence; same { offset, mask, value } triplet
 * format as cypress_mgcg_init, with a single 0x91xx/0x92xx group.
 */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
508
 
508
 
509
static const u32 cedar_golden_registers[] =
509
static const u32 cedar_golden_registers[] =
510
{
510
{
511
	0x3f90, 0xffff0000, 0xff000000,
511
	0x3f90, 0xffff0000, 0xff000000,
512
	0x9148, 0xffff0000, 0xff000000,
512
	0x9148, 0xffff0000, 0xff000000,
513
	0x3f94, 0xffff0000, 0xff000000,
513
	0x3f94, 0xffff0000, 0xff000000,
514
	0x914c, 0xffff0000, 0xff000000,
514
	0x914c, 0xffff0000, 0xff000000,
515
	0x9b7c, 0xffffffff, 0x00000000,
515
	0x9b7c, 0xffffffff, 0x00000000,
516
	0x8a14, 0xffffffff, 0x00000007,
516
	0x8a14, 0xffffffff, 0x00000007,
517
	0x8b10, 0xffffffff, 0x00000000,
517
	0x8b10, 0xffffffff, 0x00000000,
518
	0x960c, 0xffffffff, 0x54763210,
518
	0x960c, 0xffffffff, 0x54763210,
519
	0x88c4, 0xffffffff, 0x000000c2,
519
	0x88c4, 0xffffffff, 0x000000c2,
520
	0x88d4, 0xffffffff, 0x00000000,
520
	0x88d4, 0xffffffff, 0x00000000,
521
	0x8974, 0xffffffff, 0x00000000,
521
	0x8974, 0xffffffff, 0x00000000,
522
	0xc78, 0x00000080, 0x00000080,
522
	0xc78, 0x00000080, 0x00000080,
523
	0x5eb4, 0xffffffff, 0x00000002,
523
	0x5eb4, 0xffffffff, 0x00000002,
524
	0x5e78, 0xffffffff, 0x001000f0,
524
	0x5e78, 0xffffffff, 0x001000f0,
525
	0x6104, 0x01000300, 0x00000000,
525
	0x6104, 0x01000300, 0x00000000,
526
	0x5bc0, 0x00300000, 0x00000000,
526
	0x5bc0, 0x00300000, 0x00000000,
527
	0x7030, 0xffffffff, 0x00000011,
527
	0x7030, 0xffffffff, 0x00000011,
528
	0x7c30, 0xffffffff, 0x00000011,
528
	0x7c30, 0xffffffff, 0x00000011,
529
	0x10830, 0xffffffff, 0x00000011,
529
	0x10830, 0xffffffff, 0x00000011,
530
	0x11430, 0xffffffff, 0x00000011,
530
	0x11430, 0xffffffff, 0x00000011,
531
	0xd02c, 0xffffffff, 0x08421000,
531
	0xd02c, 0xffffffff, 0x08421000,
532
	0x240c, 0xffffffff, 0x00000380,
532
	0x240c, 0xffffffff, 0x00000380,
533
	0x8b24, 0xffffffff, 0x00ff0fff,
533
	0x8b24, 0xffffffff, 0x00ff0fff,
534
	0x28a4c, 0x06000000, 0x06000000,
534
	0x28a4c, 0x06000000, 0x06000000,
535
	0x10c, 0x00000001, 0x00000001,
535
	0x10c, 0x00000001, 0x00000001,
536
	0x8d00, 0xffffffff, 0x100e4848,
536
	0x8d00, 0xffffffff, 0x100e4848,
537
	0x8d04, 0xffffffff, 0x00164745,
537
	0x8d04, 0xffffffff, 0x00164745,
538
	0x8c00, 0xffffffff, 0xe4000003,
538
	0x8c00, 0xffffffff, 0xe4000003,
539
	0x8c04, 0xffffffff, 0x40600060,
539
	0x8c04, 0xffffffff, 0x40600060,
540
	0x8c08, 0xffffffff, 0x001c001c,
540
	0x8c08, 0xffffffff, 0x001c001c,
541
	0x8cf0, 0xffffffff, 0x08e00410,
541
	0x8cf0, 0xffffffff, 0x08e00410,
542
	0x8c20, 0xffffffff, 0x00800080,
542
	0x8c20, 0xffffffff, 0x00800080,
543
	0x8c24, 0xffffffff, 0x00800080,
543
	0x8c24, 0xffffffff, 0x00800080,
544
	0x8c18, 0xffffffff, 0x20202078,
544
	0x8c18, 0xffffffff, 0x20202078,
545
	0x8c1c, 0xffffffff, 0x00001010,
545
	0x8c1c, 0xffffffff, 0x00001010,
546
	0x28350, 0xffffffff, 0x00000000,
546
	0x28350, 0xffffffff, 0x00000000,
547
	0xa008, 0xffffffff, 0x00010000,
547
	0xa008, 0xffffffff, 0x00010000,
548
	0x5c4, 0xffffffff, 0x00000001,
548
	0x5c4, 0xffffffff, 0x00000001,
549
	0x9508, 0xffffffff, 0x00000002
549
	0x9508, 0xffffffff, 0x00000002
550
};
550
};
551
 
551
 
552
static const u32 cedar_mgcg_init[] =
552
static const u32 cedar_mgcg_init[] =
553
{
553
{
554
	0x802c, 0xffffffff, 0xc0000000,
554
	0x802c, 0xffffffff, 0xc0000000,
555
	0x5448, 0xffffffff, 0x00000100,
555
	0x5448, 0xffffffff, 0x00000100,
556
	0x55e4, 0xffffffff, 0x00000100,
556
	0x55e4, 0xffffffff, 0x00000100,
557
	0x160c, 0xffffffff, 0x00000100,
557
	0x160c, 0xffffffff, 0x00000100,
558
	0x5644, 0xffffffff, 0x00000100,
558
	0x5644, 0xffffffff, 0x00000100,
559
	0xc164, 0xffffffff, 0x00000100,
559
	0xc164, 0xffffffff, 0x00000100,
560
	0x8a18, 0xffffffff, 0x00000100,
560
	0x8a18, 0xffffffff, 0x00000100,
561
	0x897c, 0xffffffff, 0x06000100,
561
	0x897c, 0xffffffff, 0x06000100,
562
	0x8b28, 0xffffffff, 0x00000100,
562
	0x8b28, 0xffffffff, 0x00000100,
563
	0x9144, 0xffffffff, 0x00000100,
563
	0x9144, 0xffffffff, 0x00000100,
564
	0x9a60, 0xffffffff, 0x00000100,
564
	0x9a60, 0xffffffff, 0x00000100,
565
	0x9868, 0xffffffff, 0x00000100,
565
	0x9868, 0xffffffff, 0x00000100,
566
	0x8d58, 0xffffffff, 0x00000100,
566
	0x8d58, 0xffffffff, 0x00000100,
567
	0x9510, 0xffffffff, 0x00000100,
567
	0x9510, 0xffffffff, 0x00000100,
568
	0x949c, 0xffffffff, 0x00000100,
568
	0x949c, 0xffffffff, 0x00000100,
569
	0x9654, 0xffffffff, 0x00000100,
569
	0x9654, 0xffffffff, 0x00000100,
570
	0x9030, 0xffffffff, 0x00000100,
570
	0x9030, 0xffffffff, 0x00000100,
571
	0x9034, 0xffffffff, 0x00000100,
571
	0x9034, 0xffffffff, 0x00000100,
572
	0x9038, 0xffffffff, 0x00000100,
572
	0x9038, 0xffffffff, 0x00000100,
573
	0x903c, 0xffffffff, 0x00000100,
573
	0x903c, 0xffffffff, 0x00000100,
574
	0x9040, 0xffffffff, 0x00000100,
574
	0x9040, 0xffffffff, 0x00000100,
575
	0xa200, 0xffffffff, 0x00000100,
575
	0xa200, 0xffffffff, 0x00000100,
576
	0xa204, 0xffffffff, 0x00000100,
576
	0xa204, 0xffffffff, 0x00000100,
577
	0xa208, 0xffffffff, 0x00000100,
577
	0xa208, 0xffffffff, 0x00000100,
578
	0xa20c, 0xffffffff, 0x00000100,
578
	0xa20c, 0xffffffff, 0x00000100,
579
	0x971c, 0xffffffff, 0x00000100,
579
	0x971c, 0xffffffff, 0x00000100,
580
	0x977c, 0xffffffff, 0x00000100,
580
	0x977c, 0xffffffff, 0x00000100,
581
	0x3f80, 0xffffffff, 0x00000100,
581
	0x3f80, 0xffffffff, 0x00000100,
582
	0xa210, 0xffffffff, 0x00000100,
582
	0xa210, 0xffffffff, 0x00000100,
583
	0xa214, 0xffffffff, 0x00000100,
583
	0xa214, 0xffffffff, 0x00000100,
584
	0x4d8, 0xffffffff, 0x00000100,
584
	0x4d8, 0xffffffff, 0x00000100,
585
	0x9784, 0xffffffff, 0x00000100,
585
	0x9784, 0xffffffff, 0x00000100,
586
	0x9698, 0xffffffff, 0x00000100,
586
	0x9698, 0xffffffff, 0x00000100,
587
	0x4d4, 0xffffffff, 0x00000200,
587
	0x4d4, 0xffffffff, 0x00000200,
588
	0x30cc, 0xffffffff, 0x00000100,
588
	0x30cc, 0xffffffff, 0x00000100,
589
	0xd0c0, 0xffffffff, 0xff000100,
589
	0xd0c0, 0xffffffff, 0xff000100,
590
	0x802c, 0xffffffff, 0x40000000,
590
	0x802c, 0xffffffff, 0x40000000,
591
	0x915c, 0xffffffff, 0x00010000,
591
	0x915c, 0xffffffff, 0x00010000,
592
	0x9178, 0xffffffff, 0x00050000,
592
	0x9178, 0xffffffff, 0x00050000,
593
	0x917c, 0xffffffff, 0x00030002,
593
	0x917c, 0xffffffff, 0x00030002,
594
	0x918c, 0xffffffff, 0x00010004,
594
	0x918c, 0xffffffff, 0x00010004,
595
	0x9190, 0xffffffff, 0x00070006,
595
	0x9190, 0xffffffff, 0x00070006,
596
	0x9194, 0xffffffff, 0x00050000,
596
	0x9194, 0xffffffff, 0x00050000,
597
	0x9198, 0xffffffff, 0x00030002,
597
	0x9198, 0xffffffff, 0x00030002,
598
	0x91a8, 0xffffffff, 0x00010004,
598
	0x91a8, 0xffffffff, 0x00010004,
599
	0x91ac, 0xffffffff, 0x00070006,
599
	0x91ac, 0xffffffff, 0x00070006,
600
	0x91e8, 0xffffffff, 0x00000000,
600
	0x91e8, 0xffffffff, 0x00000000,
601
	0x9294, 0xffffffff, 0x00000000,
601
	0x9294, 0xffffffff, 0x00000000,
602
	0x929c, 0xffffffff, 0x00000001,
602
	0x929c, 0xffffffff, 0x00000001,
603
	0x802c, 0xffffffff, 0xc0000000
603
	0x802c, 0xffffffff, 0xc0000000
604
};
604
};
605
 
605
 
606
static const u32 juniper_mgcg_init[] =
606
static const u32 juniper_mgcg_init[] =
607
{
607
{
608
	0x802c, 0xffffffff, 0xc0000000,
608
	0x802c, 0xffffffff, 0xc0000000,
609
	0x5448, 0xffffffff, 0x00000100,
609
	0x5448, 0xffffffff, 0x00000100,
610
	0x55e4, 0xffffffff, 0x00000100,
610
	0x55e4, 0xffffffff, 0x00000100,
611
	0x160c, 0xffffffff, 0x00000100,
611
	0x160c, 0xffffffff, 0x00000100,
612
	0x5644, 0xffffffff, 0x00000100,
612
	0x5644, 0xffffffff, 0x00000100,
613
	0xc164, 0xffffffff, 0x00000100,
613
	0xc164, 0xffffffff, 0x00000100,
614
	0x8a18, 0xffffffff, 0x00000100,
614
	0x8a18, 0xffffffff, 0x00000100,
615
	0x897c, 0xffffffff, 0x06000100,
615
	0x897c, 0xffffffff, 0x06000100,
616
	0x8b28, 0xffffffff, 0x00000100,
616
	0x8b28, 0xffffffff, 0x00000100,
617
	0x9144, 0xffffffff, 0x00000100,
617
	0x9144, 0xffffffff, 0x00000100,
618
	0x9a60, 0xffffffff, 0x00000100,
618
	0x9a60, 0xffffffff, 0x00000100,
619
	0x9868, 0xffffffff, 0x00000100,
619
	0x9868, 0xffffffff, 0x00000100,
620
	0x8d58, 0xffffffff, 0x00000100,
620
	0x8d58, 0xffffffff, 0x00000100,
621
	0x9510, 0xffffffff, 0x00000100,
621
	0x9510, 0xffffffff, 0x00000100,
622
	0x949c, 0xffffffff, 0x00000100,
622
	0x949c, 0xffffffff, 0x00000100,
623
	0x9654, 0xffffffff, 0x00000100,
623
	0x9654, 0xffffffff, 0x00000100,
624
	0x9030, 0xffffffff, 0x00000100,
624
	0x9030, 0xffffffff, 0x00000100,
625
	0x9034, 0xffffffff, 0x00000100,
625
	0x9034, 0xffffffff, 0x00000100,
626
	0x9038, 0xffffffff, 0x00000100,
626
	0x9038, 0xffffffff, 0x00000100,
627
	0x903c, 0xffffffff, 0x00000100,
627
	0x903c, 0xffffffff, 0x00000100,
628
	0x9040, 0xffffffff, 0x00000100,
628
	0x9040, 0xffffffff, 0x00000100,
629
	0xa200, 0xffffffff, 0x00000100,
629
	0xa200, 0xffffffff, 0x00000100,
630
	0xa204, 0xffffffff, 0x00000100,
630
	0xa204, 0xffffffff, 0x00000100,
631
	0xa208, 0xffffffff, 0x00000100,
631
	0xa208, 0xffffffff, 0x00000100,
632
	0xa20c, 0xffffffff, 0x00000100,
632
	0xa20c, 0xffffffff, 0x00000100,
633
	0x971c, 0xffffffff, 0x00000100,
633
	0x971c, 0xffffffff, 0x00000100,
634
	0xd0c0, 0xffffffff, 0xff000100,
634
	0xd0c0, 0xffffffff, 0xff000100,
635
	0x802c, 0xffffffff, 0x40000000,
635
	0x802c, 0xffffffff, 0x40000000,
636
	0x915c, 0xffffffff, 0x00010000,
636
	0x915c, 0xffffffff, 0x00010000,
637
	0x9160, 0xffffffff, 0x00030002,
637
	0x9160, 0xffffffff, 0x00030002,
638
	0x9178, 0xffffffff, 0x00070000,
638
	0x9178, 0xffffffff, 0x00070000,
639
	0x917c, 0xffffffff, 0x00030002,
639
	0x917c, 0xffffffff, 0x00030002,
640
	0x9180, 0xffffffff, 0x00050004,
640
	0x9180, 0xffffffff, 0x00050004,
641
	0x918c, 0xffffffff, 0x00010006,
641
	0x918c, 0xffffffff, 0x00010006,
642
	0x9190, 0xffffffff, 0x00090008,
642
	0x9190, 0xffffffff, 0x00090008,
643
	0x9194, 0xffffffff, 0x00070000,
643
	0x9194, 0xffffffff, 0x00070000,
644
	0x9198, 0xffffffff, 0x00030002,
644
	0x9198, 0xffffffff, 0x00030002,
645
	0x919c, 0xffffffff, 0x00050004,
645
	0x919c, 0xffffffff, 0x00050004,
646
	0x91a8, 0xffffffff, 0x00010006,
646
	0x91a8, 0xffffffff, 0x00010006,
647
	0x91ac, 0xffffffff, 0x00090008,
647
	0x91ac, 0xffffffff, 0x00090008,
648
	0x91b0, 0xffffffff, 0x00070000,
648
	0x91b0, 0xffffffff, 0x00070000,
649
	0x91b4, 0xffffffff, 0x00030002,
649
	0x91b4, 0xffffffff, 0x00030002,
650
	0x91b8, 0xffffffff, 0x00050004,
650
	0x91b8, 0xffffffff, 0x00050004,
651
	0x91c4, 0xffffffff, 0x00010006,
651
	0x91c4, 0xffffffff, 0x00010006,
652
	0x91c8, 0xffffffff, 0x00090008,
652
	0x91c8, 0xffffffff, 0x00090008,
653
	0x91cc, 0xffffffff, 0x00070000,
653
	0x91cc, 0xffffffff, 0x00070000,
654
	0x91d0, 0xffffffff, 0x00030002,
654
	0x91d0, 0xffffffff, 0x00030002,
655
	0x91d4, 0xffffffff, 0x00050004,
655
	0x91d4, 0xffffffff, 0x00050004,
656
	0x91e0, 0xffffffff, 0x00010006,
656
	0x91e0, 0xffffffff, 0x00010006,
657
	0x91e4, 0xffffffff, 0x00090008,
657
	0x91e4, 0xffffffff, 0x00090008,
658
	0x91e8, 0xffffffff, 0x00000000,
658
	0x91e8, 0xffffffff, 0x00000000,
659
	0x91ec, 0xffffffff, 0x00070000,
659
	0x91ec, 0xffffffff, 0x00070000,
660
	0x91f0, 0xffffffff, 0x00030002,
660
	0x91f0, 0xffffffff, 0x00030002,
661
	0x91f4, 0xffffffff, 0x00050004,
661
	0x91f4, 0xffffffff, 0x00050004,
662
	0x9200, 0xffffffff, 0x00010006,
662
	0x9200, 0xffffffff, 0x00010006,
663
	0x9204, 0xffffffff, 0x00090008,
663
	0x9204, 0xffffffff, 0x00090008,
664
	0x9208, 0xffffffff, 0x00070000,
664
	0x9208, 0xffffffff, 0x00070000,
665
	0x920c, 0xffffffff, 0x00030002,
665
	0x920c, 0xffffffff, 0x00030002,
666
	0x9210, 0xffffffff, 0x00050004,
666
	0x9210, 0xffffffff, 0x00050004,
667
	0x921c, 0xffffffff, 0x00010006,
667
	0x921c, 0xffffffff, 0x00010006,
668
	0x9220, 0xffffffff, 0x00090008,
668
	0x9220, 0xffffffff, 0x00090008,
669
	0x9224, 0xffffffff, 0x00070000,
669
	0x9224, 0xffffffff, 0x00070000,
670
	0x9228, 0xffffffff, 0x00030002,
670
	0x9228, 0xffffffff, 0x00030002,
671
	0x922c, 0xffffffff, 0x00050004,
671
	0x922c, 0xffffffff, 0x00050004,
672
	0x9238, 0xffffffff, 0x00010006,
672
	0x9238, 0xffffffff, 0x00010006,
673
	0x923c, 0xffffffff, 0x00090008,
673
	0x923c, 0xffffffff, 0x00090008,
674
	0x9240, 0xffffffff, 0x00070000,
674
	0x9240, 0xffffffff, 0x00070000,
675
	0x9244, 0xffffffff, 0x00030002,
675
	0x9244, 0xffffffff, 0x00030002,
676
	0x9248, 0xffffffff, 0x00050004,
676
	0x9248, 0xffffffff, 0x00050004,
677
	0x9254, 0xffffffff, 0x00010006,
677
	0x9254, 0xffffffff, 0x00010006,
678
	0x9258, 0xffffffff, 0x00090008,
678
	0x9258, 0xffffffff, 0x00090008,
679
	0x925c, 0xffffffff, 0x00070000,
679
	0x925c, 0xffffffff, 0x00070000,
680
	0x9260, 0xffffffff, 0x00030002,
680
	0x9260, 0xffffffff, 0x00030002,
681
	0x9264, 0xffffffff, 0x00050004,
681
	0x9264, 0xffffffff, 0x00050004,
682
	0x9270, 0xffffffff, 0x00010006,
682
	0x9270, 0xffffffff, 0x00010006,
683
	0x9274, 0xffffffff, 0x00090008,
683
	0x9274, 0xffffffff, 0x00090008,
684
	0x9278, 0xffffffff, 0x00070000,
684
	0x9278, 0xffffffff, 0x00070000,
685
	0x927c, 0xffffffff, 0x00030002,
685
	0x927c, 0xffffffff, 0x00030002,
686
	0x9280, 0xffffffff, 0x00050004,
686
	0x9280, 0xffffffff, 0x00050004,
687
	0x928c, 0xffffffff, 0x00010006,
687
	0x928c, 0xffffffff, 0x00010006,
688
	0x9290, 0xffffffff, 0x00090008,
688
	0x9290, 0xffffffff, 0x00090008,
689
	0x9294, 0xffffffff, 0x00000000,
689
	0x9294, 0xffffffff, 0x00000000,
690
	0x929c, 0xffffffff, 0x00000001,
690
	0x929c, 0xffffffff, 0x00000001,
691
	0x802c, 0xffffffff, 0xc0000000,
691
	0x802c, 0xffffffff, 0xc0000000,
692
	0x977c, 0xffffffff, 0x00000100,
692
	0x977c, 0xffffffff, 0x00000100,
693
	0x3f80, 0xffffffff, 0x00000100,
693
	0x3f80, 0xffffffff, 0x00000100,
694
	0xa210, 0xffffffff, 0x00000100,
694
	0xa210, 0xffffffff, 0x00000100,
695
	0xa214, 0xffffffff, 0x00000100,
695
	0xa214, 0xffffffff, 0x00000100,
696
	0x4d8, 0xffffffff, 0x00000100,
696
	0x4d8, 0xffffffff, 0x00000100,
697
	0x9784, 0xffffffff, 0x00000100,
697
	0x9784, 0xffffffff, 0x00000100,
698
	0x9698, 0xffffffff, 0x00000100,
698
	0x9698, 0xffffffff, 0x00000100,
699
	0x4d4, 0xffffffff, 0x00000200,
699
	0x4d4, 0xffffffff, 0x00000200,
700
	0x30cc, 0xffffffff, 0x00000100,
700
	0x30cc, 0xffffffff, 0x00000100,
701
	0x802c, 0xffffffff, 0xc0000000
701
	0x802c, 0xffffffff, 0xc0000000
702
};
702
};
703
 
703
 
704
static const u32 supersumo_golden_registers[] =
704
static const u32 supersumo_golden_registers[] =
705
{
705
{
706
	0x5eb4, 0xffffffff, 0x00000002,
706
	0x5eb4, 0xffffffff, 0x00000002,
707
	0x5c4, 0xffffffff, 0x00000001,
707
	0x5c4, 0xffffffff, 0x00000001,
708
	0x7030, 0xffffffff, 0x00000011,
708
	0x7030, 0xffffffff, 0x00000011,
709
	0x7c30, 0xffffffff, 0x00000011,
709
	0x7c30, 0xffffffff, 0x00000011,
710
	0x6104, 0x01000300, 0x00000000,
710
	0x6104, 0x01000300, 0x00000000,
711
	0x5bc0, 0x00300000, 0x00000000,
711
	0x5bc0, 0x00300000, 0x00000000,
712
	0x8c04, 0xffffffff, 0x40600060,
712
	0x8c04, 0xffffffff, 0x40600060,
713
	0x8c08, 0xffffffff, 0x001c001c,
713
	0x8c08, 0xffffffff, 0x001c001c,
714
	0x8c20, 0xffffffff, 0x00800080,
714
	0x8c20, 0xffffffff, 0x00800080,
715
	0x8c24, 0xffffffff, 0x00800080,
715
	0x8c24, 0xffffffff, 0x00800080,
716
	0x8c18, 0xffffffff, 0x20202078,
716
	0x8c18, 0xffffffff, 0x20202078,
717
	0x8c1c, 0xffffffff, 0x00001010,
717
	0x8c1c, 0xffffffff, 0x00001010,
718
	0x918c, 0xffffffff, 0x00010006,
718
	0x918c, 0xffffffff, 0x00010006,
719
	0x91a8, 0xffffffff, 0x00010006,
719
	0x91a8, 0xffffffff, 0x00010006,
720
	0x91c4, 0xffffffff, 0x00010006,
720
	0x91c4, 0xffffffff, 0x00010006,
721
	0x91e0, 0xffffffff, 0x00010006,
721
	0x91e0, 0xffffffff, 0x00010006,
722
	0x9200, 0xffffffff, 0x00010006,
722
	0x9200, 0xffffffff, 0x00010006,
723
	0x9150, 0xffffffff, 0x6e944040,
723
	0x9150, 0xffffffff, 0x6e944040,
724
	0x917c, 0xffffffff, 0x00030002,
724
	0x917c, 0xffffffff, 0x00030002,
725
	0x9180, 0xffffffff, 0x00050004,
725
	0x9180, 0xffffffff, 0x00050004,
726
	0x9198, 0xffffffff, 0x00030002,
726
	0x9198, 0xffffffff, 0x00030002,
727
	0x919c, 0xffffffff, 0x00050004,
727
	0x919c, 0xffffffff, 0x00050004,
728
	0x91b4, 0xffffffff, 0x00030002,
728
	0x91b4, 0xffffffff, 0x00030002,
729
	0x91b8, 0xffffffff, 0x00050004,
729
	0x91b8, 0xffffffff, 0x00050004,
730
	0x91d0, 0xffffffff, 0x00030002,
730
	0x91d0, 0xffffffff, 0x00030002,
731
	0x91d4, 0xffffffff, 0x00050004,
731
	0x91d4, 0xffffffff, 0x00050004,
732
	0x91f0, 0xffffffff, 0x00030002,
732
	0x91f0, 0xffffffff, 0x00030002,
733
	0x91f4, 0xffffffff, 0x00050004,
733
	0x91f4, 0xffffffff, 0x00050004,
734
	0x915c, 0xffffffff, 0x00010000,
734
	0x915c, 0xffffffff, 0x00010000,
735
	0x9160, 0xffffffff, 0x00030002,
735
	0x9160, 0xffffffff, 0x00030002,
736
	0x3f90, 0xffff0000, 0xff000000,
736
	0x3f90, 0xffff0000, 0xff000000,
737
	0x9178, 0xffffffff, 0x00070000,
737
	0x9178, 0xffffffff, 0x00070000,
738
	0x9194, 0xffffffff, 0x00070000,
738
	0x9194, 0xffffffff, 0x00070000,
739
	0x91b0, 0xffffffff, 0x00070000,
739
	0x91b0, 0xffffffff, 0x00070000,
740
	0x91cc, 0xffffffff, 0x00070000,
740
	0x91cc, 0xffffffff, 0x00070000,
741
	0x91ec, 0xffffffff, 0x00070000,
741
	0x91ec, 0xffffffff, 0x00070000,
742
	0x9148, 0xffff0000, 0xff000000,
742
	0x9148, 0xffff0000, 0xff000000,
743
	0x9190, 0xffffffff, 0x00090008,
743
	0x9190, 0xffffffff, 0x00090008,
744
	0x91ac, 0xffffffff, 0x00090008,
744
	0x91ac, 0xffffffff, 0x00090008,
745
	0x91c8, 0xffffffff, 0x00090008,
745
	0x91c8, 0xffffffff, 0x00090008,
746
	0x91e4, 0xffffffff, 0x00090008,
746
	0x91e4, 0xffffffff, 0x00090008,
747
	0x9204, 0xffffffff, 0x00090008,
747
	0x9204, 0xffffffff, 0x00090008,
748
	0x3f94, 0xffff0000, 0xff000000,
748
	0x3f94, 0xffff0000, 0xff000000,
749
	0x914c, 0xffff0000, 0xff000000,
749
	0x914c, 0xffff0000, 0xff000000,
750
	0x929c, 0xffffffff, 0x00000001,
750
	0x929c, 0xffffffff, 0x00000001,
751
	0x8a18, 0xffffffff, 0x00000100,
751
	0x8a18, 0xffffffff, 0x00000100,
752
	0x8b28, 0xffffffff, 0x00000100,
752
	0x8b28, 0xffffffff, 0x00000100,
753
	0x9144, 0xffffffff, 0x00000100,
753
	0x9144, 0xffffffff, 0x00000100,
754
	0x5644, 0xffffffff, 0x00000100,
754
	0x5644, 0xffffffff, 0x00000100,
755
	0x9b7c, 0xffffffff, 0x00000000,
755
	0x9b7c, 0xffffffff, 0x00000000,
756
	0x8030, 0xffffffff, 0x0000100a,
756
	0x8030, 0xffffffff, 0x0000100a,
757
	0x8a14, 0xffffffff, 0x00000007,
757
	0x8a14, 0xffffffff, 0x00000007,
758
	0x8b24, 0xffffffff, 0x00ff0fff,
758
	0x8b24, 0xffffffff, 0x00ff0fff,
759
	0x8b10, 0xffffffff, 0x00000000,
759
	0x8b10, 0xffffffff, 0x00000000,
760
	0x28a4c, 0x06000000, 0x06000000,
760
	0x28a4c, 0x06000000, 0x06000000,
761
	0x4d8, 0xffffffff, 0x00000100,
761
	0x4d8, 0xffffffff, 0x00000100,
762
	0x913c, 0xffff000f, 0x0100000a,
762
	0x913c, 0xffff000f, 0x0100000a,
763
	0x960c, 0xffffffff, 0x54763210,
763
	0x960c, 0xffffffff, 0x54763210,
764
	0x88c4, 0xffffffff, 0x000000c2,
764
	0x88c4, 0xffffffff, 0x000000c2,
765
	0x88d4, 0xffffffff, 0x00000010,
765
	0x88d4, 0xffffffff, 0x00000010,
766
	0x8974, 0xffffffff, 0x00000000,
766
	0x8974, 0xffffffff, 0x00000000,
767
	0xc78, 0x00000080, 0x00000080,
767
	0xc78, 0x00000080, 0x00000080,
768
	0x5e78, 0xffffffff, 0x001000f0,
768
	0x5e78, 0xffffffff, 0x001000f0,
769
	0xd02c, 0xffffffff, 0x08421000,
769
	0xd02c, 0xffffffff, 0x08421000,
770
	0xa008, 0xffffffff, 0x00010000,
770
	0xa008, 0xffffffff, 0x00010000,
771
	0x8d00, 0xffffffff, 0x100e4848,
771
	0x8d00, 0xffffffff, 0x100e4848,
772
	0x8d04, 0xffffffff, 0x00164745,
772
	0x8d04, 0xffffffff, 0x00164745,
773
	0x8c00, 0xffffffff, 0xe4000003,
773
	0x8c00, 0xffffffff, 0xe4000003,
774
	0x8cf0, 0x1fffffff, 0x08e00620,
774
	0x8cf0, 0x1fffffff, 0x08e00620,
775
	0x28350, 0xffffffff, 0x00000000,
775
	0x28350, 0xffffffff, 0x00000000,
776
	0x9508, 0xffffffff, 0x00000002
776
	0x9508, 0xffffffff, 0x00000002
777
};
777
};
778
 
778
 
779
static const u32 sumo_golden_registers[] =
779
static const u32 sumo_golden_registers[] =
780
{
780
{
781
	0x900c, 0x00ffffff, 0x0017071f,
781
	0x900c, 0x00ffffff, 0x0017071f,
782
	0x8c18, 0xffffffff, 0x10101060,
782
	0x8c18, 0xffffffff, 0x10101060,
783
	0x8c1c, 0xffffffff, 0x00001010,
783
	0x8c1c, 0xffffffff, 0x00001010,
784
	0x8c30, 0x0000000f, 0x00000005,
784
	0x8c30, 0x0000000f, 0x00000005,
785
	0x9688, 0x0000000f, 0x00000007
785
	0x9688, 0x0000000f, 0x00000007
786
};
786
};
787
 
787
 
788
static const u32 wrestler_golden_registers[] =
788
static const u32 wrestler_golden_registers[] =
789
{
789
{
790
	0x5eb4, 0xffffffff, 0x00000002,
790
	0x5eb4, 0xffffffff, 0x00000002,
791
	0x5c4, 0xffffffff, 0x00000001,
791
	0x5c4, 0xffffffff, 0x00000001,
792
	0x7030, 0xffffffff, 0x00000011,
792
	0x7030, 0xffffffff, 0x00000011,
793
	0x7c30, 0xffffffff, 0x00000011,
793
	0x7c30, 0xffffffff, 0x00000011,
794
	0x6104, 0x01000300, 0x00000000,
794
	0x6104, 0x01000300, 0x00000000,
795
	0x5bc0, 0x00300000, 0x00000000,
795
	0x5bc0, 0x00300000, 0x00000000,
796
	0x918c, 0xffffffff, 0x00010006,
796
	0x918c, 0xffffffff, 0x00010006,
797
	0x91a8, 0xffffffff, 0x00010006,
797
	0x91a8, 0xffffffff, 0x00010006,
798
	0x9150, 0xffffffff, 0x6e944040,
798
	0x9150, 0xffffffff, 0x6e944040,
799
	0x917c, 0xffffffff, 0x00030002,
799
	0x917c, 0xffffffff, 0x00030002,
800
	0x9198, 0xffffffff, 0x00030002,
800
	0x9198, 0xffffffff, 0x00030002,
801
	0x915c, 0xffffffff, 0x00010000,
801
	0x915c, 0xffffffff, 0x00010000,
802
	0x3f90, 0xffff0000, 0xff000000,
802
	0x3f90, 0xffff0000, 0xff000000,
803
	0x9178, 0xffffffff, 0x00070000,
803
	0x9178, 0xffffffff, 0x00070000,
804
	0x9194, 0xffffffff, 0x00070000,
804
	0x9194, 0xffffffff, 0x00070000,
805
	0x9148, 0xffff0000, 0xff000000,
805
	0x9148, 0xffff0000, 0xff000000,
806
	0x9190, 0xffffffff, 0x00090008,
806
	0x9190, 0xffffffff, 0x00090008,
807
	0x91ac, 0xffffffff, 0x00090008,
807
	0x91ac, 0xffffffff, 0x00090008,
808
	0x3f94, 0xffff0000, 0xff000000,
808
	0x3f94, 0xffff0000, 0xff000000,
809
	0x914c, 0xffff0000, 0xff000000,
809
	0x914c, 0xffff0000, 0xff000000,
810
	0x929c, 0xffffffff, 0x00000001,
810
	0x929c, 0xffffffff, 0x00000001,
811
	0x8a18, 0xffffffff, 0x00000100,
811
	0x8a18, 0xffffffff, 0x00000100,
812
	0x8b28, 0xffffffff, 0x00000100,
812
	0x8b28, 0xffffffff, 0x00000100,
813
	0x9144, 0xffffffff, 0x00000100,
813
	0x9144, 0xffffffff, 0x00000100,
814
	0x9b7c, 0xffffffff, 0x00000000,
814
	0x9b7c, 0xffffffff, 0x00000000,
815
	0x8030, 0xffffffff, 0x0000100a,
815
	0x8030, 0xffffffff, 0x0000100a,
816
	0x8a14, 0xffffffff, 0x00000001,
816
	0x8a14, 0xffffffff, 0x00000001,
817
	0x8b24, 0xffffffff, 0x00ff0fff,
817
	0x8b24, 0xffffffff, 0x00ff0fff,
818
	0x8b10, 0xffffffff, 0x00000000,
818
	0x8b10, 0xffffffff, 0x00000000,
819
	0x28a4c, 0x06000000, 0x06000000,
819
	0x28a4c, 0x06000000, 0x06000000,
820
	0x4d8, 0xffffffff, 0x00000100,
820
	0x4d8, 0xffffffff, 0x00000100,
821
	0x913c, 0xffff000f, 0x0100000a,
821
	0x913c, 0xffff000f, 0x0100000a,
822
	0x960c, 0xffffffff, 0x54763210,
822
	0x960c, 0xffffffff, 0x54763210,
823
	0x88c4, 0xffffffff, 0x000000c2,
823
	0x88c4, 0xffffffff, 0x000000c2,
824
	0x88d4, 0xffffffff, 0x00000010,
824
	0x88d4, 0xffffffff, 0x00000010,
825
	0x8974, 0xffffffff, 0x00000000,
825
	0x8974, 0xffffffff, 0x00000000,
826
	0xc78, 0x00000080, 0x00000080,
826
	0xc78, 0x00000080, 0x00000080,
827
	0x5e78, 0xffffffff, 0x001000f0,
827
	0x5e78, 0xffffffff, 0x001000f0,
828
	0xd02c, 0xffffffff, 0x08421000,
828
	0xd02c, 0xffffffff, 0x08421000,
829
	0xa008, 0xffffffff, 0x00010000,
829
	0xa008, 0xffffffff, 0x00010000,
830
	0x8d00, 0xffffffff, 0x100e4848,
830
	0x8d00, 0xffffffff, 0x100e4848,
831
	0x8d04, 0xffffffff, 0x00164745,
831
	0x8d04, 0xffffffff, 0x00164745,
832
	0x8c00, 0xffffffff, 0xe4000003,
832
	0x8c00, 0xffffffff, 0xe4000003,
833
	0x8cf0, 0x1fffffff, 0x08e00410,
833
	0x8cf0, 0x1fffffff, 0x08e00410,
834
	0x28350, 0xffffffff, 0x00000000,
834
	0x28350, 0xffffffff, 0x00000000,
835
	0x9508, 0xffffffff, 0x00000002,
835
	0x9508, 0xffffffff, 0x00000002,
836
	0x900c, 0xffffffff, 0x0017071f,
836
	0x900c, 0xffffffff, 0x0017071f,
837
	0x8c18, 0xffffffff, 0x10101060,
837
	0x8c18, 0xffffffff, 0x10101060,
838
	0x8c1c, 0xffffffff, 0x00001010
838
	0x8c1c, 0xffffffff, 0x00001010
839
};
839
};
840
 
840
 
841
static const u32 barts_golden_registers[] =
841
static const u32 barts_golden_registers[] =
842
{
842
{
843
	0x5eb4, 0xffffffff, 0x00000002,
843
	0x5eb4, 0xffffffff, 0x00000002,
844
	0x5e78, 0x8f311ff1, 0x001000f0,
844
	0x5e78, 0x8f311ff1, 0x001000f0,
845
	0x3f90, 0xffff0000, 0xff000000,
845
	0x3f90, 0xffff0000, 0xff000000,
846
	0x9148, 0xffff0000, 0xff000000,
846
	0x9148, 0xffff0000, 0xff000000,
847
	0x3f94, 0xffff0000, 0xff000000,
847
	0x3f94, 0xffff0000, 0xff000000,
848
	0x914c, 0xffff0000, 0xff000000,
848
	0x914c, 0xffff0000, 0xff000000,
849
	0xc78, 0x00000080, 0x00000080,
849
	0xc78, 0x00000080, 0x00000080,
850
	0xbd4, 0x70073777, 0x00010001,
850
	0xbd4, 0x70073777, 0x00010001,
851
	0xd02c, 0xbfffff1f, 0x08421000,
851
	0xd02c, 0xbfffff1f, 0x08421000,
852
	0xd0b8, 0x03773777, 0x02011003,
852
	0xd0b8, 0x03773777, 0x02011003,
853
	0x5bc0, 0x00200000, 0x50100000,
853
	0x5bc0, 0x00200000, 0x50100000,
854
	0x98f8, 0x33773777, 0x02011003,
854
	0x98f8, 0x33773777, 0x02011003,
855
	0x98fc, 0xffffffff, 0x76543210,
855
	0x98fc, 0xffffffff, 0x76543210,
856
	0x7030, 0x31000311, 0x00000011,
856
	0x7030, 0x31000311, 0x00000011,
857
	0x2f48, 0x00000007, 0x02011003,
857
	0x2f48, 0x00000007, 0x02011003,
858
	0x6b28, 0x00000010, 0x00000012,
858
	0x6b28, 0x00000010, 0x00000012,
859
	0x7728, 0x00000010, 0x00000012,
859
	0x7728, 0x00000010, 0x00000012,
860
	0x10328, 0x00000010, 0x00000012,
860
	0x10328, 0x00000010, 0x00000012,
861
	0x10f28, 0x00000010, 0x00000012,
861
	0x10f28, 0x00000010, 0x00000012,
862
	0x11b28, 0x00000010, 0x00000012,
862
	0x11b28, 0x00000010, 0x00000012,
863
	0x12728, 0x00000010, 0x00000012,
863
	0x12728, 0x00000010, 0x00000012,
864
	0x240c, 0x000007ff, 0x00000380,
864
	0x240c, 0x000007ff, 0x00000380,
865
	0x8a14, 0xf000001f, 0x00000007,
865
	0x8a14, 0xf000001f, 0x00000007,
866
	0x8b24, 0x3fff3fff, 0x00ff0fff,
866
	0x8b24, 0x3fff3fff, 0x00ff0fff,
867
	0x8b10, 0x0000ff0f, 0x00000000,
867
	0x8b10, 0x0000ff0f, 0x00000000,
868
	0x28a4c, 0x07ffffff, 0x06000000,
868
	0x28a4c, 0x07ffffff, 0x06000000,
869
	0x10c, 0x00000001, 0x00010003,
869
	0x10c, 0x00000001, 0x00010003,
870
	0xa02c, 0xffffffff, 0x0000009b,
870
	0xa02c, 0xffffffff, 0x0000009b,
871
	0x913c, 0x0000000f, 0x0100000a,
871
	0x913c, 0x0000000f, 0x0100000a,
872
	0x8d00, 0xffff7f7f, 0x100e4848,
872
	0x8d00, 0xffff7f7f, 0x100e4848,
873
	0x8d04, 0x00ffffff, 0x00164745,
873
	0x8d04, 0x00ffffff, 0x00164745,
874
	0x8c00, 0xfffc0003, 0xe4000003,
874
	0x8c00, 0xfffc0003, 0xe4000003,
875
	0x8c04, 0xf8ff00ff, 0x40600060,
875
	0x8c04, 0xf8ff00ff, 0x40600060,
876
	0x8c08, 0x00ff00ff, 0x001c001c,
876
	0x8c08, 0x00ff00ff, 0x001c001c,
877
	0x8cf0, 0x1fff1fff, 0x08e00620,
877
	0x8cf0, 0x1fff1fff, 0x08e00620,
878
	0x8c20, 0x0fff0fff, 0x00800080,
878
	0x8c20, 0x0fff0fff, 0x00800080,
879
	0x8c24, 0x0fff0fff, 0x00800080,
879
	0x8c24, 0x0fff0fff, 0x00800080,
880
	0x8c18, 0xffffffff, 0x20202078,
880
	0x8c18, 0xffffffff, 0x20202078,
881
	0x8c1c, 0x0000ffff, 0x00001010,
881
	0x8c1c, 0x0000ffff, 0x00001010,
882
	0x28350, 0x00000f01, 0x00000000,
882
	0x28350, 0x00000f01, 0x00000000,
883
	0x9508, 0x3700001f, 0x00000002,
883
	0x9508, 0x3700001f, 0x00000002,
884
	0x960c, 0xffffffff, 0x54763210,
884
	0x960c, 0xffffffff, 0x54763210,
885
	0x88c4, 0x001f3ae3, 0x000000c2,
885
	0x88c4, 0x001f3ae3, 0x000000c2,
886
	0x88d4, 0x0000001f, 0x00000010,
886
	0x88d4, 0x0000001f, 0x00000010,
887
	0x8974, 0xffffffff, 0x00000000
887
	0x8974, 0xffffffff, 0x00000000
888
};
888
};
889
 
889
 
890
static const u32 turks_golden_registers[] =
890
static const u32 turks_golden_registers[] =
891
{
891
{
892
	0x5eb4, 0xffffffff, 0x00000002,
892
	0x5eb4, 0xffffffff, 0x00000002,
893
	0x5e78, 0x8f311ff1, 0x001000f0,
893
	0x5e78, 0x8f311ff1, 0x001000f0,
894
	0x8c8, 0x00003000, 0x00001070,
894
	0x8c8, 0x00003000, 0x00001070,
895
	0x8cc, 0x000fffff, 0x00040035,
895
	0x8cc, 0x000fffff, 0x00040035,
896
	0x3f90, 0xffff0000, 0xfff00000,
896
	0x3f90, 0xffff0000, 0xfff00000,
897
	0x9148, 0xffff0000, 0xfff00000,
897
	0x9148, 0xffff0000, 0xfff00000,
898
	0x3f94, 0xffff0000, 0xfff00000,
898
	0x3f94, 0xffff0000, 0xfff00000,
899
	0x914c, 0xffff0000, 0xfff00000,
899
	0x914c, 0xffff0000, 0xfff00000,
900
	0xc78, 0x00000080, 0x00000080,
900
	0xc78, 0x00000080, 0x00000080,
901
	0xbd4, 0x00073007, 0x00010002,
901
	0xbd4, 0x00073007, 0x00010002,
902
	0xd02c, 0xbfffff1f, 0x08421000,
902
	0xd02c, 0xbfffff1f, 0x08421000,
903
	0xd0b8, 0x03773777, 0x02010002,
903
	0xd0b8, 0x03773777, 0x02010002,
904
	0x5bc0, 0x00200000, 0x50100000,
904
	0x5bc0, 0x00200000, 0x50100000,
905
	0x98f8, 0x33773777, 0x00010002,
905
	0x98f8, 0x33773777, 0x00010002,
906
	0x98fc, 0xffffffff, 0x33221100,
906
	0x98fc, 0xffffffff, 0x33221100,
907
	0x7030, 0x31000311, 0x00000011,
907
	0x7030, 0x31000311, 0x00000011,
908
	0x2f48, 0x33773777, 0x00010002,
908
	0x2f48, 0x33773777, 0x00010002,
909
	0x6b28, 0x00000010, 0x00000012,
909
	0x6b28, 0x00000010, 0x00000012,
910
	0x7728, 0x00000010, 0x00000012,
910
	0x7728, 0x00000010, 0x00000012,
911
	0x10328, 0x00000010, 0x00000012,
911
	0x10328, 0x00000010, 0x00000012,
912
	0x10f28, 0x00000010, 0x00000012,
912
	0x10f28, 0x00000010, 0x00000012,
913
	0x11b28, 0x00000010, 0x00000012,
913
	0x11b28, 0x00000010, 0x00000012,
914
	0x12728, 0x00000010, 0x00000012,
914
	0x12728, 0x00000010, 0x00000012,
915
	0x240c, 0x000007ff, 0x00000380,
915
	0x240c, 0x000007ff, 0x00000380,
916
	0x8a14, 0xf000001f, 0x00000007,
916
	0x8a14, 0xf000001f, 0x00000007,
917
	0x8b24, 0x3fff3fff, 0x00ff0fff,
917
	0x8b24, 0x3fff3fff, 0x00ff0fff,
918
	0x8b10, 0x0000ff0f, 0x00000000,
918
	0x8b10, 0x0000ff0f, 0x00000000,
919
	0x28a4c, 0x07ffffff, 0x06000000,
919
	0x28a4c, 0x07ffffff, 0x06000000,
920
	0x10c, 0x00000001, 0x00010003,
920
	0x10c, 0x00000001, 0x00010003,
921
	0xa02c, 0xffffffff, 0x0000009b,
921
	0xa02c, 0xffffffff, 0x0000009b,
922
	0x913c, 0x0000000f, 0x0100000a,
922
	0x913c, 0x0000000f, 0x0100000a,
923
	0x8d00, 0xffff7f7f, 0x100e4848,
923
	0x8d00, 0xffff7f7f, 0x100e4848,
924
	0x8d04, 0x00ffffff, 0x00164745,
924
	0x8d04, 0x00ffffff, 0x00164745,
925
	0x8c00, 0xfffc0003, 0xe4000003,
925
	0x8c00, 0xfffc0003, 0xe4000003,
926
	0x8c04, 0xf8ff00ff, 0x40600060,
926
	0x8c04, 0xf8ff00ff, 0x40600060,
927
	0x8c08, 0x00ff00ff, 0x001c001c,
927
	0x8c08, 0x00ff00ff, 0x001c001c,
928
	0x8cf0, 0x1fff1fff, 0x08e00410,
928
	0x8cf0, 0x1fff1fff, 0x08e00410,
929
	0x8c20, 0x0fff0fff, 0x00800080,
929
	0x8c20, 0x0fff0fff, 0x00800080,
930
	0x8c24, 0x0fff0fff, 0x00800080,
930
	0x8c24, 0x0fff0fff, 0x00800080,
931
	0x8c18, 0xffffffff, 0x20202078,
931
	0x8c18, 0xffffffff, 0x20202078,
932
	0x8c1c, 0x0000ffff, 0x00001010,
932
	0x8c1c, 0x0000ffff, 0x00001010,
933
	0x28350, 0x00000f01, 0x00000000,
933
	0x28350, 0x00000f01, 0x00000000,
934
	0x9508, 0x3700001f, 0x00000002,
934
	0x9508, 0x3700001f, 0x00000002,
935
	0x960c, 0xffffffff, 0x54763210,
935
	0x960c, 0xffffffff, 0x54763210,
936
	0x88c4, 0x001f3ae3, 0x000000c2,
936
	0x88c4, 0x001f3ae3, 0x000000c2,
937
	0x88d4, 0x0000001f, 0x00000010,
937
	0x88d4, 0x0000001f, 0x00000010,
938
	0x8974, 0xffffffff, 0x00000000
938
	0x8974, 0xffffffff, 0x00000000
939
};
939
};
940
 
940
 
941
/* Golden register settings for CAICOS (entry-level Northern Islands ASIC).
 * Flat list of {register offset, mask, value} triples consumed by
 * radeon_program_register_sequence() at init time; presumably only the
 * bits covered by the mask are updated -- verify against that helper.
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
991
 
991
 
992
/* Apply the per-ASIC "golden" register tables for the Evergreen/Northern
 * Islands family.  Each case programs one or more of the static tables
 * defined above via radeon_program_register_sequence(); unknown families
 * are left untouched.
 */
static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		/* Hemlock is a dual-Cypress board; same tables apply */
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		/* Sumo2 takes the SuperSumo table plus its own additions */
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		/* no golden registers for this family */
		break;
	}
}
1077
 
1077
 
1078
/**
 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
 *
 * @rdev: radeon_device pointer
 * @reg: register offset in bytes
 * @val: register value
 *
 * Whitelist check for the RADEON_INFO_READ_REG ioctl path: only a small
 * set of read-only status registers may be exposed to userspace.
 *
 * Returns 0 for success or -EINVAL for an invalid register
 *
 */
int evergreen_get_allowed_info_register(struct radeon_device *rdev,
					u32 reg, u32 *val)
{
	switch (reg) {
	case GRBM_STATUS:
	case GRBM_STATUS_SE0:
	case GRBM_STATUS_SE1:
	case SRBM_STATUS:
	case SRBM_STATUS2:
	case DMA_STATUS_REG:
	case UVD_STATUS:
		*val = RREG32(reg);
		return 0;
	default:
		/* anything not on the whitelist is rejected */
		return -EINVAL;
	}
}
1105
 
1105
 
1106
/* Decode the packed RADEON_TILING_EG_* fields from @tiling_flags and
 * translate each raw value (1/2/4/8) into the corresponding
 * EVERGREEN_ADDR_SURF_* hardware encoding.  Out-of-range values fall
 * through to the "1" encoding via the default label.
 *
 * @tiling_flags: packed tiling flags from the BO
 * @bankw:        out - bank width encoding
 * @bankh:        out - bank height encoding
 * @mtaspect:     out - macro tile aspect encoding
 * @tile_split:   out - tile split field (extracted, not re-encoded)
 */
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}
1136
 
1136
 
1137
/**
 * sumo_set_uvd_clock - program one UVD clock (VCLK or DCLK)
 *
 * @rdev: radeon_device pointer
 * @clock: requested clock frequency
 * @cntl_reg: clock control register offset
 * @status_reg: clock status register offset
 *
 * Asks the ATOM BIOS for engine-PLL dividers matching @clock, programs
 * the post divider into @cntl_reg, then polls @status_reg until the
 * DCLK_STATUS bit reports the clock has settled.
 *
 * Returns 0 on success, a negative error code from the atombios divider
 * lookup, or -ETIMEDOUT if the clock does not settle within ~1 second.
 */
static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	/* NOTE: the rendered source had this call corrupted to "÷rs"
	 * (mangled "&dividers") and indented with spaces; both fixed. */
	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

	/* wait up to 100 * 10ms for the new clock to report stable */
	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}
1160
 
1160
 
1161
/**
 * sumo_set_uvd_clocks - set both UVD clocks
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock
 * @dclk: requested UVD decode clock
 *
 * Programs VCLK and DCLK via sumo_set_uvd_clock() and records the
 * resulting frequencies (in MHz) in the low/high halves of CG_SCRATCH1.
 * On early failure the scratch register is still written with whatever
 * was accumulated so far.
 *
 * Returns 0 on success or the first error from sumo_set_uvd_clock().
 */
int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	int r = 0;
	u32 cg_scratch = RREG32(CG_SCRATCH1);

	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (r)
		goto done;
	/* low 16 bits of the scratch reg hold the VCLK in MHz */
	cg_scratch &= 0xffff0000;
	cg_scratch |= vclk / 100; /* Mhz */

	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
	if (r)
		goto done;
	/* high 16 bits hold the DCLK in MHz */
	cg_scratch &= 0x0000ffff;
	cg_scratch |= (dclk / 100) << 16; /* Mhz */

done:
	WREG32(CG_SCRATCH1, cg_scratch);

	return r;
}
1183
 
1183
 
1184
/**
 * evergreen_set_uvd_clocks - set the UVD clocks via the shared UPLL
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (0 puts the PLL to sleep in bypass)
 * @dclk: requested UVD decode clock (0 puts the PLL to sleep in bypass)
 *
 * Reprograms the UPLL for the requested clocks.  The sequence below is
 * order-dependent hardware bring-up (bypass -> reprogram -> settle ->
 * un-bypass); do not reorder the register writes.
 *
 * Returns 0 on success or a negative error code from the divider
 * calculation / PLL control request helpers.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute feedback and post dividers for the requested clocks */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* select the spare bit depending on the feedback divider range */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1272
 
1272
 
1273
/* Sanity-check the PCIe max read request size and force it to 512 bytes
 * if the current encoding is one of the values known to cause problems.
 */
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
	/* convert the byte size to its log2-based encoding (ffs(128) == 8) */
	v = ffs(readrq) - 8;
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
}
1286
 
1286
 
1287
/* Program the FMT (bit-depth reduction) block for the crtc feeding
 * @encoder: pick truncation or dithering based on the monitor's bpc and
 * the connector's dither property, then write FMT_BIT_DEPTH_CONTROL.
 * LVDS/eDP (handled by atom) and analog DACs are skipped.
 */
void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	/* unknown bpc: leave the FMT block alone */
	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}
1342
 
1342
 
1343
static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1343
static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1344
{
1344
{
1345
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1345
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1346
		return true;
1346
		return true;
1347
	else
1347
	else
1348
		return false;
1348
		return false;
1349
}
1349
}
1350
 
1350
 
1351
static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1351
static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1352
{
1352
{
1353
	u32 pos1, pos2;
1353
	u32 pos1, pos2;
1354
 
1354
 
1355
	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1355
	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1356
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1356
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1357
 
1357
 
1358
	if (pos1 != pos2)
1358
	if (pos1 != pos2)
1359
		return true;
1359
		return true;
1360
	else
1360
	else
1361
		return false;
1361
		return false;
1362
}
1362
}
1363
 
1363
 
1364
/**
 * dce4_wait_for_vblank - vblank wait asic callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 * Busy-waits; bails out of either loop if the position counter stops
 * moving (crtc disabled or hung), checked once every 100 iterations.
 */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	/* nothing to wait on if the crtc is not enabled */
	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	/* now wait for the next vblank to begin */
	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}
1399
 
1399
 
1400
/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to cleanup pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 *
 * Triggers the actual pageflip by updating the primary
 * surface base address (evergreen+).
 */
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* update the scanout addresses; high dword first so the low-dword
	 * write latches a consistent 64-bit address */
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);
	/* post the write */
	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
}
1422
 
1422
 
1423
/**
1423
/**
1424
 * evergreen_page_flip_pending - check if page flip is still pending
1424
 * evergreen_page_flip_pending - check if page flip is still pending
1425
 *
1425
 *
1426
 * @rdev: radeon_device pointer
1426
 * @rdev: radeon_device pointer
1427
 * @crtc_id: crtc to check
1427
 * @crtc_id: crtc to check
1428
 *
1428
 *
1429
 * Returns the current update pending status.
1429
 * Returns the current update pending status.
1430
 */
1430
 */
1431
bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1431
bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1432
{
1432
{
1433
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1433
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1434
 
1434
 
1435
	/* Return current update_pending status: */
1435
	/* Return current update_pending status: */
1436
	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1436
	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1437
		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1437
		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1438
}
1438
}
1439
 
1439
 
1440
/* get temperature in millidegrees */
1440
/* get temperature in millidegrees */
1441
int evergreen_get_temp(struct radeon_device *rdev)
1441
int evergreen_get_temp(struct radeon_device *rdev)
1442
{
1442
{
1443
	u32 temp, toffset;
1443
	u32 temp, toffset;
1444
	int actual_temp = 0;
1444
	int actual_temp = 0;
1445
 
1445
 
1446
	if (rdev->family == CHIP_JUNIPER) {
1446
	if (rdev->family == CHIP_JUNIPER) {
1447
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1447
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1448
			TOFFSET_SHIFT;
1448
			TOFFSET_SHIFT;
1449
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1449
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1450
			TS0_ADC_DOUT_SHIFT;
1450
			TS0_ADC_DOUT_SHIFT;
1451
 
1451
 
1452
		if (toffset & 0x100)
1452
		if (toffset & 0x100)
1453
			actual_temp = temp / 2 - (0x200 - toffset);
1453
			actual_temp = temp / 2 - (0x200 - toffset);
1454
		else
1454
		else
1455
			actual_temp = temp / 2 + toffset;
1455
			actual_temp = temp / 2 + toffset;
1456
 
1456
 
1457
		actual_temp = actual_temp * 1000;
1457
		actual_temp = actual_temp * 1000;
1458
 
1458
 
1459
	} else {
1459
	} else {
1460
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1460
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1461
			ASIC_T_SHIFT;
1461
			ASIC_T_SHIFT;
1462
 
1462
 
1463
		if (temp & 0x400)
1463
		if (temp & 0x400)
1464
			actual_temp = -256;
1464
			actual_temp = -256;
1465
		else if (temp & 0x200)
1465
		else if (temp & 0x200)
1466
			actual_temp = 255;
1466
			actual_temp = 255;
1467
		else if (temp & 0x100) {
1467
		else if (temp & 0x100) {
1468
			actual_temp = temp & 0x1ff;
1468
			actual_temp = temp & 0x1ff;
1469
			actual_temp |= ~0x1ff;
1469
			actual_temp |= ~0x1ff;
1470
		} else
1470
		} else
1471
			actual_temp = temp & 0xff;
1471
			actual_temp = temp & 0xff;
1472
 
1472
 
1473
		actual_temp = (actual_temp * 1000) / 2;
1473
		actual_temp = (actual_temp * 1000) / 2;
1474
	}
1474
	}
1475
 
1475
 
1476
	return actual_temp;
1476
	return actual_temp;
1477
}
1477
}
1478
 
1478
 
1479
int sumo_get_temp(struct radeon_device *rdev)
1479
int sumo_get_temp(struct radeon_device *rdev)
1480
{
1480
{
1481
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1481
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1482
	int actual_temp = temp - 49;
1482
	int actual_temp = temp - 49;
1483
 
1483
 
1484
	return actual_temp * 1000;
1484
	return actual_temp * 1000;
1485
}
1485
}
1486
 
1486
 
1487
/**
1487
/**
1488
 * sumo_pm_init_profile - Initialize power profiles callback.
1488
 * sumo_pm_init_profile - Initialize power profiles callback.
1489
 *
1489
 *
1490
 * @rdev: radeon_device pointer
1490
 * @rdev: radeon_device pointer
1491
 *
1491
 *
1492
 * Initialize the power states used in profile mode
1492
 * Initialize the power states used in profile mode
1493
 * (sumo, trinity, SI).
1493
 * (sumo, trinity, SI).
1494
 * Used for profile mode only.
1494
 * Used for profile mode only.
1495
 */
1495
 */
1496
void sumo_pm_init_profile(struct radeon_device *rdev)
1496
void sumo_pm_init_profile(struct radeon_device *rdev)
1497
{
1497
{
1498
	int idx;
1498
	int idx;
1499
 
1499
 
1500
	/* default */
1500
	/* default */
1501
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1501
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1502
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1502
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1503
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1503
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1504
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1504
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1505
 
1505
 
1506
	/* low,mid sh/mh */
1506
	/* low,mid sh/mh */
1507
	if (rdev->flags & RADEON_IS_MOBILITY)
1507
	if (rdev->flags & RADEON_IS_MOBILITY)
1508
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1508
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1509
	else
1509
	else
1510
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1510
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1511
 
1511
 
1512
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1512
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1513
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1513
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1514
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1514
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1515
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1515
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1516
 
1516
 
1517
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1517
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1518
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1518
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1519
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1519
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1520
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1520
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1521
 
1521
 
1522
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1522
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1523
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1523
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1524
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1524
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1525
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1525
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1526
 
1526
 
1527
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1527
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1528
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1528
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1529
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1529
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1530
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1530
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1531
 
1531
 
1532
	/* high sh/mh */
1532
	/* high sh/mh */
1533
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1533
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1534
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1534
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1535
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1535
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1536
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1536
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1537
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1537
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1538
		rdev->pm.power_state[idx].num_clock_modes - 1;
1538
		rdev->pm.power_state[idx].num_clock_modes - 1;
1539
 
1539
 
1540
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1540
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1541
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1541
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1542
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1542
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1543
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1543
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1544
		rdev->pm.power_state[idx].num_clock_modes - 1;
1544
		rdev->pm.power_state[idx].num_clock_modes - 1;
1545
}
1545
}
1546
 
1546
 
1547
/**
1547
/**
1548
 * btc_pm_init_profile - Initialize power profiles callback.
1548
 * btc_pm_init_profile - Initialize power profiles callback.
1549
 *
1549
 *
1550
 * @rdev: radeon_device pointer
1550
 * @rdev: radeon_device pointer
1551
 *
1551
 *
1552
 * Initialize the power states used in profile mode
1552
 * Initialize the power states used in profile mode
1553
 * (BTC, cayman).
1553
 * (BTC, cayman).
1554
 * Used for profile mode only.
1554
 * Used for profile mode only.
1555
 */
1555
 */
1556
void btc_pm_init_profile(struct radeon_device *rdev)
1556
void btc_pm_init_profile(struct radeon_device *rdev)
1557
{
1557
{
1558
	int idx;
1558
	int idx;
1559
 
1559
 
1560
	/* default */
1560
	/* default */
1561
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1561
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1562
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1562
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1563
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1563
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1564
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1564
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1565
	/* starting with BTC, there is one state that is used for both
1565
	/* starting with BTC, there is one state that is used for both
1566
	 * MH and SH.  Difference is that we always use the high clock index for
1566
	 * MH and SH.  Difference is that we always use the high clock index for
1567
	 * mclk.
1567
	 * mclk.
1568
	 */
1568
	 */
1569
	if (rdev->flags & RADEON_IS_MOBILITY)
1569
	if (rdev->flags & RADEON_IS_MOBILITY)
1570
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1570
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1571
	else
1571
	else
1572
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1572
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1573
	/* low sh */
1573
	/* low sh */
1574
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1574
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1575
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1575
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1576
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1576
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1577
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1577
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1578
	/* mid sh */
1578
	/* mid sh */
1579
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1579
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1580
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1580
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1581
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1581
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1582
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1582
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1583
	/* high sh */
1583
	/* high sh */
1584
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1584
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1585
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1585
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1586
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1586
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1587
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1587
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1588
	/* low mh */
1588
	/* low mh */
1589
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1589
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1590
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1590
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1591
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1591
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1592
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1592
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1593
	/* mid mh */
1593
	/* mid mh */
1594
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1594
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1595
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1595
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1596
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1596
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1597
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1597
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1598
	/* high mh */
1598
	/* high mh */
1599
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1599
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1600
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1600
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1601
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1601
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1602
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1602
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1603
}
1603
}
1604
 
1604
 
1605
/**
1605
/**
1606
 * evergreen_pm_misc - set additional pm hw parameters callback.
1606
 * evergreen_pm_misc - set additional pm hw parameters callback.
1607
 *
1607
 *
1608
 * @rdev: radeon_device pointer
1608
 * @rdev: radeon_device pointer
1609
 *
1609
 *
1610
 * Set non-clock parameters associated with a power state
1610
 * Set non-clock parameters associated with a power state
1611
 * (voltage, etc.) (evergreen+).
1611
 * (voltage, etc.) (evergreen+).
1612
 */
1612
 */
1613
void evergreen_pm_misc(struct radeon_device *rdev)
1613
void evergreen_pm_misc(struct radeon_device *rdev)
1614
{
1614
{
1615
	int req_ps_idx = rdev->pm.requested_power_state_index;
1615
	int req_ps_idx = rdev->pm.requested_power_state_index;
1616
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1616
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1617
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1617
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1618
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1618
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1619
 
1619
 
1620
	if (voltage->type == VOLTAGE_SW) {
1620
	if (voltage->type == VOLTAGE_SW) {
1621
		/* 0xff0x are flags rather then an actual voltage */
1621
		/* 0xff0x are flags rather then an actual voltage */
1622
		if ((voltage->voltage & 0xff00) == 0xff00)
1622
		if ((voltage->voltage & 0xff00) == 0xff00)
1623
			return;
1623
			return;
1624
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1624
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1625
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1625
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1626
			rdev->pm.current_vddc = voltage->voltage;
1626
			rdev->pm.current_vddc = voltage->voltage;
1627
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1627
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1628
		}
1628
		}
1629
 
1629
 
1630
		/* starting with BTC, there is one state that is used for both
1630
		/* starting with BTC, there is one state that is used for both
1631
		 * MH and SH.  Difference is that we always use the high clock index for
1631
		 * MH and SH.  Difference is that we always use the high clock index for
1632
		 * mclk and vddci.
1632
		 * mclk and vddci.
1633
		 */
1633
		 */
1634
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1634
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1635
		    (rdev->family >= CHIP_BARTS) &&
1635
		    (rdev->family >= CHIP_BARTS) &&
1636
		    rdev->pm.active_crtc_count &&
1636
		    rdev->pm.active_crtc_count &&
1637
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1637
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1638
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1638
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1639
			voltage = &rdev->pm.power_state[req_ps_idx].
1639
			voltage = &rdev->pm.power_state[req_ps_idx].
1640
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1640
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1641
 
1641
 
1642
		/* 0xff0x are flags rather then an actual voltage */
1642
		/* 0xff0x are flags rather then an actual voltage */
1643
		if ((voltage->vddci & 0xff00) == 0xff00)
1643
		if ((voltage->vddci & 0xff00) == 0xff00)
1644
			return;
1644
			return;
1645
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1645
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1646
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1646
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1647
			rdev->pm.current_vddci = voltage->vddci;
1647
			rdev->pm.current_vddci = voltage->vddci;
1648
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1648
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1649
		}
1649
		}
1650
	}
1650
	}
1651
}
1651
}
1652
 
1652
 
1653
/**
1653
/**
1654
 * evergreen_pm_prepare - pre-power state change callback.
1654
 * evergreen_pm_prepare - pre-power state change callback.
1655
 *
1655
 *
1656
 * @rdev: radeon_device pointer
1656
 * @rdev: radeon_device pointer
1657
 *
1657
 *
1658
 * Prepare for a power state change (evergreen+).
1658
 * Prepare for a power state change (evergreen+).
1659
 */
1659
 */
1660
void evergreen_pm_prepare(struct radeon_device *rdev)
1660
void evergreen_pm_prepare(struct radeon_device *rdev)
1661
{
1661
{
1662
	struct drm_device *ddev = rdev->ddev;
1662
	struct drm_device *ddev = rdev->ddev;
1663
	struct drm_crtc *crtc;
1663
	struct drm_crtc *crtc;
1664
	struct radeon_crtc *radeon_crtc;
1664
	struct radeon_crtc *radeon_crtc;
1665
	u32 tmp;
1665
	u32 tmp;
1666
 
1666
 
1667
	/* disable any active CRTCs */
1667
	/* disable any active CRTCs */
1668
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1668
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1669
		radeon_crtc = to_radeon_crtc(crtc);
1669
		radeon_crtc = to_radeon_crtc(crtc);
1670
		if (radeon_crtc->enabled) {
1670
		if (radeon_crtc->enabled) {
1671
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1671
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1672
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1672
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1673
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1673
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1674
		}
1674
		}
1675
	}
1675
	}
1676
}
1676
}
1677
 
1677
 
1678
/**
1678
/**
1679
 * evergreen_pm_finish - post-power state change callback.
1679
 * evergreen_pm_finish - post-power state change callback.
1680
 *
1680
 *
1681
 * @rdev: radeon_device pointer
1681
 * @rdev: radeon_device pointer
1682
 *
1682
 *
1683
 * Clean up after a power state change (evergreen+).
1683
 * Clean up after a power state change (evergreen+).
1684
 */
1684
 */
1685
void evergreen_pm_finish(struct radeon_device *rdev)
1685
void evergreen_pm_finish(struct radeon_device *rdev)
1686
{
1686
{
1687
	struct drm_device *ddev = rdev->ddev;
1687
	struct drm_device *ddev = rdev->ddev;
1688
	struct drm_crtc *crtc;
1688
	struct drm_crtc *crtc;
1689
	struct radeon_crtc *radeon_crtc;
1689
	struct radeon_crtc *radeon_crtc;
1690
	u32 tmp;
1690
	u32 tmp;
1691
 
1691
 
1692
	/* enable any active CRTCs */
1692
	/* enable any active CRTCs */
1693
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1693
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1694
		radeon_crtc = to_radeon_crtc(crtc);
1694
		radeon_crtc = to_radeon_crtc(crtc);
1695
		if (radeon_crtc->enabled) {
1695
		if (radeon_crtc->enabled) {
1696
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1696
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1697
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1697
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1698
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1698
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1699
		}
1699
		}
1700
	}
1700
	}
1701
}
1701
}
1702
 
1702
 
1703
/**
1703
/**
1704
 * evergreen_hpd_sense - hpd sense callback.
1704
 * evergreen_hpd_sense - hpd sense callback.
1705
 *
1705
 *
1706
 * @rdev: radeon_device pointer
1706
 * @rdev: radeon_device pointer
1707
 * @hpd: hpd (hotplug detect) pin
1707
 * @hpd: hpd (hotplug detect) pin
1708
 *
1708
 *
1709
 * Checks if a digital monitor is connected (evergreen+).
1709
 * Checks if a digital monitor is connected (evergreen+).
1710
 * Returns true if connected, false if not connected.
1710
 * Returns true if connected, false if not connected.
1711
 */
1711
 */
1712
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1712
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1713
{
1713
{
1714
	bool connected = false;
1714
	bool connected = false;
1715
 
1715
 
1716
	switch (hpd) {
1716
	switch (hpd) {
1717
	case RADEON_HPD_1:
1717
	case RADEON_HPD_1:
1718
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1718
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1719
			connected = true;
1719
			connected = true;
1720
		break;
1720
		break;
1721
	case RADEON_HPD_2:
1721
	case RADEON_HPD_2:
1722
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1722
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1723
			connected = true;
1723
			connected = true;
1724
		break;
1724
		break;
1725
	case RADEON_HPD_3:
1725
	case RADEON_HPD_3:
1726
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1726
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1727
			connected = true;
1727
			connected = true;
1728
		break;
1728
		break;
1729
	case RADEON_HPD_4:
1729
	case RADEON_HPD_4:
1730
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1730
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1731
			connected = true;
1731
			connected = true;
1732
		break;
1732
		break;
1733
	case RADEON_HPD_5:
1733
	case RADEON_HPD_5:
1734
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1734
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1735
			connected = true;
1735
			connected = true;
1736
		break;
1736
		break;
1737
	case RADEON_HPD_6:
1737
	case RADEON_HPD_6:
1738
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1738
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1739
			connected = true;
1739
			connected = true;
1740
		break;
1740
		break;
1741
	default:
1741
	default:
1742
		break;
1742
		break;
1743
	}
1743
	}
1744
 
1744
 
1745
	return connected;
1745
	return connected;
1746
}
1746
}
1747
 
1747
 
1748
/**
1748
/**
1749
 * evergreen_hpd_set_polarity - hpd set polarity callback.
1749
 * evergreen_hpd_set_polarity - hpd set polarity callback.
1750
 *
1750
 *
1751
 * @rdev: radeon_device pointer
1751
 * @rdev: radeon_device pointer
1752
 * @hpd: hpd (hotplug detect) pin
1752
 * @hpd: hpd (hotplug detect) pin
1753
 *
1753
 *
1754
 * Set the polarity of the hpd pin (evergreen+).
1754
 * Set the polarity of the hpd pin (evergreen+).
1755
 */
1755
 */
1756
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1756
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1757
				enum radeon_hpd_id hpd)
1757
				enum radeon_hpd_id hpd)
1758
{
1758
{
1759
	u32 tmp;
1759
	u32 tmp;
1760
	bool connected = evergreen_hpd_sense(rdev, hpd);
1760
	bool connected = evergreen_hpd_sense(rdev, hpd);
1761
 
1761
 
1762
	switch (hpd) {
1762
	switch (hpd) {
1763
	case RADEON_HPD_1:
1763
	case RADEON_HPD_1:
1764
		tmp = RREG32(DC_HPD1_INT_CONTROL);
1764
		tmp = RREG32(DC_HPD1_INT_CONTROL);
1765
		if (connected)
1765
		if (connected)
1766
			tmp &= ~DC_HPDx_INT_POLARITY;
1766
			tmp &= ~DC_HPDx_INT_POLARITY;
1767
		else
1767
		else
1768
			tmp |= DC_HPDx_INT_POLARITY;
1768
			tmp |= DC_HPDx_INT_POLARITY;
1769
		WREG32(DC_HPD1_INT_CONTROL, tmp);
1769
		WREG32(DC_HPD1_INT_CONTROL, tmp);
1770
		break;
1770
		break;
1771
	case RADEON_HPD_2:
1771
	case RADEON_HPD_2:
1772
		tmp = RREG32(DC_HPD2_INT_CONTROL);
1772
		tmp = RREG32(DC_HPD2_INT_CONTROL);
1773
		if (connected)
1773
		if (connected)
1774
			tmp &= ~DC_HPDx_INT_POLARITY;
1774
			tmp &= ~DC_HPDx_INT_POLARITY;
1775
		else
1775
		else
1776
			tmp |= DC_HPDx_INT_POLARITY;
1776
			tmp |= DC_HPDx_INT_POLARITY;
1777
		WREG32(DC_HPD2_INT_CONTROL, tmp);
1777
		WREG32(DC_HPD2_INT_CONTROL, tmp);
1778
		break;
1778
		break;
1779
	case RADEON_HPD_3:
1779
	case RADEON_HPD_3:
1780
		tmp = RREG32(DC_HPD3_INT_CONTROL);
1780
		tmp = RREG32(DC_HPD3_INT_CONTROL);
1781
		if (connected)
1781
		if (connected)
1782
			tmp &= ~DC_HPDx_INT_POLARITY;
1782
			tmp &= ~DC_HPDx_INT_POLARITY;
1783
		else
1783
		else
1784
			tmp |= DC_HPDx_INT_POLARITY;
1784
			tmp |= DC_HPDx_INT_POLARITY;
1785
		WREG32(DC_HPD3_INT_CONTROL, tmp);
1785
		WREG32(DC_HPD3_INT_CONTROL, tmp);
1786
		break;
1786
		break;
1787
	case RADEON_HPD_4:
1787
	case RADEON_HPD_4:
1788
		tmp = RREG32(DC_HPD4_INT_CONTROL);
1788
		tmp = RREG32(DC_HPD4_INT_CONTROL);
1789
		if (connected)
1789
		if (connected)
1790
			tmp &= ~DC_HPDx_INT_POLARITY;
1790
			tmp &= ~DC_HPDx_INT_POLARITY;
1791
		else
1791
		else
1792
			tmp |= DC_HPDx_INT_POLARITY;
1792
			tmp |= DC_HPDx_INT_POLARITY;
1793
		WREG32(DC_HPD4_INT_CONTROL, tmp);
1793
		WREG32(DC_HPD4_INT_CONTROL, tmp);
1794
		break;
1794
		break;
1795
	case RADEON_HPD_5:
1795
	case RADEON_HPD_5:
1796
		tmp = RREG32(DC_HPD5_INT_CONTROL);
1796
		tmp = RREG32(DC_HPD5_INT_CONTROL);
1797
		if (connected)
1797
		if (connected)
1798
			tmp &= ~DC_HPDx_INT_POLARITY;
1798
			tmp &= ~DC_HPDx_INT_POLARITY;
1799
		else
1799
		else
1800
			tmp |= DC_HPDx_INT_POLARITY;
1800
			tmp |= DC_HPDx_INT_POLARITY;
1801
		WREG32(DC_HPD5_INT_CONTROL, tmp);
1801
		WREG32(DC_HPD5_INT_CONTROL, tmp);
1802
			break;
1802
			break;
1803
	case RADEON_HPD_6:
1803
	case RADEON_HPD_6:
1804
		tmp = RREG32(DC_HPD6_INT_CONTROL);
1804
		tmp = RREG32(DC_HPD6_INT_CONTROL);
1805
		if (connected)
1805
		if (connected)
1806
			tmp &= ~DC_HPDx_INT_POLARITY;
1806
			tmp &= ~DC_HPDx_INT_POLARITY;
1807
		else
1807
		else
1808
			tmp |= DC_HPDx_INT_POLARITY;
1808
			tmp |= DC_HPDx_INT_POLARITY;
1809
		WREG32(DC_HPD6_INT_CONTROL, tmp);
1809
		WREG32(DC_HPD6_INT_CONTROL, tmp);
1810
		break;
1810
		break;
1811
	default:
1811
	default:
1812
		break;
1812
		break;
1813
	}
1813
	}
1814
}
1814
}
1815
 
1815
 
1816
/**
1816
/**
1817
 * evergreen_hpd_init - hpd setup callback.
1817
 * evergreen_hpd_init - hpd setup callback.
1818
 *
1818
 *
1819
 * @rdev: radeon_device pointer
1819
 * @rdev: radeon_device pointer
1820
 *
1820
 *
1821
 * Setup the hpd pins used by the card (evergreen+).
1821
 * Setup the hpd pins used by the card (evergreen+).
1822
 * Enable the pin, set the polarity, and enable the hpd interrupts.
1822
 * Enable the pin, set the polarity, and enable the hpd interrupts.
1823
 */
1823
 */
1824
void evergreen_hpd_init(struct radeon_device *rdev)
1824
void evergreen_hpd_init(struct radeon_device *rdev)
1825
{
1825
{
1826
	struct drm_device *dev = rdev->ddev;
1826
	struct drm_device *dev = rdev->ddev;
1827
	struct drm_connector *connector;
1827
	struct drm_connector *connector;
1828
	unsigned enabled = 0;
1828
	unsigned enabled = 0;
1829
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1829
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1830
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1830
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1831
 
1831
 
1832
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1832
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1833
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1833
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1834
 
1834
 
1835
		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1835
		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1836
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1836
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1837
			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1837
			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1838
			 * aux dp channel on imac and help (but not completely fix)
1838
			 * aux dp channel on imac and help (but not completely fix)
1839
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1839
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1840
			 * also avoid interrupt storms during dpms.
1840
			 * also avoid interrupt storms during dpms.
1841
			 */
1841
			 */
1842
			continue;
1842
			continue;
1843
		}
1843
		}
1844
		switch (radeon_connector->hpd.hpd) {
1844
		switch (radeon_connector->hpd.hpd) {
1845
		case RADEON_HPD_1:
1845
		case RADEON_HPD_1:
1846
			WREG32(DC_HPD1_CONTROL, tmp);
1846
			WREG32(DC_HPD1_CONTROL, tmp);
1847
			break;
1847
			break;
1848
		case RADEON_HPD_2:
1848
		case RADEON_HPD_2:
1849
			WREG32(DC_HPD2_CONTROL, tmp);
1849
			WREG32(DC_HPD2_CONTROL, tmp);
1850
			break;
1850
			break;
1851
		case RADEON_HPD_3:
1851
		case RADEON_HPD_3:
1852
			WREG32(DC_HPD3_CONTROL, tmp);
1852
			WREG32(DC_HPD3_CONTROL, tmp);
1853
			break;
1853
			break;
1854
		case RADEON_HPD_4:
1854
		case RADEON_HPD_4:
1855
			WREG32(DC_HPD4_CONTROL, tmp);
1855
			WREG32(DC_HPD4_CONTROL, tmp);
1856
			break;
1856
			break;
1857
		case RADEON_HPD_5:
1857
		case RADEON_HPD_5:
1858
			WREG32(DC_HPD5_CONTROL, tmp);
1858
			WREG32(DC_HPD5_CONTROL, tmp);
1859
			break;
1859
			break;
1860
		case RADEON_HPD_6:
1860
		case RADEON_HPD_6:
1861
			WREG32(DC_HPD6_CONTROL, tmp);
1861
			WREG32(DC_HPD6_CONTROL, tmp);
1862
			break;
1862
			break;
1863
		default:
1863
		default:
1864
			break;
1864
			break;
1865
		}
1865
		}
1866
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1866
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1867
		enabled |= 1 << radeon_connector->hpd.hpd;
1867
		enabled |= 1 << radeon_connector->hpd.hpd;
1868
	}
1868
	}
1869
//   radeon_irq_kms_enable_hpd(rdev, enabled);
1869
//   radeon_irq_kms_enable_hpd(rdev, enabled);
1870
}
1870
}
1871
 
1871
 
1872
/**
1872
/**
1873
 * evergreen_hpd_fini - hpd tear down callback.
1873
 * evergreen_hpd_fini - hpd tear down callback.
1874
 *
1874
 *
1875
 * @rdev: radeon_device pointer
1875
 * @rdev: radeon_device pointer
1876
 *
1876
 *
1877
 * Tear down the hpd pins used by the card (evergreen+).
1877
 * Tear down the hpd pins used by the card (evergreen+).
1878
 * Disable the hpd interrupts.
1878
 * Disable the hpd interrupts.
1879
 */
1879
 */
1880
void evergreen_hpd_fini(struct radeon_device *rdev)
1880
void evergreen_hpd_fini(struct radeon_device *rdev)
1881
{
1881
{
1882
	struct drm_device *dev = rdev->ddev;
1882
	struct drm_device *dev = rdev->ddev;
1883
	struct drm_connector *connector;
1883
	struct drm_connector *connector;
1884
	unsigned disabled = 0;
1884
	unsigned disabled = 0;
1885
 
1885
 
1886
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1886
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1887
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1887
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1888
		switch (radeon_connector->hpd.hpd) {
1888
		switch (radeon_connector->hpd.hpd) {
1889
		case RADEON_HPD_1:
1889
		case RADEON_HPD_1:
1890
			WREG32(DC_HPD1_CONTROL, 0);
1890
			WREG32(DC_HPD1_CONTROL, 0);
1891
			break;
1891
			break;
1892
		case RADEON_HPD_2:
1892
		case RADEON_HPD_2:
1893
			WREG32(DC_HPD2_CONTROL, 0);
1893
			WREG32(DC_HPD2_CONTROL, 0);
1894
			break;
1894
			break;
1895
		case RADEON_HPD_3:
1895
		case RADEON_HPD_3:
1896
			WREG32(DC_HPD3_CONTROL, 0);
1896
			WREG32(DC_HPD3_CONTROL, 0);
1897
			break;
1897
			break;
1898
		case RADEON_HPD_4:
1898
		case RADEON_HPD_4:
1899
			WREG32(DC_HPD4_CONTROL, 0);
1899
			WREG32(DC_HPD4_CONTROL, 0);
1900
			break;
1900
			break;
1901
		case RADEON_HPD_5:
1901
		case RADEON_HPD_5:
1902
			WREG32(DC_HPD5_CONTROL, 0);
1902
			WREG32(DC_HPD5_CONTROL, 0);
1903
			break;
1903
			break;
1904
		case RADEON_HPD_6:
1904
		case RADEON_HPD_6:
1905
			WREG32(DC_HPD6_CONTROL, 0);
1905
			WREG32(DC_HPD6_CONTROL, 0);
1906
			break;
1906
			break;
1907
		default:
1907
		default:
1908
			break;
1908
			break;
1909
		}
1909
		}
1910
		disabled |= 1 << radeon_connector->hpd.hpd;
1910
		disabled |= 1 << radeon_connector->hpd.hpd;
1911
	}
1911
	}
1912
//   radeon_irq_kms_disable_hpd(rdev, disabled);
1912
//   radeon_irq_kms_disable_hpd(rdev, disabled);
1913
}
1913
}
1914
 
1914
 
1915
/* watermark setup */
1915
/* watermark setup */
1916
 
1916
 
1917
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1917
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1918
					struct radeon_crtc *radeon_crtc,
1918
					struct radeon_crtc *radeon_crtc,
1919
					struct drm_display_mode *mode,
1919
					struct drm_display_mode *mode,
1920
					struct drm_display_mode *other_mode)
1920
					struct drm_display_mode *other_mode)
1921
{
1921
{
1922
	u32 tmp, buffer_alloc, i;
1922
	u32 tmp, buffer_alloc, i;
1923
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1923
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1924
	/*
1924
	/*
1925
	 * Line Buffer Setup
1925
	 * Line Buffer Setup
1926
	 * There are 3 line buffers, each one shared by 2 display controllers.
1926
	 * There are 3 line buffers, each one shared by 2 display controllers.
1927
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1927
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1928
	 * the display controllers.  The paritioning is done via one of four
1928
	 * the display controllers.  The paritioning is done via one of four
1929
	 * preset allocations specified in bits 2:0:
1929
	 * preset allocations specified in bits 2:0:
1930
	 * first display controller
1930
	 * first display controller
1931
	 *  0 - first half of lb (3840 * 2)
1931
	 *  0 - first half of lb (3840 * 2)
1932
	 *  1 - first 3/4 of lb (5760 * 2)
1932
	 *  1 - first 3/4 of lb (5760 * 2)
1933
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
1933
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
1934
	 *  3 - first 1/4 of lb (1920 * 2)
1934
	 *  3 - first 1/4 of lb (1920 * 2)
1935
	 * second display controller
1935
	 * second display controller
1936
	 *  4 - second half of lb (3840 * 2)
1936
	 *  4 - second half of lb (3840 * 2)
1937
	 *  5 - second 3/4 of lb (5760 * 2)
1937
	 *  5 - second 3/4 of lb (5760 * 2)
1938
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
1938
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
1939
	 *  7 - last 1/4 of lb (1920 * 2)
1939
	 *  7 - last 1/4 of lb (1920 * 2)
1940
	 */
1940
	 */
1941
	/* this can get tricky if we have two large displays on a paired group
1941
	/* this can get tricky if we have two large displays on a paired group
1942
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
1942
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
1943
	 * non-linked crtcs for maximum line buffer allocation.
1943
	 * non-linked crtcs for maximum line buffer allocation.
1944
	 */
1944
	 */
1945
	if (radeon_crtc->base.enabled && mode) {
1945
	if (radeon_crtc->base.enabled && mode) {
1946
		if (other_mode) {
1946
		if (other_mode) {
1947
			tmp = 0; /* 1/2 */
1947
			tmp = 0; /* 1/2 */
1948
			buffer_alloc = 1;
1948
			buffer_alloc = 1;
1949
		} else {
1949
		} else {
1950
			tmp = 2; /* whole */
1950
			tmp = 2; /* whole */
1951
			buffer_alloc = 2;
1951
			buffer_alloc = 2;
1952
		}
1952
		}
1953
	} else {
1953
	} else {
1954
		tmp = 0;
1954
		tmp = 0;
1955
		buffer_alloc = 0;
1955
		buffer_alloc = 0;
1956
	}
1956
	}
1957
 
1957
 
1958
	/* second controller of the pair uses second half of the lb */
1958
	/* second controller of the pair uses second half of the lb */
1959
	if (radeon_crtc->crtc_id % 2)
1959
	if (radeon_crtc->crtc_id % 2)
1960
		tmp += 4;
1960
		tmp += 4;
1961
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1961
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1962
 
1962
 
1963
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1963
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1964
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1964
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1965
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1965
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1966
		for (i = 0; i < rdev->usec_timeout; i++) {
1966
		for (i = 0; i < rdev->usec_timeout; i++) {
1967
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1967
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1968
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
1968
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
1969
				break;
1969
				break;
1970
			udelay(1);
1970
			udelay(1);
1971
		}
1971
		}
1972
	}
1972
	}
1973
 
1973
 
1974
	if (radeon_crtc->base.enabled && mode) {
1974
	if (radeon_crtc->base.enabled && mode) {
1975
		switch (tmp) {
1975
		switch (tmp) {
1976
		case 0:
1976
		case 0:
1977
		case 4:
1977
		case 4:
1978
		default:
1978
		default:
1979
			if (ASIC_IS_DCE5(rdev))
1979
			if (ASIC_IS_DCE5(rdev))
1980
				return 4096 * 2;
1980
				return 4096 * 2;
1981
			else
1981
			else
1982
				return 3840 * 2;
1982
				return 3840 * 2;
1983
		case 1:
1983
		case 1:
1984
		case 5:
1984
		case 5:
1985
			if (ASIC_IS_DCE5(rdev))
1985
			if (ASIC_IS_DCE5(rdev))
1986
				return 6144 * 2;
1986
				return 6144 * 2;
1987
			else
1987
			else
1988
				return 5760 * 2;
1988
				return 5760 * 2;
1989
		case 2:
1989
		case 2:
1990
		case 6:
1990
		case 6:
1991
			if (ASIC_IS_DCE5(rdev))
1991
			if (ASIC_IS_DCE5(rdev))
1992
				return 8192 * 2;
1992
				return 8192 * 2;
1993
			else
1993
			else
1994
				return 7680 * 2;
1994
				return 7680 * 2;
1995
		case 3:
1995
		case 3:
1996
		case 7:
1996
		case 7:
1997
			if (ASIC_IS_DCE5(rdev))
1997
			if (ASIC_IS_DCE5(rdev))
1998
				return 2048 * 2;
1998
				return 2048 * 2;
1999
			else
1999
			else
2000
				return 1920 * 2;
2000
				return 1920 * 2;
2001
		}
2001
		}
2002
	}
2002
	}
2003
 
2003
 
2004
	/* controller not enabled, so no lb used */
2004
	/* controller not enabled, so no lb used */
2005
	return 0;
2005
	return 0;
2006
}
2006
}
2007
 
2007
 
2008
u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2008
u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2009
{
2009
{
2010
	u32 tmp = RREG32(MC_SHARED_CHMAP);
2010
	u32 tmp = RREG32(MC_SHARED_CHMAP);
2011
 
2011
 
2012
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2012
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2013
	case 0:
2013
	case 0:
2014
	default:
2014
	default:
2015
		return 1;
2015
		return 1;
2016
	case 1:
2016
	case 1:
2017
		return 2;
2017
		return 2;
2018
	case 2:
2018
	case 2:
2019
		return 4;
2019
		return 4;
2020
	case 3:
2020
	case 3:
2021
		return 8;
2021
		return 8;
2022
	}
2022
	}
2023
}
2023
}
2024
 
2024
 
2025
struct evergreen_wm_params {
2025
struct evergreen_wm_params {
2026
	u32 dram_channels; /* number of dram channels */
2026
	u32 dram_channels; /* number of dram channels */
2027
	u32 yclk;          /* bandwidth per dram data pin in kHz */
2027
	u32 yclk;          /* bandwidth per dram data pin in kHz */
2028
	u32 sclk;          /* engine clock in kHz */
2028
	u32 sclk;          /* engine clock in kHz */
2029
	u32 disp_clk;      /* display clock in kHz */
2029
	u32 disp_clk;      /* display clock in kHz */
2030
	u32 src_width;     /* viewport width */
2030
	u32 src_width;     /* viewport width */
2031
	u32 active_time;   /* active display time in ns */
2031
	u32 active_time;   /* active display time in ns */
2032
	u32 blank_time;    /* blank time in ns */
2032
	u32 blank_time;    /* blank time in ns */
2033
	bool interlaced;    /* mode is interlaced */
2033
	bool interlaced;    /* mode is interlaced */
2034
	fixed20_12 vsc;    /* vertical scale ratio */
2034
	fixed20_12 vsc;    /* vertical scale ratio */
2035
	u32 num_heads;     /* number of active crtcs */
2035
	u32 num_heads;     /* number of active crtcs */
2036
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
2036
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
2037
	u32 lb_size;       /* line buffer allocated to pipe */
2037
	u32 lb_size;       /* line buffer allocated to pipe */
2038
	u32 vtaps;         /* vertical scaler taps */
2038
	u32 vtaps;         /* vertical scaler taps */
2039
};
2039
};
2040
 
2040
 
2041
static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2041
static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2042
{
2042
{
2043
	/* Calculate DRAM Bandwidth and the part allocated to display. */
2043
	/* Calculate DRAM Bandwidth and the part allocated to display. */
2044
	fixed20_12 dram_efficiency; /* 0.7 */
2044
	fixed20_12 dram_efficiency; /* 0.7 */
2045
	fixed20_12 yclk, dram_channels, bandwidth;
2045
	fixed20_12 yclk, dram_channels, bandwidth;
2046
	fixed20_12 a;
2046
	fixed20_12 a;
2047
 
2047
 
2048
	a.full = dfixed_const(1000);
2048
	a.full = dfixed_const(1000);
2049
	yclk.full = dfixed_const(wm->yclk);
2049
	yclk.full = dfixed_const(wm->yclk);
2050
	yclk.full = dfixed_div(yclk, a);
2050
	yclk.full = dfixed_div(yclk, a);
2051
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2051
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2052
	a.full = dfixed_const(10);
2052
	a.full = dfixed_const(10);
2053
	dram_efficiency.full = dfixed_const(7);
2053
	dram_efficiency.full = dfixed_const(7);
2054
	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2054
	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2055
	bandwidth.full = dfixed_mul(dram_channels, yclk);
2055
	bandwidth.full = dfixed_mul(dram_channels, yclk);
2056
	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2056
	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2057
 
2057
 
2058
	return dfixed_trunc(bandwidth);
2058
	return dfixed_trunc(bandwidth);
2059
}
2059
}
2060
 
2060
 
2061
static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2061
static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2062
{
2062
{
2063
	/* Calculate DRAM Bandwidth and the part allocated to display. */
2063
	/* Calculate DRAM Bandwidth and the part allocated to display. */
2064
	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2064
	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2065
	fixed20_12 yclk, dram_channels, bandwidth;
2065
	fixed20_12 yclk, dram_channels, bandwidth;
2066
	fixed20_12 a;
2066
	fixed20_12 a;
2067
 
2067
 
2068
	a.full = dfixed_const(1000);
2068
	a.full = dfixed_const(1000);
2069
	yclk.full = dfixed_const(wm->yclk);
2069
	yclk.full = dfixed_const(wm->yclk);
2070
	yclk.full = dfixed_div(yclk, a);
2070
	yclk.full = dfixed_div(yclk, a);
2071
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2071
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2072
	a.full = dfixed_const(10);
2072
	a.full = dfixed_const(10);
2073
	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2073
	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2074
	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2074
	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2075
	bandwidth.full = dfixed_mul(dram_channels, yclk);
2075
	bandwidth.full = dfixed_mul(dram_channels, yclk);
2076
	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2076
	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2077
 
2077
 
2078
	return dfixed_trunc(bandwidth);
2078
	return dfixed_trunc(bandwidth);
2079
}
2079
}
2080
 
2080
 
2081
static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2081
static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2082
{
2082
{
2083
	/* Calculate the display Data return Bandwidth */
2083
	/* Calculate the display Data return Bandwidth */
2084
	fixed20_12 return_efficiency; /* 0.8 */
2084
	fixed20_12 return_efficiency; /* 0.8 */
2085
	fixed20_12 sclk, bandwidth;
2085
	fixed20_12 sclk, bandwidth;
2086
	fixed20_12 a;
2086
	fixed20_12 a;
2087
 
2087
 
2088
	a.full = dfixed_const(1000);
2088
	a.full = dfixed_const(1000);
2089
	sclk.full = dfixed_const(wm->sclk);
2089
	sclk.full = dfixed_const(wm->sclk);
2090
	sclk.full = dfixed_div(sclk, a);
2090
	sclk.full = dfixed_div(sclk, a);
2091
	a.full = dfixed_const(10);
2091
	a.full = dfixed_const(10);
2092
	return_efficiency.full = dfixed_const(8);
2092
	return_efficiency.full = dfixed_const(8);
2093
	return_efficiency.full = dfixed_div(return_efficiency, a);
2093
	return_efficiency.full = dfixed_div(return_efficiency, a);
2094
	a.full = dfixed_const(32);
2094
	a.full = dfixed_const(32);
2095
	bandwidth.full = dfixed_mul(a, sclk);
2095
	bandwidth.full = dfixed_mul(a, sclk);
2096
	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2096
	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2097
 
2097
 
2098
	return dfixed_trunc(bandwidth);
2098
	return dfixed_trunc(bandwidth);
2099
}
2099
}
2100
 
2100
 
2101
static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2101
static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2102
{
2102
{
2103
	/* Calculate the DMIF Request Bandwidth */
2103
	/* Calculate the DMIF Request Bandwidth */
2104
	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2104
	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2105
	fixed20_12 disp_clk, bandwidth;
2105
	fixed20_12 disp_clk, bandwidth;
2106
	fixed20_12 a;
2106
	fixed20_12 a;
2107
 
2107
 
2108
	a.full = dfixed_const(1000);
2108
	a.full = dfixed_const(1000);
2109
	disp_clk.full = dfixed_const(wm->disp_clk);
2109
	disp_clk.full = dfixed_const(wm->disp_clk);
2110
	disp_clk.full = dfixed_div(disp_clk, a);
2110
	disp_clk.full = dfixed_div(disp_clk, a);
2111
	a.full = dfixed_const(10);
2111
	a.full = dfixed_const(10);
2112
	disp_clk_request_efficiency.full = dfixed_const(8);
2112
	disp_clk_request_efficiency.full = dfixed_const(8);
2113
	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2113
	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2114
	a.full = dfixed_const(32);
2114
	a.full = dfixed_const(32);
2115
	bandwidth.full = dfixed_mul(a, disp_clk);
2115
	bandwidth.full = dfixed_mul(a, disp_clk);
2116
	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2116
	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2117
 
2117
 
2118
	return dfixed_trunc(bandwidth);
2118
	return dfixed_trunc(bandwidth);
2119
}
2119
}
2120
 
2120
 
2121
static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2121
static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2122
{
2122
{
2123
	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2123
	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2124
	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2124
	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2125
	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2125
	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2126
	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2126
	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2127
 
2127
 
2128
	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2128
	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2129
}
2129
}
2130
 
2130
 
2131
static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2131
static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2132
{
2132
{
2133
	/* Calculate the display mode Average Bandwidth
2133
	/* Calculate the display mode Average Bandwidth
2134
	 * DisplayMode should contain the source and destination dimensions,
2134
	 * DisplayMode should contain the source and destination dimensions,
2135
	 * timing, etc.
2135
	 * timing, etc.
2136
	 */
2136
	 */
2137
	fixed20_12 bpp;
2137
	fixed20_12 bpp;
2138
	fixed20_12 line_time;
2138
	fixed20_12 line_time;
2139
	fixed20_12 src_width;
2139
	fixed20_12 src_width;
2140
	fixed20_12 bandwidth;
2140
	fixed20_12 bandwidth;
2141
	fixed20_12 a;
2141
	fixed20_12 a;
2142
 
2142
 
2143
	a.full = dfixed_const(1000);
2143
	a.full = dfixed_const(1000);
2144
	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2144
	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2145
	line_time.full = dfixed_div(line_time, a);
2145
	line_time.full = dfixed_div(line_time, a);
2146
	bpp.full = dfixed_const(wm->bytes_per_pixel);
2146
	bpp.full = dfixed_const(wm->bytes_per_pixel);
2147
	src_width.full = dfixed_const(wm->src_width);
2147
	src_width.full = dfixed_const(wm->src_width);
2148
	bandwidth.full = dfixed_mul(src_width, bpp);
2148
	bandwidth.full = dfixed_mul(src_width, bpp);
2149
	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2149
	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2150
	bandwidth.full = dfixed_div(bandwidth, line_time);
2150
	bandwidth.full = dfixed_div(bandwidth, line_time);
2151
 
2151
 
2152
	return dfixed_trunc(bandwidth);
2152
	return dfixed_trunc(bandwidth);
2153
}
2153
}
2154
 
2154
 
2155
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2155
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2156
{
2156
{
2157
	/* First calcualte the latency in ns */
2157
	/* First calcualte the latency in ns */
2158
	u32 mc_latency = 2000; /* 2000 ns. */
2158
	u32 mc_latency = 2000; /* 2000 ns. */
2159
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
2159
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
2160
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2160
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2161
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2161
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2162
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2162
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2163
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2163
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2164
		(wm->num_heads * cursor_line_pair_return_time);
2164
		(wm->num_heads * cursor_line_pair_return_time);
2165
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2165
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2166
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2166
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2167
	fixed20_12 a, b, c;
2167
	fixed20_12 a, b, c;
2168
 
2168
 
2169
	if (wm->num_heads == 0)
2169
	if (wm->num_heads == 0)
2170
		return 0;
2170
		return 0;
2171
 
2171
 
2172
	a.full = dfixed_const(2);
2172
	a.full = dfixed_const(2);
2173
	b.full = dfixed_const(1);
2173
	b.full = dfixed_const(1);
2174
	if ((wm->vsc.full > a.full) ||
2174
	if ((wm->vsc.full > a.full) ||
2175
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2175
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2176
	    (wm->vtaps >= 5) ||
2176
	    (wm->vtaps >= 5) ||
2177
	    ((wm->vsc.full >= a.full) && wm->interlaced))
2177
	    ((wm->vsc.full >= a.full) && wm->interlaced))
2178
		max_src_lines_per_dst_line = 4;
2178
		max_src_lines_per_dst_line = 4;
2179
	else
2179
	else
2180
		max_src_lines_per_dst_line = 2;
2180
		max_src_lines_per_dst_line = 2;
2181
 
2181
 
2182
	a.full = dfixed_const(available_bandwidth);
2182
	a.full = dfixed_const(available_bandwidth);
2183
	b.full = dfixed_const(wm->num_heads);
2183
	b.full = dfixed_const(wm->num_heads);
2184
	a.full = dfixed_div(a, b);
2184
	a.full = dfixed_div(a, b);
2185
 
2185
 
2186
	b.full = dfixed_const(1000);
2186
	b.full = dfixed_const(1000);
2187
	c.full = dfixed_const(wm->disp_clk);
2187
	c.full = dfixed_const(wm->disp_clk);
2188
	b.full = dfixed_div(c, b);
2188
	b.full = dfixed_div(c, b);
2189
	c.full = dfixed_const(wm->bytes_per_pixel);
2189
	c.full = dfixed_const(wm->bytes_per_pixel);
2190
	b.full = dfixed_mul(b, c);
2190
	b.full = dfixed_mul(b, c);
2191
 
2191
 
2192
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2192
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2193
 
2193
 
2194
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2194
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2195
	b.full = dfixed_const(1000);
2195
	b.full = dfixed_const(1000);
2196
	c.full = dfixed_const(lb_fill_bw);
2196
	c.full = dfixed_const(lb_fill_bw);
2197
	b.full = dfixed_div(c, b);
2197
	b.full = dfixed_div(c, b);
2198
	a.full = dfixed_div(a, b);
2198
	a.full = dfixed_div(a, b);
2199
	line_fill_time = dfixed_trunc(a);
2199
	line_fill_time = dfixed_trunc(a);
2200
 
2200
 
2201
	if (line_fill_time < wm->active_time)
2201
	if (line_fill_time < wm->active_time)
2202
		return latency;
2202
		return latency;
2203
	else
2203
	else
2204
		return latency + (line_fill_time - wm->active_time);
2204
		return latency + (line_fill_time - wm->active_time);
2205
 
2205
 
2206
}
2206
}
2207
 
2207
 
2208
static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2208
static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2209
{
2209
{
2210
	if (evergreen_average_bandwidth(wm) <=
2210
	if (evergreen_average_bandwidth(wm) <=
2211
	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2211
	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2212
		return true;
2212
		return true;
2213
	else
2213
	else
2214
		return false;
2214
		return false;
2215
};
2215
};
2216
 
2216
 
2217
static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2217
static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2218
{
2218
{
2219
	if (evergreen_average_bandwidth(wm) <=
2219
	if (evergreen_average_bandwidth(wm) <=
2220
	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2220
	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2221
		return true;
2221
		return true;
2222
	else
2222
	else
2223
		return false;
2223
		return false;
2224
};
2224
};
2225
 
2225
 
2226
static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2226
static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2227
{
2227
{
2228
	u32 lb_partitions = wm->lb_size / wm->src_width;
2228
	u32 lb_partitions = wm->lb_size / wm->src_width;
2229
	u32 line_time = wm->active_time + wm->blank_time;
2229
	u32 line_time = wm->active_time + wm->blank_time;
2230
	u32 latency_tolerant_lines;
2230
	u32 latency_tolerant_lines;
2231
	u32 latency_hiding;
2231
	u32 latency_hiding;
2232
	fixed20_12 a;
2232
	fixed20_12 a;
2233
 
2233
 
2234
	a.full = dfixed_const(1);
2234
	a.full = dfixed_const(1);
2235
	if (wm->vsc.full > a.full)
2235
	if (wm->vsc.full > a.full)
2236
		latency_tolerant_lines = 1;
2236
		latency_tolerant_lines = 1;
2237
	else {
2237
	else {
2238
		if (lb_partitions <= (wm->vtaps + 1))
2238
		if (lb_partitions <= (wm->vtaps + 1))
2239
			latency_tolerant_lines = 1;
2239
			latency_tolerant_lines = 1;
2240
		else
2240
		else
2241
			latency_tolerant_lines = 2;
2241
			latency_tolerant_lines = 2;
2242
	}
2242
	}
2243
 
2243
 
2244
	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2244
	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2245
 
2245
 
2246
	if (evergreen_latency_watermark(wm) <= latency_hiding)
2246
	if (evergreen_latency_watermark(wm) <= latency_hiding)
2247
		return true;
2247
		return true;
2248
	else
2248
	else
2249
		return false;
2249
		return false;
2250
}
2250
}
2251
 
2251
 
2252
static void evergreen_program_watermarks(struct radeon_device *rdev,
2252
static void evergreen_program_watermarks(struct radeon_device *rdev,
2253
					 struct radeon_crtc *radeon_crtc,
2253
					 struct radeon_crtc *radeon_crtc,
2254
					 u32 lb_size, u32 num_heads)
2254
					 u32 lb_size, u32 num_heads)
2255
{
2255
{
2256
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
2256
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
2257
	struct evergreen_wm_params wm_low, wm_high;
2257
	struct evergreen_wm_params wm_low, wm_high;
2258
	u32 dram_channels;
2258
	u32 dram_channels;
2259
	u32 pixel_period;
2259
	u32 pixel_period;
2260
	u32 line_time = 0;
2260
	u32 line_time = 0;
2261
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
2261
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
2262
	u32 priority_a_mark = 0, priority_b_mark = 0;
2262
	u32 priority_a_mark = 0, priority_b_mark = 0;
2263
	u32 priority_a_cnt = PRIORITY_OFF;
2263
	u32 priority_a_cnt = PRIORITY_OFF;
2264
	u32 priority_b_cnt = PRIORITY_OFF;
2264
	u32 priority_b_cnt = PRIORITY_OFF;
2265
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
2265
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
2266
	u32 tmp, arb_control3;
2266
	u32 tmp, arb_control3;
2267
	fixed20_12 a, b, c;
2267
	fixed20_12 a, b, c;
2268
 
2268
 
2269
	if (radeon_crtc->base.enabled && num_heads && mode) {
2269
	if (radeon_crtc->base.enabled && num_heads && mode) {
2270
		pixel_period = 1000000 / (u32)mode->clock;
2270
		pixel_period = 1000000 / (u32)mode->clock;
2271
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2271
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2272
		priority_a_cnt = 0;
2272
		priority_a_cnt = 0;
2273
		priority_b_cnt = 0;
2273
		priority_b_cnt = 0;
2274
		dram_channels = evergreen_get_number_of_dram_channels(rdev);
2274
		dram_channels = evergreen_get_number_of_dram_channels(rdev);
2275
 
2275
 
2276
		/* watermark for high clocks */
2276
		/* watermark for high clocks */
2277
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2277
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2278
			wm_high.yclk =
2278
			wm_high.yclk =
2279
				radeon_dpm_get_mclk(rdev, false) * 10;
2279
				radeon_dpm_get_mclk(rdev, false) * 10;
2280
			wm_high.sclk =
2280
			wm_high.sclk =
2281
				radeon_dpm_get_sclk(rdev, false) * 10;
2281
				radeon_dpm_get_sclk(rdev, false) * 10;
2282
		} else {
2282
		} else {
2283
			wm_high.yclk = rdev->pm.current_mclk * 10;
2283
			wm_high.yclk = rdev->pm.current_mclk * 10;
2284
			wm_high.sclk = rdev->pm.current_sclk * 10;
2284
			wm_high.sclk = rdev->pm.current_sclk * 10;
2285
		}
2285
		}
2286
 
2286
 
2287
		wm_high.disp_clk = mode->clock;
2287
		wm_high.disp_clk = mode->clock;
2288
		wm_high.src_width = mode->crtc_hdisplay;
2288
		wm_high.src_width = mode->crtc_hdisplay;
2289
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2289
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2290
		wm_high.blank_time = line_time - wm_high.active_time;
2290
		wm_high.blank_time = line_time - wm_high.active_time;
2291
		wm_high.interlaced = false;
2291
		wm_high.interlaced = false;
2292
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2292
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2293
			wm_high.interlaced = true;
2293
			wm_high.interlaced = true;
2294
		wm_high.vsc = radeon_crtc->vsc;
2294
		wm_high.vsc = radeon_crtc->vsc;
2295
		wm_high.vtaps = 1;
2295
		wm_high.vtaps = 1;
2296
		if (radeon_crtc->rmx_type != RMX_OFF)
2296
		if (radeon_crtc->rmx_type != RMX_OFF)
2297
			wm_high.vtaps = 2;
2297
			wm_high.vtaps = 2;
2298
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2298
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2299
		wm_high.lb_size = lb_size;
2299
		wm_high.lb_size = lb_size;
2300
		wm_high.dram_channels = dram_channels;
2300
		wm_high.dram_channels = dram_channels;
2301
		wm_high.num_heads = num_heads;
2301
		wm_high.num_heads = num_heads;
2302
 
2302
 
2303
		/* watermark for low clocks */
2303
		/* watermark for low clocks */
2304
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2304
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2305
			wm_low.yclk =
2305
			wm_low.yclk =
2306
				radeon_dpm_get_mclk(rdev, true) * 10;
2306
				radeon_dpm_get_mclk(rdev, true) * 10;
2307
			wm_low.sclk =
2307
			wm_low.sclk =
2308
				radeon_dpm_get_sclk(rdev, true) * 10;
2308
				radeon_dpm_get_sclk(rdev, true) * 10;
2309
		} else {
2309
		} else {
2310
			wm_low.yclk = rdev->pm.current_mclk * 10;
2310
			wm_low.yclk = rdev->pm.current_mclk * 10;
2311
			wm_low.sclk = rdev->pm.current_sclk * 10;
2311
			wm_low.sclk = rdev->pm.current_sclk * 10;
2312
		}
2312
		}
2313
 
2313
 
2314
		wm_low.disp_clk = mode->clock;
2314
		wm_low.disp_clk = mode->clock;
2315
		wm_low.src_width = mode->crtc_hdisplay;
2315
		wm_low.src_width = mode->crtc_hdisplay;
2316
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2316
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2317
		wm_low.blank_time = line_time - wm_low.active_time;
2317
		wm_low.blank_time = line_time - wm_low.active_time;
2318
		wm_low.interlaced = false;
2318
		wm_low.interlaced = false;
2319
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2319
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2320
			wm_low.interlaced = true;
2320
			wm_low.interlaced = true;
2321
		wm_low.vsc = radeon_crtc->vsc;
2321
		wm_low.vsc = radeon_crtc->vsc;
2322
		wm_low.vtaps = 1;
2322
		wm_low.vtaps = 1;
2323
		if (radeon_crtc->rmx_type != RMX_OFF)
2323
		if (radeon_crtc->rmx_type != RMX_OFF)
2324
			wm_low.vtaps = 2;
2324
			wm_low.vtaps = 2;
2325
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2325
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2326
		wm_low.lb_size = lb_size;
2326
		wm_low.lb_size = lb_size;
2327
		wm_low.dram_channels = dram_channels;
2327
		wm_low.dram_channels = dram_channels;
2328
		wm_low.num_heads = num_heads;
2328
		wm_low.num_heads = num_heads;
2329
 
2329
 
2330
		/* set for high clocks */
2330
		/* set for high clocks */
2331
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2331
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2332
		/* set for low clocks */
2332
		/* set for low clocks */
2333
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2333
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2334
 
2334
 
2335
		/* possibly force display priority to high */
2335
		/* possibly force display priority to high */
2336
		/* should really do this at mode validation time... */
2336
		/* should really do this at mode validation time... */
2337
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2337
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2338
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2338
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2339
		    !evergreen_check_latency_hiding(&wm_high) ||
2339
		    !evergreen_check_latency_hiding(&wm_high) ||
2340
		    (rdev->disp_priority == 2)) {
2340
		    (rdev->disp_priority == 2)) {
2341
			DRM_DEBUG_KMS("force priority a to high\n");
2341
			DRM_DEBUG_KMS("force priority a to high\n");
2342
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
2342
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
2343
		}
2343
		}
2344
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2344
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2345
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2345
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2346
		    !evergreen_check_latency_hiding(&wm_low) ||
2346
		    !evergreen_check_latency_hiding(&wm_low) ||
2347
		    (rdev->disp_priority == 2)) {
2347
		    (rdev->disp_priority == 2)) {
2348
			DRM_DEBUG_KMS("force priority b to high\n");
2348
			DRM_DEBUG_KMS("force priority b to high\n");
2349
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
2349
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
2350
		}
2350
		}
2351
 
2351
 
2352
		a.full = dfixed_const(1000);
2352
		a.full = dfixed_const(1000);
2353
		b.full = dfixed_const(mode->clock);
2353
		b.full = dfixed_const(mode->clock);
2354
		b.full = dfixed_div(b, a);
2354
		b.full = dfixed_div(b, a);
2355
		c.full = dfixed_const(latency_watermark_a);
2355
		c.full = dfixed_const(latency_watermark_a);
2356
		c.full = dfixed_mul(c, b);
2356
		c.full = dfixed_mul(c, b);
2357
		c.full = dfixed_mul(c, radeon_crtc->hsc);
2357
		c.full = dfixed_mul(c, radeon_crtc->hsc);
2358
		c.full = dfixed_div(c, a);
2358
		c.full = dfixed_div(c, a);
2359
		a.full = dfixed_const(16);
2359
		a.full = dfixed_const(16);
2360
		c.full = dfixed_div(c, a);
2360
		c.full = dfixed_div(c, a);
2361
		priority_a_mark = dfixed_trunc(c);
2361
		priority_a_mark = dfixed_trunc(c);
2362
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2362
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2363
 
2363
 
2364
		a.full = dfixed_const(1000);
2364
		a.full = dfixed_const(1000);
2365
		b.full = dfixed_const(mode->clock);
2365
		b.full = dfixed_const(mode->clock);
2366
		b.full = dfixed_div(b, a);
2366
		b.full = dfixed_div(b, a);
2367
		c.full = dfixed_const(latency_watermark_b);
2367
		c.full = dfixed_const(latency_watermark_b);
2368
		c.full = dfixed_mul(c, b);
2368
		c.full = dfixed_mul(c, b);
2369
		c.full = dfixed_mul(c, radeon_crtc->hsc);
2369
		c.full = dfixed_mul(c, radeon_crtc->hsc);
2370
		c.full = dfixed_div(c, a);
2370
		c.full = dfixed_div(c, a);
2371
		a.full = dfixed_const(16);
2371
		a.full = dfixed_const(16);
2372
		c.full = dfixed_div(c, a);
2372
		c.full = dfixed_div(c, a);
2373
		priority_b_mark = dfixed_trunc(c);
2373
		priority_b_mark = dfixed_trunc(c);
2374
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2374
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2375
 
2375
 
2376
		/* Save number of lines the linebuffer leads before the scanout */
2376
		/* Save number of lines the linebuffer leads before the scanout */
2377
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
2377
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
2378
	}
2378
	}
2379
 
2379
 
2380
	/* select wm A */
2380
	/* select wm A */
2381
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2381
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2382
	tmp = arb_control3;
2382
	tmp = arb_control3;
2383
	tmp &= ~LATENCY_WATERMARK_MASK(3);
2383
	tmp &= ~LATENCY_WATERMARK_MASK(3);
2384
	tmp |= LATENCY_WATERMARK_MASK(1);
2384
	tmp |= LATENCY_WATERMARK_MASK(1);
2385
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2385
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2386
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2386
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2387
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2387
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2388
		LATENCY_HIGH_WATERMARK(line_time)));
2388
		LATENCY_HIGH_WATERMARK(line_time)));
2389
	/* select wm B */
2389
	/* select wm B */
2390
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2390
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2391
	tmp &= ~LATENCY_WATERMARK_MASK(3);
2391
	tmp &= ~LATENCY_WATERMARK_MASK(3);
2392
	tmp |= LATENCY_WATERMARK_MASK(2);
2392
	tmp |= LATENCY_WATERMARK_MASK(2);
2393
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2393
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2394
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2394
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2395
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2395
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2396
		LATENCY_HIGH_WATERMARK(line_time)));
2396
		LATENCY_HIGH_WATERMARK(line_time)));
2397
	/* restore original selection */
2397
	/* restore original selection */
2398
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2398
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2399
 
2399
 
2400
	/* write the priority marks */
2400
	/* write the priority marks */
2401
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2401
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2402
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2402
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2403
 
2403
 
2404
	/* save values for DPM */
2404
	/* save values for DPM */
2405
	radeon_crtc->line_time = line_time;
2405
	radeon_crtc->line_time = line_time;
2406
	radeon_crtc->wm_high = latency_watermark_a;
2406
	radeon_crtc->wm_high = latency_watermark_a;
2407
	radeon_crtc->wm_low = latency_watermark_b;
2407
	radeon_crtc->wm_low = latency_watermark_b;
2408
}
2408
}
2409
 
2409
 
2410
/**
2410
/**
2411
 * evergreen_bandwidth_update - update display watermarks callback.
2411
 * evergreen_bandwidth_update - update display watermarks callback.
2412
 *
2412
 *
2413
 * @rdev: radeon_device pointer
2413
 * @rdev: radeon_device pointer
2414
 *
2414
 *
2415
 * Update the display watermarks based on the requested mode(s)
2415
 * Update the display watermarks based on the requested mode(s)
2416
 * (evergreen+).
2416
 * (evergreen+).
2417
 */
2417
 */
2418
void evergreen_bandwidth_update(struct radeon_device *rdev)
2418
void evergreen_bandwidth_update(struct radeon_device *rdev)
2419
{
2419
{
2420
	struct drm_display_mode *mode0 = NULL;
2420
	struct drm_display_mode *mode0 = NULL;
2421
	struct drm_display_mode *mode1 = NULL;
2421
	struct drm_display_mode *mode1 = NULL;
2422
	u32 num_heads = 0, lb_size;
2422
	u32 num_heads = 0, lb_size;
2423
	int i;
2423
	int i;
2424
 
2424
 
2425
	if (!rdev->mode_info.mode_config_initialized)
2425
	if (!rdev->mode_info.mode_config_initialized)
2426
		return;
2426
		return;
2427
 
2427
 
2428
	radeon_update_display_priority(rdev);
2428
	radeon_update_display_priority(rdev);
2429
 
2429
 
2430
	for (i = 0; i < rdev->num_crtc; i++) {
2430
	for (i = 0; i < rdev->num_crtc; i++) {
2431
		if (rdev->mode_info.crtcs[i]->base.enabled)
2431
		if (rdev->mode_info.crtcs[i]->base.enabled)
2432
			num_heads++;
2432
			num_heads++;
2433
	}
2433
	}
2434
	for (i = 0; i < rdev->num_crtc; i += 2) {
2434
	for (i = 0; i < rdev->num_crtc; i += 2) {
2435
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2435
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2436
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2436
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2437
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2437
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2438
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2438
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2439
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2439
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2440
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2440
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2441
	}
2441
	}
2442
}
2442
}
2443
 
2443
 
2444
/**
2444
/**
2445
 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2445
 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2446
 *
2446
 *
2447
 * @rdev: radeon_device pointer
2447
 * @rdev: radeon_device pointer
2448
 *
2448
 *
2449
 * Wait for the MC (memory controller) to be idle.
2449
 * Wait for the MC (memory controller) to be idle.
2450
 * (evergreen+).
2450
 * (evergreen+).
2451
 * Returns 0 if the MC is idle, -1 if not.
2451
 * Returns 0 if the MC is idle, -1 if not.
2452
 */
2452
 */
2453
int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2453
int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2454
{
2454
{
2455
	unsigned i;
2455
	unsigned i;
2456
	u32 tmp;
2456
	u32 tmp;
2457
 
2457
 
2458
	for (i = 0; i < rdev->usec_timeout; i++) {
2458
	for (i = 0; i < rdev->usec_timeout; i++) {
2459
		/* read MC_STATUS */
2459
		/* read MC_STATUS */
2460
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2460
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2461
		if (!tmp)
2461
		if (!tmp)
2462
			return 0;
2462
			return 0;
2463
		udelay(1);
2463
		udelay(1);
2464
	}
2464
	}
2465
	return -1;
2465
	return -1;
2466
}
2466
}
2467
 
2467
 
2468
/*
2468
/*
2469
 * GART
2469
 * GART
2470
 */
2470
 */
2471
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2471
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2472
{
2472
{
2473
	unsigned i;
2473
	unsigned i;
2474
	u32 tmp;
2474
	u32 tmp;
2475
 
2475
 
2476
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2476
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2477
 
2477
 
2478
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2478
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2479
	for (i = 0; i < rdev->usec_timeout; i++) {
2479
	for (i = 0; i < rdev->usec_timeout; i++) {
2480
		/* read MC_STATUS */
2480
		/* read MC_STATUS */
2481
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2481
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2482
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2482
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2483
		if (tmp == 2) {
2483
		if (tmp == 2) {
2484
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2484
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2485
			return;
2485
			return;
2486
		}
2486
		}
2487
		if (tmp) {
2487
		if (tmp) {
2488
			return;
2488
			return;
2489
		}
2489
		}
2490
		udelay(1);
2490
		udelay(1);
2491
	}
2491
	}
2492
}
2492
}
2493
 
2493
 
2494
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2494
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2495
{
2495
{
2496
	u32 tmp;
2496
	u32 tmp;
2497
	int r;
2497
	int r;
2498
 
2498
 
2499
	if (rdev->gart.robj == NULL) {
2499
	if (rdev->gart.robj == NULL) {
2500
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2500
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2501
		return -EINVAL;
2501
		return -EINVAL;
2502
	}
2502
	}
2503
	r = radeon_gart_table_vram_pin(rdev);
2503
	r = radeon_gart_table_vram_pin(rdev);
2504
	if (r)
2504
	if (r)
2505
		return r;
2505
		return r;
2506
	/* Setup L2 cache */
2506
	/* Setup L2 cache */
2507
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2507
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2508
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2508
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2509
				EFFECTIVE_L2_QUEUE_SIZE(7));
2509
				EFFECTIVE_L2_QUEUE_SIZE(7));
2510
	WREG32(VM_L2_CNTL2, 0);
2510
	WREG32(VM_L2_CNTL2, 0);
2511
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2511
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2512
	/* Setup TLB control */
2512
	/* Setup TLB control */
2513
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2513
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2514
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2514
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2515
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2515
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2516
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2516
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2517
	if (rdev->flags & RADEON_IS_IGP) {
2517
	if (rdev->flags & RADEON_IS_IGP) {
2518
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2518
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2519
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2519
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2520
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2520
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2521
	} else {
2521
	} else {
2522
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2522
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2523
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2523
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2524
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2524
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2525
		if ((rdev->family == CHIP_JUNIPER) ||
2525
		if ((rdev->family == CHIP_JUNIPER) ||
2526
		    (rdev->family == CHIP_CYPRESS) ||
2526
		    (rdev->family == CHIP_CYPRESS) ||
2527
		    (rdev->family == CHIP_HEMLOCK) ||
2527
		    (rdev->family == CHIP_HEMLOCK) ||
2528
		    (rdev->family == CHIP_BARTS))
2528
		    (rdev->family == CHIP_BARTS))
2529
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2529
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2530
	}
2530
	}
2531
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2531
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2532
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2532
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2533
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2533
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2534
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2534
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2535
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2535
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2536
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2536
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2537
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2537
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2538
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2538
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2539
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2539
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2540
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2540
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2541
			(u32)(rdev->dummy_page.addr >> 12));
2541
			(u32)(rdev->dummy_page.addr >> 12));
2542
	WREG32(VM_CONTEXT1_CNTL, 0);
2542
	WREG32(VM_CONTEXT1_CNTL, 0);
2543
 
2543
 
2544
	evergreen_pcie_gart_tlb_flush(rdev);
2544
	evergreen_pcie_gart_tlb_flush(rdev);
2545
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2545
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2546
		 (unsigned)(rdev->mc.gtt_size >> 20),
2546
		 (unsigned)(rdev->mc.gtt_size >> 20),
2547
		 (unsigned long long)rdev->gart.table_addr);
2547
		 (unsigned long long)rdev->gart.table_addr);
2548
	rdev->gart.ready = true;
2548
	rdev->gart.ready = true;
2549
	return 0;
2549
	return 0;
2550
}
2550
}
2551
 
2551
 
2552
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2552
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2553
{
2553
{
2554
	u32 tmp;
2554
	u32 tmp;
2555
 
2555
 
2556
	/* Disable all tables */
2556
	/* Disable all tables */
2557
	WREG32(VM_CONTEXT0_CNTL, 0);
2557
	WREG32(VM_CONTEXT0_CNTL, 0);
2558
	WREG32(VM_CONTEXT1_CNTL, 0);
2558
	WREG32(VM_CONTEXT1_CNTL, 0);
2559
 
2559
 
2560
	/* Setup L2 cache */
2560
	/* Setup L2 cache */
2561
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2561
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2562
				EFFECTIVE_L2_QUEUE_SIZE(7));
2562
				EFFECTIVE_L2_QUEUE_SIZE(7));
2563
	WREG32(VM_L2_CNTL2, 0);
2563
	WREG32(VM_L2_CNTL2, 0);
2564
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2564
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2565
	/* Setup TLB control */
2565
	/* Setup TLB control */
2566
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2566
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2567
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2567
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2568
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2568
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2569
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2569
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2570
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2570
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2571
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2571
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2572
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2572
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2573
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2573
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2574
	radeon_gart_table_vram_unpin(rdev);
2574
	radeon_gart_table_vram_unpin(rdev);
2575
}
2575
}
2576
 
2576
 
2577
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2577
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2578
{
2578
{
2579
	evergreen_pcie_gart_disable(rdev);
2579
	evergreen_pcie_gart_disable(rdev);
2580
	radeon_gart_table_vram_free(rdev);
2580
	radeon_gart_table_vram_free(rdev);
2581
	radeon_gart_fini(rdev);
2581
	radeon_gart_fini(rdev);
2582
}
2582
}
2583
 
2583
 
2584
 
2584
 
2585
static void evergreen_agp_enable(struct radeon_device *rdev)
2585
static void evergreen_agp_enable(struct radeon_device *rdev)
2586
{
2586
{
2587
	u32 tmp;
2587
	u32 tmp;
2588
 
2588
 
2589
	/* Setup L2 cache */
2589
	/* Setup L2 cache */
2590
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2590
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2591
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2591
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2592
				EFFECTIVE_L2_QUEUE_SIZE(7));
2592
				EFFECTIVE_L2_QUEUE_SIZE(7));
2593
	WREG32(VM_L2_CNTL2, 0);
2593
	WREG32(VM_L2_CNTL2, 0);
2594
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2594
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2595
	/* Setup TLB control */
2595
	/* Setup TLB control */
2596
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2596
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2597
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2597
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2598
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2598
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2599
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2599
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2600
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2600
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2601
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2601
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2602
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2602
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2603
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2603
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2604
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2604
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2605
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2605
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2606
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2606
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2607
	WREG32(VM_CONTEXT0_CNTL, 0);
2607
	WREG32(VM_CONTEXT0_CNTL, 0);
2608
	WREG32(VM_CONTEXT1_CNTL, 0);
2608
	WREG32(VM_CONTEXT1_CNTL, 0);
2609
}
2609
}
-
 
2610
 
-
 
2611
static const unsigned ni_dig_offsets[] =
-
 
2612
{
-
 
2613
	NI_DIG0_REGISTER_OFFSET,
-
 
2614
	NI_DIG1_REGISTER_OFFSET,
-
 
2615
	NI_DIG2_REGISTER_OFFSET,
-
 
2616
	NI_DIG3_REGISTER_OFFSET,
-
 
2617
	NI_DIG4_REGISTER_OFFSET,
-
 
2618
	NI_DIG5_REGISTER_OFFSET
-
 
2619
};
-
 
2620
 
-
 
2621
static const unsigned ni_tx_offsets[] =
-
 
2622
{
-
 
2623
	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
-
 
2624
	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
-
 
2625
	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
-
 
2626
	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
-
 
2627
	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
-
 
2628
	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
-
 
2629
};
-
 
2630
 
-
 
2631
static const unsigned evergreen_dp_offsets[] =
-
 
2632
{
-
 
2633
	EVERGREEN_DP0_REGISTER_OFFSET,
-
 
2634
	EVERGREEN_DP1_REGISTER_OFFSET,
-
 
2635
	EVERGREEN_DP2_REGISTER_OFFSET,
-
 
2636
	EVERGREEN_DP3_REGISTER_OFFSET,
-
 
2637
	EVERGREEN_DP4_REGISTER_OFFSET,
-
 
2638
	EVERGREEN_DP5_REGISTER_OFFSET
-
 
2639
};
-
 
2640
 
-
 
2641
 
-
 
2642
/*
-
 
2643
 * Assumption is that EVERGREEN_CRTC_MASTER_EN enable for requested crtc
-
 
2644
 * We go from crtc to connector and it is not relible  since it
-
 
2645
 * should be an opposite direction .If crtc is enable then
-
 
2646
 * find the dig_fe which selects this crtc and insure that it enable.
-
 
2647
 * if such dig_fe is found then find dig_be which selects found dig_be and
-
 
2648
 * insure that it enable and in DP_SST mode.
-
 
2649
 * if UNIPHY_PLL_CONTROL1.enable then we should disconnect timing
-
 
2650
 * from dp symbols clocks .
-
 
2651
 */
-
 
2652
static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
-
 
2653
					       unsigned crtc_id, unsigned *ret_dig_fe)
-
 
2654
{
-
 
2655
	unsigned i;
-
 
2656
	unsigned dig_fe;
-
 
2657
	unsigned dig_be;
-
 
2658
	unsigned dig_en_be;
-
 
2659
	unsigned uniphy_pll;
-
 
2660
	unsigned digs_fe_selected;
-
 
2661
	unsigned dig_be_mode;
-
 
2662
	unsigned dig_fe_mask;
-
 
2663
	bool is_enabled = false;
-
 
2664
	bool found_crtc = false;
-
 
2665
 
-
 
2666
	/* loop through all running dig_fe to find selected crtc */
-
 
2667
	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
-
 
2668
		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
-
 
2669
		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
-
 
2670
		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
-
 
2671
			/* found running pipe */
-
 
2672
			found_crtc = true;
-
 
2673
			dig_fe_mask = 1 << i;
-
 
2674
			dig_fe = i;
-
 
2675
			break;
-
 
2676
		}
-
 
2677
	}
-
 
2678
 
-
 
2679
	if (found_crtc) {
-
 
2680
		/* loop through all running dig_be to find selected dig_fe */
-
 
2681
		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
-
 
2682
			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
-
 
2683
			/* if dig_fe_selected by dig_be? */
-
 
2684
			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
-
 
2685
			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
-
 
2686
			if (dig_fe_mask &  digs_fe_selected &&
-
 
2687
			    /* if dig_be in sst mode? */
-
 
2688
			    dig_be_mode == NI_DIG_BE_DPSST) {
-
 
2689
				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
-
 
2690
						   ni_dig_offsets[i]);
-
 
2691
				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
-
 
2692
						    ni_tx_offsets[i]);
-
 
2693
				/* dig_be enable and tx is running */
-
 
2694
				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
-
 
2695
				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
-
 
2696
				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
-
 
2697
					is_enabled = true;
-
 
2698
					*ret_dig_fe = dig_fe;
-
 
2699
					break;
-
 
2700
				}
-
 
2701
			}
-
 
2702
		}
-
 
2703
	}
-
 
2704
 
-
 
2705
	return is_enabled;
-
 
2706
}
-
 
2707
 
-
 
2708
/*
-
 
2709
 * Blank dig when in dp sst mode
-
 
2710
 * Dig ignores crtc timing
-
 
2711
 */
-
 
2712
static void evergreen_blank_dp_output(struct radeon_device *rdev,
-
 
2713
				      unsigned dig_fe)
-
 
2714
{
-
 
2715
	unsigned stream_ctrl;
-
 
2716
	unsigned fifo_ctrl;
-
 
2717
	unsigned counter = 0;
-
 
2718
 
-
 
2719
	if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
-
 
2720
		DRM_ERROR("invalid dig_fe %d\n", dig_fe);
-
 
2721
		return;
-
 
2722
	}
-
 
2723
 
-
 
2724
	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
-
 
2725
			     evergreen_dp_offsets[dig_fe]);
-
 
2726
	if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
-
 
2727
		DRM_ERROR("dig %d , should be enable\n", dig_fe);
-
 
2728
		return;
-
 
2729
	}
-
 
2730
 
-
 
2731
	stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
-
 
2732
	WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
-
 
2733
	       evergreen_dp_offsets[dig_fe], stream_ctrl);
-
 
2734
 
-
 
2735
	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
-
 
2736
			     evergreen_dp_offsets[dig_fe]);
-
 
2737
	while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
-
 
2738
		msleep(1);
-
 
2739
		counter++;
-
 
2740
		stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
-
 
2741
				     evergreen_dp_offsets[dig_fe]);
-
 
2742
	}
-
 
2743
	if (counter >= 32 )
-
 
2744
		DRM_ERROR("counter exceeds %d\n", counter);
-
 
2745
 
-
 
2746
	fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
-
 
2747
	fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
-
 
2748
	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
-
 
2749
 
-
 
2750
}
2610
 
2751
 
2611
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2752
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2612
{
2753
{
2613
	u32 crtc_enabled, tmp, frame_count, blackout;
2754
	u32 crtc_enabled, tmp, frame_count, blackout;
2614
	int i, j;
2755
	int i, j;
-
 
2756
	unsigned dig_fe;
2615
 
2757
 
2616
	if (!ASIC_IS_NODCE(rdev)) {
2758
	if (!ASIC_IS_NODCE(rdev)) {
2617
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2759
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2618
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2760
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2619
 
2761
 
2620
		/* disable VGA render */
2762
		/* disable VGA render */
2621
		WREG32(VGA_RENDER_CONTROL, 0);
2763
		WREG32(VGA_RENDER_CONTROL, 0);
2622
	}
2764
	}
2623
	/* blank the display controllers */
2765
	/* blank the display controllers */
2624
	for (i = 0; i < rdev->num_crtc; i++) {
2766
	for (i = 0; i < rdev->num_crtc; i++) {
2625
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2767
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2626
		if (crtc_enabled) {
2768
		if (crtc_enabled) {
2627
			save->crtc_enabled[i] = true;
2769
			save->crtc_enabled[i] = true;
2628
			if (ASIC_IS_DCE6(rdev)) {
2770
			if (ASIC_IS_DCE6(rdev)) {
2629
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2771
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2630
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2772
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2631
					radeon_wait_for_vblank(rdev, i);
2773
					radeon_wait_for_vblank(rdev, i);
2632
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2774
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2633
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2775
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2634
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2776
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2635
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2777
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2636
				}
2778
				}
2637
			} else {
2779
			} else {
2638
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2780
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2639
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2781
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2640
					radeon_wait_for_vblank(rdev, i);
2782
					radeon_wait_for_vblank(rdev, i);
2641
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2783
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2642
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2784
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2643
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2785
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2644
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2786
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2645
				}
2787
				}
2646
			}
2788
			}
2647
			/* wait for the next frame */
2789
			/* wait for the next frame */
2648
			frame_count = radeon_get_vblank_counter(rdev, i);
2790
			frame_count = radeon_get_vblank_counter(rdev, i);
2649
			for (j = 0; j < rdev->usec_timeout; j++) {
2791
			for (j = 0; j < rdev->usec_timeout; j++) {
2650
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2792
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2651
					break;
2793
					break;
2652
				udelay(1);
2794
				udelay(1);
2653
			}
2795
			}
-
 
2796
			/*we should disable dig if it drives dp sst*/
-
 
2797
			/*but we are in radeon_device_init and the topology is unknown*/
-
 
2798
			/*and it is available after radeon_modeset_init*/
-
 
2799
			/*the following method radeon_atom_encoder_dpms_dig*/
-
 
2800
			/*does the job if we initialize it properly*/
-
 
2801
			/*for now we do it this manually*/
2654
 
2802
			/**/
-
 
2803
			if (ASIC_IS_DCE5(rdev) &&
-
 
2804
			    evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
-
 
2805
				evergreen_blank_dp_output(rdev, dig_fe);
-
 
2806
			/*we could remove 6 lines below*/
2655
			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2807
			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2656
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2808
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2657
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2809
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2658
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2810
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2659
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2811
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2660
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2812
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2661
			save->crtc_enabled[i] = false;
2813
			save->crtc_enabled[i] = false;
2662
			/* ***** */
2814
			/* ***** */
2663
		} else {
2815
		} else {
2664
			save->crtc_enabled[i] = false;
2816
			save->crtc_enabled[i] = false;
2665
		}
2817
		}
2666
	}
2818
	}
2667
 
2819
 
2668
	radeon_mc_wait_for_idle(rdev);
2820
	radeon_mc_wait_for_idle(rdev);
2669
 
2821
 
2670
	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2822
	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2671
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2823
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2672
		/* Block CPU access */
2824
		/* Block CPU access */
2673
		WREG32(BIF_FB_EN, 0);
2825
		WREG32(BIF_FB_EN, 0);
2674
		/* blackout the MC */
2826
		/* blackout the MC */
2675
		blackout &= ~BLACKOUT_MODE_MASK;
2827
		blackout &= ~BLACKOUT_MODE_MASK;
2676
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2828
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2677
	}
2829
	}
2678
	/* wait for the MC to settle */
2830
	/* wait for the MC to settle */
2679
	udelay(100);
2831
	udelay(100);
2680
 
2832
 
2681
	/* lock double buffered regs */
2833
	/* lock double buffered regs */
2682
	for (i = 0; i < rdev->num_crtc; i++) {
2834
	for (i = 0; i < rdev->num_crtc; i++) {
2683
		if (save->crtc_enabled[i]) {
2835
		if (save->crtc_enabled[i]) {
2684
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2836
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2685
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2837
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2686
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2838
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2687
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2839
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2688
			}
2840
			}
2689
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2841
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2690
			if (!(tmp & 1)) {
2842
			if (!(tmp & 1)) {
2691
				tmp |= 1;
2843
				tmp |= 1;
2692
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2844
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2693
			}
2845
			}
2694
		}
2846
		}
2695
	}
2847
	}
2696
}
2848
}
2697
 
2849
 
2698
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2850
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2699
{
2851
{
2700
	u32 tmp, frame_count;
2852
	u32 tmp, frame_count;
2701
	int i, j;
2853
	int i, j;
2702
 
2854
 
2703
	/* update crtc base addresses */
2855
	/* update crtc base addresses */
2704
	for (i = 0; i < rdev->num_crtc; i++) {
2856
	for (i = 0; i < rdev->num_crtc; i++) {
2705
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2857
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2706
		       upper_32_bits(rdev->mc.vram_start));
2858
		       upper_32_bits(rdev->mc.vram_start));
2707
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2859
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2708
		       upper_32_bits(rdev->mc.vram_start));
2860
		       upper_32_bits(rdev->mc.vram_start));
2709
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2861
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2710
		       (u32)rdev->mc.vram_start);
2862
		       (u32)rdev->mc.vram_start);
2711
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2863
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2712
		       (u32)rdev->mc.vram_start);
2864
		       (u32)rdev->mc.vram_start);
2713
	}
2865
	}
2714
 
2866
 
2715
	if (!ASIC_IS_NODCE(rdev)) {
2867
	if (!ASIC_IS_NODCE(rdev)) {
2716
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2868
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2717
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2869
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2718
	}
2870
	}
2719
 
2871
 
2720
	/* unlock regs and wait for update */
2872
	/* unlock regs and wait for update */
2721
	for (i = 0; i < rdev->num_crtc; i++) {
2873
	for (i = 0; i < rdev->num_crtc; i++) {
2722
		if (save->crtc_enabled[i]) {
2874
		if (save->crtc_enabled[i]) {
2723
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2875
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2724
			if ((tmp & 0x7) != 3) {
2876
			if ((tmp & 0x7) != 3) {
2725
				tmp &= ~0x7;
2877
				tmp &= ~0x7;
2726
				tmp |= 0x3;
2878
				tmp |= 0x3;
2727
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2879
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2728
			}
2880
			}
2729
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2881
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2730
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2882
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2731
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2883
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2732
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2884
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2733
			}
2885
			}
2734
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2886
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2735
			if (tmp & 1) {
2887
			if (tmp & 1) {
2736
				tmp &= ~1;
2888
				tmp &= ~1;
2737
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2889
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2738
			}
2890
			}
2739
			for (j = 0; j < rdev->usec_timeout; j++) {
2891
			for (j = 0; j < rdev->usec_timeout; j++) {
2740
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2892
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2741
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2893
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2742
					break;
2894
					break;
2743
				udelay(1);
2895
				udelay(1);
2744
			}
2896
			}
2745
		}
2897
		}
2746
	}
2898
	}
2747
 
2899
 
2748
	/* unblackout the MC */
2900
	/* unblackout the MC */
2749
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2901
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2750
	tmp &= ~BLACKOUT_MODE_MASK;
2902
	tmp &= ~BLACKOUT_MODE_MASK;
2751
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2903
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2752
	/* allow CPU access */
2904
	/* allow CPU access */
2753
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2905
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2754
 
2906
 
2755
	for (i = 0; i < rdev->num_crtc; i++) {
2907
	for (i = 0; i < rdev->num_crtc; i++) {
2756
		if (save->crtc_enabled[i]) {
2908
		if (save->crtc_enabled[i]) {
2757
			if (ASIC_IS_DCE6(rdev)) {
2909
			if (ASIC_IS_DCE6(rdev)) {
2758
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2910
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2759
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2911
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2760
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2912
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2761
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2913
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2762
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2914
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2763
			} else {
2915
			} else {
2764
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2916
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2765
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2917
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2766
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2918
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2767
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2919
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2768
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2920
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2769
			}
2921
			}
2770
			/* wait for the next frame */
2922
			/* wait for the next frame */
2771
			frame_count = radeon_get_vblank_counter(rdev, i);
2923
			frame_count = radeon_get_vblank_counter(rdev, i);
2772
			for (j = 0; j < rdev->usec_timeout; j++) {
2924
			for (j = 0; j < rdev->usec_timeout; j++) {
2773
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2925
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2774
					break;
2926
					break;
2775
				udelay(1);
2927
				udelay(1);
2776
			}
2928
			}
2777
		}
2929
		}
2778
	}
2930
	}
2779
	if (!ASIC_IS_NODCE(rdev)) {
2931
	if (!ASIC_IS_NODCE(rdev)) {
2780
		/* Unlock vga access */
2932
		/* Unlock vga access */
2781
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2933
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2782
		mdelay(1);
2934
		mdelay(1);
2783
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2935
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2784
	}
2936
	}
2785
}
2937
}
2786
 
2938
 
2787
void evergreen_mc_program(struct radeon_device *rdev)
2939
void evergreen_mc_program(struct radeon_device *rdev)
2788
{
2940
{
2789
	struct evergreen_mc_save save;
2941
	struct evergreen_mc_save save;
2790
	u32 tmp;
2942
	u32 tmp;
2791
	int i, j;
2943
	int i, j;
2792
 
2944
 
2793
	/* Initialize HDP */
2945
	/* Initialize HDP */
2794
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2946
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2795
		WREG32((0x2c14 + j), 0x00000000);
2947
		WREG32((0x2c14 + j), 0x00000000);
2796
		WREG32((0x2c18 + j), 0x00000000);
2948
		WREG32((0x2c18 + j), 0x00000000);
2797
		WREG32((0x2c1c + j), 0x00000000);
2949
		WREG32((0x2c1c + j), 0x00000000);
2798
		WREG32((0x2c20 + j), 0x00000000);
2950
		WREG32((0x2c20 + j), 0x00000000);
2799
		WREG32((0x2c24 + j), 0x00000000);
2951
		WREG32((0x2c24 + j), 0x00000000);
2800
	}
2952
	}
2801
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2953
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2802
 
2954
 
2803
	evergreen_mc_stop(rdev, &save);
2955
	evergreen_mc_stop(rdev, &save);
2804
	if (evergreen_mc_wait_for_idle(rdev)) {
2956
	if (evergreen_mc_wait_for_idle(rdev)) {
2805
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2957
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2806
	}
2958
	}
2807
	/* Lockout access through VGA aperture*/
2959
	/* Lockout access through VGA aperture*/
2808
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2960
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2809
	/* Update configuration */
2961
	/* Update configuration */
2810
	if (rdev->flags & RADEON_IS_AGP) {
2962
	if (rdev->flags & RADEON_IS_AGP) {
2811
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2963
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2812
			/* VRAM before AGP */
2964
			/* VRAM before AGP */
2813
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2965
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2814
				rdev->mc.vram_start >> 12);
2966
				rdev->mc.vram_start >> 12);
2815
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2967
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2816
				rdev->mc.gtt_end >> 12);
2968
				rdev->mc.gtt_end >> 12);
2817
		} else {
2969
		} else {
2818
			/* VRAM after AGP */
2970
			/* VRAM after AGP */
2819
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2971
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2820
				rdev->mc.gtt_start >> 12);
2972
				rdev->mc.gtt_start >> 12);
2821
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2973
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2822
				rdev->mc.vram_end >> 12);
2974
				rdev->mc.vram_end >> 12);
2823
		}
2975
		}
2824
	} else {
2976
	} else {
2825
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2977
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2826
			rdev->mc.vram_start >> 12);
2978
			rdev->mc.vram_start >> 12);
2827
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2979
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2828
			rdev->mc.vram_end >> 12);
2980
			rdev->mc.vram_end >> 12);
2829
	}
2981
	}
2830
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2982
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2831
	/* llano/ontario only */
2983
	/* llano/ontario only */
2832
	if ((rdev->family == CHIP_PALM) ||
2984
	if ((rdev->family == CHIP_PALM) ||
2833
	    (rdev->family == CHIP_SUMO) ||
2985
	    (rdev->family == CHIP_SUMO) ||
2834
	    (rdev->family == CHIP_SUMO2)) {
2986
	    (rdev->family == CHIP_SUMO2)) {
2835
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2987
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2836
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2988
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2837
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2989
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2838
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2990
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2839
	}
2991
	}
2840
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2992
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2841
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2993
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2842
	WREG32(MC_VM_FB_LOCATION, tmp);
2994
	WREG32(MC_VM_FB_LOCATION, tmp);
2843
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2995
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2844
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2996
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2845
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2997
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2846
	if (rdev->flags & RADEON_IS_AGP) {
2998
	if (rdev->flags & RADEON_IS_AGP) {
2847
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2999
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2848
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
3000
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2849
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
3001
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
2850
	} else {
3002
	} else {
2851
		WREG32(MC_VM_AGP_BASE, 0);
3003
		WREG32(MC_VM_AGP_BASE, 0);
2852
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
3004
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2853
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
3005
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2854
	}
3006
	}
2855
	if (evergreen_mc_wait_for_idle(rdev)) {
3007
	if (evergreen_mc_wait_for_idle(rdev)) {
2856
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3008
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2857
	}
3009
	}
2858
	evergreen_mc_resume(rdev, &save);
3010
	evergreen_mc_resume(rdev, &save);
2859
	/* we need to own VRAM, so turn off the VGA renderer here
3011
	/* we need to own VRAM, so turn off the VGA renderer here
2860
	 * to stop it overwriting our objects */
3012
	 * to stop it overwriting our objects */
2861
	rv515_vga_render_disable(rdev);
3013
	rv515_vga_render_disable(rdev);
2862
}
3014
}
2863
 
3015
 
2864
/*
3016
/*
2865
 * CP.
3017
 * CP.
2866
 */
3018
 */
2867
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3019
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2868
{
3020
{
2869
	struct radeon_ring *ring = &rdev->ring[ib->ring];
3021
	struct radeon_ring *ring = &rdev->ring[ib->ring];
2870
	u32 next_rptr;
3022
	u32 next_rptr;
2871
 
3023
 
2872
	/* set to DX10/11 mode */
3024
	/* set to DX10/11 mode */
2873
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
3025
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2874
	radeon_ring_write(ring, 1);
3026
	radeon_ring_write(ring, 1);
2875
 
3027
 
2876
	if (ring->rptr_save_reg) {
3028
	if (ring->rptr_save_reg) {
2877
		next_rptr = ring->wptr + 3 + 4;
3029
		next_rptr = ring->wptr + 3 + 4;
2878
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3030
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2879
		radeon_ring_write(ring, ((ring->rptr_save_reg - 
3031
		radeon_ring_write(ring, ((ring->rptr_save_reg - 
2880
					  PACKET3_SET_CONFIG_REG_START) >> 2));
3032
					  PACKET3_SET_CONFIG_REG_START) >> 2));
2881
		radeon_ring_write(ring, next_rptr);
3033
		radeon_ring_write(ring, next_rptr);
2882
	} else if (rdev->wb.enabled) {
3034
	} else if (rdev->wb.enabled) {
2883
		next_rptr = ring->wptr + 5 + 4;
3035
		next_rptr = ring->wptr + 5 + 4;
2884
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3036
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2885
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3037
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
2886
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3038
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2887
		radeon_ring_write(ring, next_rptr);
3039
		radeon_ring_write(ring, next_rptr);
2888
		radeon_ring_write(ring, 0);
3040
		radeon_ring_write(ring, 0);
2889
	}
3041
	}
2890
 
3042
 
2891
	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3043
	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2892
	radeon_ring_write(ring,
3044
	radeon_ring_write(ring,
2893
#ifdef __BIG_ENDIAN
3045
#ifdef __BIG_ENDIAN
2894
			  (2 << 0) |
3046
			  (2 << 0) |
2895
#endif
3047
#endif
2896
			  (ib->gpu_addr & 0xFFFFFFFC));
3048
			  (ib->gpu_addr & 0xFFFFFFFC));
2897
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3049
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2898
	radeon_ring_write(ring, ib->length_dw);
3050
	radeon_ring_write(ring, ib->length_dw);
2899
}
3051
}
2900
 
3052
 
2901
 
3053
 
2902
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
3054
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2903
{
3055
{
2904
	const __be32 *fw_data;
3056
	const __be32 *fw_data;
2905
	int i;
3057
	int i;
2906
 
3058
 
2907
	if (!rdev->me_fw || !rdev->pfp_fw)
3059
	if (!rdev->me_fw || !rdev->pfp_fw)
2908
		return -EINVAL;
3060
		return -EINVAL;
2909
 
3061
 
2910
	r700_cp_stop(rdev);
3062
	r700_cp_stop(rdev);
2911
	WREG32(CP_RB_CNTL,
3063
	WREG32(CP_RB_CNTL,
2912
#ifdef __BIG_ENDIAN
3064
#ifdef __BIG_ENDIAN
2913
	       BUF_SWAP_32BIT |
3065
	       BUF_SWAP_32BIT |
2914
#endif
3066
#endif
2915
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
3067
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2916
 
3068
 
2917
	fw_data = (const __be32 *)rdev->pfp_fw->data;
3069
	fw_data = (const __be32 *)rdev->pfp_fw->data;
2918
	WREG32(CP_PFP_UCODE_ADDR, 0);
3070
	WREG32(CP_PFP_UCODE_ADDR, 0);
2919
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
3071
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2920
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3072
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2921
	WREG32(CP_PFP_UCODE_ADDR, 0);
3073
	WREG32(CP_PFP_UCODE_ADDR, 0);
2922
 
3074
 
2923
	fw_data = (const __be32 *)rdev->me_fw->data;
3075
	fw_data = (const __be32 *)rdev->me_fw->data;
2924
	WREG32(CP_ME_RAM_WADDR, 0);
3076
	WREG32(CP_ME_RAM_WADDR, 0);
2925
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
3077
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2926
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3078
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2927
 
3079
 
2928
	WREG32(CP_PFP_UCODE_ADDR, 0);
3080
	WREG32(CP_PFP_UCODE_ADDR, 0);
2929
	WREG32(CP_ME_RAM_WADDR, 0);
3081
	WREG32(CP_ME_RAM_WADDR, 0);
2930
	WREG32(CP_ME_RAM_RADDR, 0);
3082
	WREG32(CP_ME_RAM_RADDR, 0);
2931
	return 0;
3083
	return 0;
2932
}
3084
}
2933
 
3085
 
2934
static int evergreen_cp_start(struct radeon_device *rdev)
3086
static int evergreen_cp_start(struct radeon_device *rdev)
2935
{
3087
{
2936
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3088
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2937
	int r, i;
3089
	int r, i;
2938
	uint32_t cp_me;
3090
	uint32_t cp_me;
2939
 
3091
 
2940
	r = radeon_ring_lock(rdev, ring, 7);
3092
	r = radeon_ring_lock(rdev, ring, 7);
2941
	if (r) {
3093
	if (r) {
2942
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3094
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2943
		return r;
3095
		return r;
2944
	}
3096
	}
2945
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3097
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2946
	radeon_ring_write(ring, 0x1);
3098
	radeon_ring_write(ring, 0x1);
2947
	radeon_ring_write(ring, 0x0);
3099
	radeon_ring_write(ring, 0x0);
2948
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
3100
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
2949
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3101
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2950
	radeon_ring_write(ring, 0);
3102
	radeon_ring_write(ring, 0);
2951
	radeon_ring_write(ring, 0);
3103
	radeon_ring_write(ring, 0);
2952
	radeon_ring_unlock_commit(rdev, ring, false);
3104
	radeon_ring_unlock_commit(rdev, ring, false);
2953
 
3105
 
2954
	cp_me = 0xff;
3106
	cp_me = 0xff;
2955
	WREG32(CP_ME_CNTL, cp_me);
3107
	WREG32(CP_ME_CNTL, cp_me);
2956
 
3108
 
2957
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
3109
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
2958
	if (r) {
3110
	if (r) {
2959
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3111
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2960
		return r;
3112
		return r;
2961
	}
3113
	}
2962
 
3114
 
2963
	/* setup clear context state */
3115
	/* setup clear context state */
2964
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3116
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2965
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3117
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
2966
 
3118
 
2967
	for (i = 0; i < evergreen_default_size; i++)
3119
	for (i = 0; i < evergreen_default_size; i++)
2968
		radeon_ring_write(ring, evergreen_default_state[i]);
3120
		radeon_ring_write(ring, evergreen_default_state[i]);
2969
 
3121
 
2970
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3122
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2971
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3123
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
2972
 
3124
 
2973
	/* set clear context state */
3125
	/* set clear context state */
2974
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3126
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
2975
	radeon_ring_write(ring, 0);
3127
	radeon_ring_write(ring, 0);
2976
 
3128
 
2977
	/* SQ_VTX_BASE_VTX_LOC */
3129
	/* SQ_VTX_BASE_VTX_LOC */
2978
	radeon_ring_write(ring, 0xc0026f00);
3130
	radeon_ring_write(ring, 0xc0026f00);
2979
	radeon_ring_write(ring, 0x00000000);
3131
	radeon_ring_write(ring, 0x00000000);
2980
	radeon_ring_write(ring, 0x00000000);
3132
	radeon_ring_write(ring, 0x00000000);
2981
	radeon_ring_write(ring, 0x00000000);
3133
	radeon_ring_write(ring, 0x00000000);
2982
 
3134
 
2983
	/* Clear consts */
3135
	/* Clear consts */
2984
	radeon_ring_write(ring, 0xc0036f00);
3136
	radeon_ring_write(ring, 0xc0036f00);
2985
	radeon_ring_write(ring, 0x00000bc4);
3137
	radeon_ring_write(ring, 0x00000bc4);
2986
	radeon_ring_write(ring, 0xffffffff);
3138
	radeon_ring_write(ring, 0xffffffff);
2987
	radeon_ring_write(ring, 0xffffffff);
3139
	radeon_ring_write(ring, 0xffffffff);
2988
	radeon_ring_write(ring, 0xffffffff);
3140
	radeon_ring_write(ring, 0xffffffff);
2989
 
3141
 
2990
	radeon_ring_write(ring, 0xc0026900);
3142
	radeon_ring_write(ring, 0xc0026900);
2991
	radeon_ring_write(ring, 0x00000316);
3143
	radeon_ring_write(ring, 0x00000316);
2992
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3144
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
2993
	radeon_ring_write(ring, 0x00000010); /*  */
3145
	radeon_ring_write(ring, 0x00000010); /*  */
2994
 
3146
 
2995
	radeon_ring_unlock_commit(rdev, ring, false);
3147
	radeon_ring_unlock_commit(rdev, ring, false);
2996
 
3148
 
2997
	return 0;
3149
	return 0;
2998
}
3150
}
2999
 
3151
 
3000
/**
 * evergreen_cp_resume - bring the gfx command processor (CP) ring up
 * @rdev: radeon device structure
 *
 * Soft-resets the CP (and the PA/SH/VGT/SPI/SX blocks that must be reset
 * along with it), programs the gfx ring buffer registers (size, read/write
 * pointers, writeback addresses), starts the CP microcode via
 * evergreen_cp_start() and finally runs a ring test.
 *
 * Returns 0 on success, or the negative error code from the ring test
 * (in which case ring->ready is left false).
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* read back to post the write */
	mdelay(15);			/* let the reset settle before releasing it */
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size (log2 of the size in dwords/qwords) */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	/* have the CP byte-swap ring data on big-endian hosts */
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers.
	 * RB_RPTR_WR_ENA is set temporarily so the rptr override in
	 * CP_RB_RPTR_WR takes effect; it is cleared again below when
	 * CP_RB_CNTL is rewritten with plain 'tmp'. */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: stop the CP from updating rptr in memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	/* final CP_RB_CNTL value: clears RB_RPTR_WR_ENA set above */
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
3066
 
3218
 
3067
/*
3219
/*
3068
 * Core functions
3220
 * Core functions
3069
 */
3221
 */
3070
static void evergreen_gpu_init(struct radeon_device *rdev)
3222
static void evergreen_gpu_init(struct radeon_device *rdev)
3071
{
3223
{
3072
	u32 gb_addr_config;
3224
	u32 gb_addr_config;
3073
	u32 mc_shared_chmap, mc_arb_ramcfg;
3225
	u32 mc_shared_chmap, mc_arb_ramcfg;
3074
	u32 sx_debug_1;
3226
	u32 sx_debug_1;
3075
	u32 smx_dc_ctl0;
3227
	u32 smx_dc_ctl0;
3076
	u32 sq_config;
3228
	u32 sq_config;
3077
	u32 sq_lds_resource_mgmt;
3229
	u32 sq_lds_resource_mgmt;
3078
	u32 sq_gpr_resource_mgmt_1;
3230
	u32 sq_gpr_resource_mgmt_1;
3079
	u32 sq_gpr_resource_mgmt_2;
3231
	u32 sq_gpr_resource_mgmt_2;
3080
	u32 sq_gpr_resource_mgmt_3;
3232
	u32 sq_gpr_resource_mgmt_3;
3081
	u32 sq_thread_resource_mgmt;
3233
	u32 sq_thread_resource_mgmt;
3082
	u32 sq_thread_resource_mgmt_2;
3234
	u32 sq_thread_resource_mgmt_2;
3083
	u32 sq_stack_resource_mgmt_1;
3235
	u32 sq_stack_resource_mgmt_1;
3084
	u32 sq_stack_resource_mgmt_2;
3236
	u32 sq_stack_resource_mgmt_2;
3085
	u32 sq_stack_resource_mgmt_3;
3237
	u32 sq_stack_resource_mgmt_3;
3086
	u32 vgt_cache_invalidation;
3238
	u32 vgt_cache_invalidation;
3087
	u32 hdp_host_path_cntl, tmp;
3239
	u32 hdp_host_path_cntl, tmp;
3088
	u32 disabled_rb_mask;
3240
	u32 disabled_rb_mask;
3089
	int i, j, ps_thread_count;
3241
	int i, j, ps_thread_count;
3090
 
3242
 
3091
	switch (rdev->family) {
3243
	switch (rdev->family) {
3092
	case CHIP_CYPRESS:
3244
	case CHIP_CYPRESS:
3093
	case CHIP_HEMLOCK:
3245
	case CHIP_HEMLOCK:
3094
		rdev->config.evergreen.num_ses = 2;
3246
		rdev->config.evergreen.num_ses = 2;
3095
		rdev->config.evergreen.max_pipes = 4;
3247
		rdev->config.evergreen.max_pipes = 4;
3096
		rdev->config.evergreen.max_tile_pipes = 8;
3248
		rdev->config.evergreen.max_tile_pipes = 8;
3097
		rdev->config.evergreen.max_simds = 10;
3249
		rdev->config.evergreen.max_simds = 10;
3098
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3250
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3099
		rdev->config.evergreen.max_gprs = 256;
3251
		rdev->config.evergreen.max_gprs = 256;
3100
		rdev->config.evergreen.max_threads = 248;
3252
		rdev->config.evergreen.max_threads = 248;
3101
		rdev->config.evergreen.max_gs_threads = 32;
3253
		rdev->config.evergreen.max_gs_threads = 32;
3102
		rdev->config.evergreen.max_stack_entries = 512;
3254
		rdev->config.evergreen.max_stack_entries = 512;
3103
		rdev->config.evergreen.sx_num_of_sets = 4;
3255
		rdev->config.evergreen.sx_num_of_sets = 4;
3104
		rdev->config.evergreen.sx_max_export_size = 256;
3256
		rdev->config.evergreen.sx_max_export_size = 256;
3105
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3257
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3106
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3258
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3107
		rdev->config.evergreen.max_hw_contexts = 8;
3259
		rdev->config.evergreen.max_hw_contexts = 8;
3108
		rdev->config.evergreen.sq_num_cf_insts = 2;
3260
		rdev->config.evergreen.sq_num_cf_insts = 2;
3109
 
3261
 
3110
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3262
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3111
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3263
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3112
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3264
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3113
		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3265
		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3114
		break;
3266
		break;
3115
	case CHIP_JUNIPER:
3267
	case CHIP_JUNIPER:
3116
		rdev->config.evergreen.num_ses = 1;
3268
		rdev->config.evergreen.num_ses = 1;
3117
		rdev->config.evergreen.max_pipes = 4;
3269
		rdev->config.evergreen.max_pipes = 4;
3118
		rdev->config.evergreen.max_tile_pipes = 4;
3270
		rdev->config.evergreen.max_tile_pipes = 4;
3119
		rdev->config.evergreen.max_simds = 10;
3271
		rdev->config.evergreen.max_simds = 10;
3120
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3272
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3121
		rdev->config.evergreen.max_gprs = 256;
3273
		rdev->config.evergreen.max_gprs = 256;
3122
		rdev->config.evergreen.max_threads = 248;
3274
		rdev->config.evergreen.max_threads = 248;
3123
		rdev->config.evergreen.max_gs_threads = 32;
3275
		rdev->config.evergreen.max_gs_threads = 32;
3124
		rdev->config.evergreen.max_stack_entries = 512;
3276
		rdev->config.evergreen.max_stack_entries = 512;
3125
		rdev->config.evergreen.sx_num_of_sets = 4;
3277
		rdev->config.evergreen.sx_num_of_sets = 4;
3126
		rdev->config.evergreen.sx_max_export_size = 256;
3278
		rdev->config.evergreen.sx_max_export_size = 256;
3127
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3279
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3128
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3280
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3129
		rdev->config.evergreen.max_hw_contexts = 8;
3281
		rdev->config.evergreen.max_hw_contexts = 8;
3130
		rdev->config.evergreen.sq_num_cf_insts = 2;
3282
		rdev->config.evergreen.sq_num_cf_insts = 2;
3131
 
3283
 
3132
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3284
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3133
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3285
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3134
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3286
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3135
		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3287
		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3136
		break;
3288
		break;
3137
	case CHIP_REDWOOD:
3289
	case CHIP_REDWOOD:
3138
		rdev->config.evergreen.num_ses = 1;
3290
		rdev->config.evergreen.num_ses = 1;
3139
		rdev->config.evergreen.max_pipes = 4;
3291
		rdev->config.evergreen.max_pipes = 4;
3140
		rdev->config.evergreen.max_tile_pipes = 4;
3292
		rdev->config.evergreen.max_tile_pipes = 4;
3141
		rdev->config.evergreen.max_simds = 5;
3293
		rdev->config.evergreen.max_simds = 5;
3142
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3294
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3143
		rdev->config.evergreen.max_gprs = 256;
3295
		rdev->config.evergreen.max_gprs = 256;
3144
		rdev->config.evergreen.max_threads = 248;
3296
		rdev->config.evergreen.max_threads = 248;
3145
		rdev->config.evergreen.max_gs_threads = 32;
3297
		rdev->config.evergreen.max_gs_threads = 32;
3146
		rdev->config.evergreen.max_stack_entries = 256;
3298
		rdev->config.evergreen.max_stack_entries = 256;
3147
		rdev->config.evergreen.sx_num_of_sets = 4;
3299
		rdev->config.evergreen.sx_num_of_sets = 4;
3148
		rdev->config.evergreen.sx_max_export_size = 256;
3300
		rdev->config.evergreen.sx_max_export_size = 256;
3149
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3301
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3150
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3302
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3151
		rdev->config.evergreen.max_hw_contexts = 8;
3303
		rdev->config.evergreen.max_hw_contexts = 8;
3152
		rdev->config.evergreen.sq_num_cf_insts = 2;
3304
		rdev->config.evergreen.sq_num_cf_insts = 2;
3153
 
3305
 
3154
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3306
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3155
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3307
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3156
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3308
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3157
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3309
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3158
		break;
3310
		break;
3159
	case CHIP_CEDAR:
3311
	case CHIP_CEDAR:
3160
	default:
3312
	default:
3161
		rdev->config.evergreen.num_ses = 1;
3313
		rdev->config.evergreen.num_ses = 1;
3162
		rdev->config.evergreen.max_pipes = 2;
3314
		rdev->config.evergreen.max_pipes = 2;
3163
		rdev->config.evergreen.max_tile_pipes = 2;
3315
		rdev->config.evergreen.max_tile_pipes = 2;
3164
		rdev->config.evergreen.max_simds = 2;
3316
		rdev->config.evergreen.max_simds = 2;
3165
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3317
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3166
		rdev->config.evergreen.max_gprs = 256;
3318
		rdev->config.evergreen.max_gprs = 256;
3167
		rdev->config.evergreen.max_threads = 192;
3319
		rdev->config.evergreen.max_threads = 192;
3168
		rdev->config.evergreen.max_gs_threads = 16;
3320
		rdev->config.evergreen.max_gs_threads = 16;
3169
		rdev->config.evergreen.max_stack_entries = 256;
3321
		rdev->config.evergreen.max_stack_entries = 256;
3170
		rdev->config.evergreen.sx_num_of_sets = 4;
3322
		rdev->config.evergreen.sx_num_of_sets = 4;
3171
		rdev->config.evergreen.sx_max_export_size = 128;
3323
		rdev->config.evergreen.sx_max_export_size = 128;
3172
		rdev->config.evergreen.sx_max_export_pos_size = 32;
3324
		rdev->config.evergreen.sx_max_export_pos_size = 32;
3173
		rdev->config.evergreen.sx_max_export_smx_size = 96;
3325
		rdev->config.evergreen.sx_max_export_smx_size = 96;
3174
		rdev->config.evergreen.max_hw_contexts = 4;
3326
		rdev->config.evergreen.max_hw_contexts = 4;
3175
		rdev->config.evergreen.sq_num_cf_insts = 1;
3327
		rdev->config.evergreen.sq_num_cf_insts = 1;
3176
 
3328
 
3177
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3329
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3178
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3330
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3179
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3331
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3180
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3332
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3181
		break;
3333
		break;
3182
	case CHIP_PALM:
3334
	case CHIP_PALM:
3183
		rdev->config.evergreen.num_ses = 1;
3335
		rdev->config.evergreen.num_ses = 1;
3184
		rdev->config.evergreen.max_pipes = 2;
3336
		rdev->config.evergreen.max_pipes = 2;
3185
		rdev->config.evergreen.max_tile_pipes = 2;
3337
		rdev->config.evergreen.max_tile_pipes = 2;
3186
		rdev->config.evergreen.max_simds = 2;
3338
		rdev->config.evergreen.max_simds = 2;
3187
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3339
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3188
		rdev->config.evergreen.max_gprs = 256;
3340
		rdev->config.evergreen.max_gprs = 256;
3189
		rdev->config.evergreen.max_threads = 192;
3341
		rdev->config.evergreen.max_threads = 192;
3190
		rdev->config.evergreen.max_gs_threads = 16;
3342
		rdev->config.evergreen.max_gs_threads = 16;
3191
		rdev->config.evergreen.max_stack_entries = 256;
3343
		rdev->config.evergreen.max_stack_entries = 256;
3192
		rdev->config.evergreen.sx_num_of_sets = 4;
3344
		rdev->config.evergreen.sx_num_of_sets = 4;
3193
		rdev->config.evergreen.sx_max_export_size = 128;
3345
		rdev->config.evergreen.sx_max_export_size = 128;
3194
		rdev->config.evergreen.sx_max_export_pos_size = 32;
3346
		rdev->config.evergreen.sx_max_export_pos_size = 32;
3195
		rdev->config.evergreen.sx_max_export_smx_size = 96;
3347
		rdev->config.evergreen.sx_max_export_smx_size = 96;
3196
		rdev->config.evergreen.max_hw_contexts = 4;
3348
		rdev->config.evergreen.max_hw_contexts = 4;
3197
		rdev->config.evergreen.sq_num_cf_insts = 1;
3349
		rdev->config.evergreen.sq_num_cf_insts = 1;
3198
 
3350
 
3199
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3351
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3200
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3352
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3201
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3353
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3202
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3354
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3203
		break;
3355
		break;
3204
	case CHIP_SUMO:
3356
	case CHIP_SUMO:
3205
		rdev->config.evergreen.num_ses = 1;
3357
		rdev->config.evergreen.num_ses = 1;
3206
		rdev->config.evergreen.max_pipes = 4;
3358
		rdev->config.evergreen.max_pipes = 4;
3207
		rdev->config.evergreen.max_tile_pipes = 4;
3359
		rdev->config.evergreen.max_tile_pipes = 4;
3208
		if (rdev->pdev->device == 0x9648)
3360
		if (rdev->pdev->device == 0x9648)
3209
			rdev->config.evergreen.max_simds = 3;
3361
			rdev->config.evergreen.max_simds = 3;
3210
		else if ((rdev->pdev->device == 0x9647) ||
3362
		else if ((rdev->pdev->device == 0x9647) ||
3211
			 (rdev->pdev->device == 0x964a))
3363
			 (rdev->pdev->device == 0x964a))
3212
			rdev->config.evergreen.max_simds = 4;
3364
			rdev->config.evergreen.max_simds = 4;
3213
		else
3365
		else
3214
			rdev->config.evergreen.max_simds = 5;
3366
			rdev->config.evergreen.max_simds = 5;
3215
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3367
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3216
		rdev->config.evergreen.max_gprs = 256;
3368
		rdev->config.evergreen.max_gprs = 256;
3217
		rdev->config.evergreen.max_threads = 248;
3369
		rdev->config.evergreen.max_threads = 248;
3218
		rdev->config.evergreen.max_gs_threads = 32;
3370
		rdev->config.evergreen.max_gs_threads = 32;
3219
		rdev->config.evergreen.max_stack_entries = 256;
3371
		rdev->config.evergreen.max_stack_entries = 256;
3220
		rdev->config.evergreen.sx_num_of_sets = 4;
3372
		rdev->config.evergreen.sx_num_of_sets = 4;
3221
		rdev->config.evergreen.sx_max_export_size = 256;
3373
		rdev->config.evergreen.sx_max_export_size = 256;
3222
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3374
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3223
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3375
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3224
		rdev->config.evergreen.max_hw_contexts = 8;
3376
		rdev->config.evergreen.max_hw_contexts = 8;
3225
		rdev->config.evergreen.sq_num_cf_insts = 2;
3377
		rdev->config.evergreen.sq_num_cf_insts = 2;
3226
 
3378
 
3227
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3379
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3228
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3380
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3229
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3381
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3230
		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3382
		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3231
		break;
3383
		break;
3232
	case CHIP_SUMO2:
3384
	case CHIP_SUMO2:
3233
		rdev->config.evergreen.num_ses = 1;
3385
		rdev->config.evergreen.num_ses = 1;
3234
		rdev->config.evergreen.max_pipes = 4;
3386
		rdev->config.evergreen.max_pipes = 4;
3235
		rdev->config.evergreen.max_tile_pipes = 4;
3387
		rdev->config.evergreen.max_tile_pipes = 4;
3236
		rdev->config.evergreen.max_simds = 2;
3388
		rdev->config.evergreen.max_simds = 2;
3237
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3389
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3238
		rdev->config.evergreen.max_gprs = 256;
3390
		rdev->config.evergreen.max_gprs = 256;
3239
		rdev->config.evergreen.max_threads = 248;
3391
		rdev->config.evergreen.max_threads = 248;
3240
		rdev->config.evergreen.max_gs_threads = 32;
3392
		rdev->config.evergreen.max_gs_threads = 32;
3241
		rdev->config.evergreen.max_stack_entries = 512;
3393
		rdev->config.evergreen.max_stack_entries = 512;
3242
		rdev->config.evergreen.sx_num_of_sets = 4;
3394
		rdev->config.evergreen.sx_num_of_sets = 4;
3243
		rdev->config.evergreen.sx_max_export_size = 256;
3395
		rdev->config.evergreen.sx_max_export_size = 256;
3244
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3396
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3245
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3397
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3246
		rdev->config.evergreen.max_hw_contexts = 4;
3398
		rdev->config.evergreen.max_hw_contexts = 4;
3247
		rdev->config.evergreen.sq_num_cf_insts = 2;
3399
		rdev->config.evergreen.sq_num_cf_insts = 2;
3248
 
3400
 
3249
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3401
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3250
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3402
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3251
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3403
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3252
		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3404
		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3253
		break;
3405
		break;
3254
	case CHIP_BARTS:
3406
	case CHIP_BARTS:
3255
		rdev->config.evergreen.num_ses = 2;
3407
		rdev->config.evergreen.num_ses = 2;
3256
		rdev->config.evergreen.max_pipes = 4;
3408
		rdev->config.evergreen.max_pipes = 4;
3257
		rdev->config.evergreen.max_tile_pipes = 8;
3409
		rdev->config.evergreen.max_tile_pipes = 8;
3258
		rdev->config.evergreen.max_simds = 7;
3410
		rdev->config.evergreen.max_simds = 7;
3259
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3411
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3260
		rdev->config.evergreen.max_gprs = 256;
3412
		rdev->config.evergreen.max_gprs = 256;
3261
		rdev->config.evergreen.max_threads = 248;
3413
		rdev->config.evergreen.max_threads = 248;
3262
		rdev->config.evergreen.max_gs_threads = 32;
3414
		rdev->config.evergreen.max_gs_threads = 32;
3263
		rdev->config.evergreen.max_stack_entries = 512;
3415
		rdev->config.evergreen.max_stack_entries = 512;
3264
		rdev->config.evergreen.sx_num_of_sets = 4;
3416
		rdev->config.evergreen.sx_num_of_sets = 4;
3265
		rdev->config.evergreen.sx_max_export_size = 256;
3417
		rdev->config.evergreen.sx_max_export_size = 256;
3266
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3418
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3267
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3419
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3268
		rdev->config.evergreen.max_hw_contexts = 8;
3420
		rdev->config.evergreen.max_hw_contexts = 8;
3269
		rdev->config.evergreen.sq_num_cf_insts = 2;
3421
		rdev->config.evergreen.sq_num_cf_insts = 2;
3270
 
3422
 
3271
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3423
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3272
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3424
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3273
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3425
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3274
		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3426
		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3275
		break;
3427
		break;
3276
	case CHIP_TURKS:
3428
	case CHIP_TURKS:
3277
		rdev->config.evergreen.num_ses = 1;
3429
		rdev->config.evergreen.num_ses = 1;
3278
		rdev->config.evergreen.max_pipes = 4;
3430
		rdev->config.evergreen.max_pipes = 4;
3279
		rdev->config.evergreen.max_tile_pipes = 4;
3431
		rdev->config.evergreen.max_tile_pipes = 4;
3280
		rdev->config.evergreen.max_simds = 6;
3432
		rdev->config.evergreen.max_simds = 6;
3281
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3433
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3282
		rdev->config.evergreen.max_gprs = 256;
3434
		rdev->config.evergreen.max_gprs = 256;
3283
		rdev->config.evergreen.max_threads = 248;
3435
		rdev->config.evergreen.max_threads = 248;
3284
		rdev->config.evergreen.max_gs_threads = 32;
3436
		rdev->config.evergreen.max_gs_threads = 32;
3285
		rdev->config.evergreen.max_stack_entries = 256;
3437
		rdev->config.evergreen.max_stack_entries = 256;
3286
		rdev->config.evergreen.sx_num_of_sets = 4;
3438
		rdev->config.evergreen.sx_num_of_sets = 4;
3287
		rdev->config.evergreen.sx_max_export_size = 256;
3439
		rdev->config.evergreen.sx_max_export_size = 256;
3288
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3440
		rdev->config.evergreen.sx_max_export_pos_size = 64;
3289
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3441
		rdev->config.evergreen.sx_max_export_smx_size = 192;
3290
		rdev->config.evergreen.max_hw_contexts = 8;
3442
		rdev->config.evergreen.max_hw_contexts = 8;
3291
		rdev->config.evergreen.sq_num_cf_insts = 2;
3443
		rdev->config.evergreen.sq_num_cf_insts = 2;
3292
 
3444
 
3293
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3445
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3294
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3446
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3295
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3447
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3296
		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3448
		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3297
		break;
3449
		break;
3298
	case CHIP_CAICOS:
3450
	case CHIP_CAICOS:
3299
		rdev->config.evergreen.num_ses = 1;
3451
		rdev->config.evergreen.num_ses = 1;
3300
		rdev->config.evergreen.max_pipes = 2;
3452
		rdev->config.evergreen.max_pipes = 2;
3301
		rdev->config.evergreen.max_tile_pipes = 2;
3453
		rdev->config.evergreen.max_tile_pipes = 2;
3302
		rdev->config.evergreen.max_simds = 2;
3454
		rdev->config.evergreen.max_simds = 2;
3303
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3455
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3304
		rdev->config.evergreen.max_gprs = 256;
3456
		rdev->config.evergreen.max_gprs = 256;
3305
		rdev->config.evergreen.max_threads = 192;
3457
		rdev->config.evergreen.max_threads = 192;
3306
		rdev->config.evergreen.max_gs_threads = 16;
3458
		rdev->config.evergreen.max_gs_threads = 16;
3307
		rdev->config.evergreen.max_stack_entries = 256;
3459
		rdev->config.evergreen.max_stack_entries = 256;
3308
		rdev->config.evergreen.sx_num_of_sets = 4;
3460
		rdev->config.evergreen.sx_num_of_sets = 4;
3309
		rdev->config.evergreen.sx_max_export_size = 128;
3461
		rdev->config.evergreen.sx_max_export_size = 128;
3310
		rdev->config.evergreen.sx_max_export_pos_size = 32;
3462
		rdev->config.evergreen.sx_max_export_pos_size = 32;
3311
		rdev->config.evergreen.sx_max_export_smx_size = 96;
3463
		rdev->config.evergreen.sx_max_export_smx_size = 96;
3312
		rdev->config.evergreen.max_hw_contexts = 4;
3464
		rdev->config.evergreen.max_hw_contexts = 4;
3313
		rdev->config.evergreen.sq_num_cf_insts = 1;
3465
		rdev->config.evergreen.sq_num_cf_insts = 1;
3314
 
3466
 
3315
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3467
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3316
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3468
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3317
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3469
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3318
		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3470
		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3319
		break;
3471
		break;
3320
	}
3472
	}
3321
 
3473
 
3322
	/* Initialize HDP */
3474
	/* Initialize HDP */
3323
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3475
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3324
		WREG32((0x2c14 + j), 0x00000000);
3476
		WREG32((0x2c14 + j), 0x00000000);
3325
		WREG32((0x2c18 + j), 0x00000000);
3477
		WREG32((0x2c18 + j), 0x00000000);
3326
		WREG32((0x2c1c + j), 0x00000000);
3478
		WREG32((0x2c1c + j), 0x00000000);
3327
		WREG32((0x2c20 + j), 0x00000000);
3479
		WREG32((0x2c20 + j), 0x00000000);
3328
		WREG32((0x2c24 + j), 0x00000000);
3480
		WREG32((0x2c24 + j), 0x00000000);
3329
	}
3481
	}
3330
 
3482
 
3331
	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3483
	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3332
	WREG32(SRBM_INT_CNTL, 0x1);
3484
	WREG32(SRBM_INT_CNTL, 0x1);
3333
	WREG32(SRBM_INT_ACK, 0x1);
3485
	WREG32(SRBM_INT_ACK, 0x1);
3334
 
3486
 
3335
	evergreen_fix_pci_max_read_req_size(rdev);
3487
	evergreen_fix_pci_max_read_req_size(rdev);
3336
 
3488
 
3337
	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3489
	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3338
	if ((rdev->family == CHIP_PALM) ||
3490
	if ((rdev->family == CHIP_PALM) ||
3339
	    (rdev->family == CHIP_SUMO) ||
3491
	    (rdev->family == CHIP_SUMO) ||
3340
	    (rdev->family == CHIP_SUMO2))
3492
	    (rdev->family == CHIP_SUMO2))
3341
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3493
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3342
	else
3494
	else
3343
		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3495
		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3344
 
3496
 
3345
	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3497
	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3346
	 * not have bank info, so create a custom tiling dword.
3498
	 * not have bank info, so create a custom tiling dword.
3347
	 * bits 3:0   num_pipes
3499
	 * bits 3:0   num_pipes
3348
	 * bits 7:4   num_banks
3500
	 * bits 7:4   num_banks
3349
	 * bits 11:8  group_size
3501
	 * bits 11:8  group_size
3350
	 * bits 15:12 row_size
3502
	 * bits 15:12 row_size
3351
	 */
3503
	 */
3352
	rdev->config.evergreen.tile_config = 0;
3504
	rdev->config.evergreen.tile_config = 0;
3353
	switch (rdev->config.evergreen.max_tile_pipes) {
3505
	switch (rdev->config.evergreen.max_tile_pipes) {
3354
	case 1:
3506
	case 1:
3355
	default:
3507
	default:
3356
		rdev->config.evergreen.tile_config |= (0 << 0);
3508
		rdev->config.evergreen.tile_config |= (0 << 0);
3357
		break;
3509
		break;
3358
	case 2:
3510
	case 2:
3359
		rdev->config.evergreen.tile_config |= (1 << 0);
3511
		rdev->config.evergreen.tile_config |= (1 << 0);
3360
		break;
3512
		break;
3361
	case 4:
3513
	case 4:
3362
		rdev->config.evergreen.tile_config |= (2 << 0);
3514
		rdev->config.evergreen.tile_config |= (2 << 0);
3363
		break;
3515
		break;
3364
	case 8:
3516
	case 8:
3365
		rdev->config.evergreen.tile_config |= (3 << 0);
3517
		rdev->config.evergreen.tile_config |= (3 << 0);
3366
		break;
3518
		break;
3367
	}
3519
	}
3368
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3520
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3369
	if (rdev->flags & RADEON_IS_IGP)
3521
	if (rdev->flags & RADEON_IS_IGP)
3370
		rdev->config.evergreen.tile_config |= 1 << 4;
3522
		rdev->config.evergreen.tile_config |= 1 << 4;
3371
	else {
3523
	else {
3372
		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3524
		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3373
		case 0: /* four banks */
3525
		case 0: /* four banks */
3374
			rdev->config.evergreen.tile_config |= 0 << 4;
3526
			rdev->config.evergreen.tile_config |= 0 << 4;
3375
			break;
3527
			break;
3376
		case 1: /* eight banks */
3528
		case 1: /* eight banks */
3377
			rdev->config.evergreen.tile_config |= 1 << 4;
3529
			rdev->config.evergreen.tile_config |= 1 << 4;
3378
			break;
3530
			break;
3379
		case 2: /* sixteen banks */
3531
		case 2: /* sixteen banks */
3380
		default:
3532
		default:
3381
			rdev->config.evergreen.tile_config |= 2 << 4;
3533
			rdev->config.evergreen.tile_config |= 2 << 4;
3382
			break;
3534
			break;
3383
		}
3535
		}
3384
	}
3536
	}
3385
	rdev->config.evergreen.tile_config |= 0 << 8;
3537
	rdev->config.evergreen.tile_config |= 0 << 8;
3386
	rdev->config.evergreen.tile_config |=
3538
	rdev->config.evergreen.tile_config |=
3387
		((gb_addr_config & 0x30000000) >> 28) << 12;
3539
		((gb_addr_config & 0x30000000) >> 28) << 12;
3388
 
3540
 
3389
	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3541
	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3390
		u32 efuse_straps_4;
3542
		u32 efuse_straps_4;
3391
		u32 efuse_straps_3;
3543
		u32 efuse_straps_3;
3392
 
3544
 
3393
		efuse_straps_4 = RREG32_RCU(0x204);
3545
		efuse_straps_4 = RREG32_RCU(0x204);
3394
		efuse_straps_3 = RREG32_RCU(0x203);
3546
		efuse_straps_3 = RREG32_RCU(0x203);
3395
		tmp = (((efuse_straps_4 & 0xf) << 4) |
3547
		tmp = (((efuse_straps_4 & 0xf) << 4) |
3396
		      ((efuse_straps_3 & 0xf0000000) >> 28));
3548
		      ((efuse_straps_3 & 0xf0000000) >> 28));
3397
	} else {
3549
	} else {
3398
		tmp = 0;
3550
		tmp = 0;
3399
		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3551
		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3400
			u32 rb_disable_bitmap;
3552
			u32 rb_disable_bitmap;
3401
 
3553
 
3402
			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3554
			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3403
			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3555
			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3404
			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3556
			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3405
			tmp <<= 4;
3557
			tmp <<= 4;
3406
			tmp |= rb_disable_bitmap;
3558
			tmp |= rb_disable_bitmap;
3407
		}
3559
		}
3408
	}
3560
	}
3409
	/* enabled rb are just the one not disabled :) */
3561
	/* enabled rb are just the one not disabled :) */
3410
	disabled_rb_mask = tmp;
3562
	disabled_rb_mask = tmp;
3411
	tmp = 0;
3563
	tmp = 0;
3412
	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3564
	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3413
		tmp |= (1 << i);
3565
		tmp |= (1 << i);
3414
	/* if all the backends are disabled, fix it up here */
3566
	/* if all the backends are disabled, fix it up here */
3415
	if ((disabled_rb_mask & tmp) == tmp) {
3567
	if ((disabled_rb_mask & tmp) == tmp) {
3416
		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3568
		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3417
			disabled_rb_mask &= ~(1 << i);
3569
			disabled_rb_mask &= ~(1 << i);
3418
	}
3570
	}
3419
 
3571
 
3420
	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3572
	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3421
		u32 simd_disable_bitmap;
3573
		u32 simd_disable_bitmap;
3422
 
3574
 
3423
		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3575
		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3424
		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3576
		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3425
		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3577
		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3426
		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3578
		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3427
		tmp <<= 16;
3579
		tmp <<= 16;
3428
		tmp |= simd_disable_bitmap;
3580
		tmp |= simd_disable_bitmap;
3429
	}
3581
	}
3430
	rdev->config.evergreen.active_simds = hweight32(~tmp);
3582
	rdev->config.evergreen.active_simds = hweight32(~tmp);
3431
 
3583
 
3432
	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3584
	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3433
	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3585
	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3434
 
3586
 
3435
	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3587
	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3436
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3588
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3437
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3589
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3438
	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3590
	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3439
	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3591
	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3440
	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3592
	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3441
	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3593
	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3442
 
3594
 
3443
	if ((rdev->config.evergreen.max_backends == 1) &&
3595
	if ((rdev->config.evergreen.max_backends == 1) &&
3444
	    (rdev->flags & RADEON_IS_IGP)) {
3596
	    (rdev->flags & RADEON_IS_IGP)) {
3445
		if ((disabled_rb_mask & 3) == 1) {
3597
		if ((disabled_rb_mask & 3) == 1) {
3446
			/* RB0 disabled, RB1 enabled */
3598
			/* RB0 disabled, RB1 enabled */
3447
			tmp = 0x11111111;
3599
			tmp = 0x11111111;
3448
		} else {
3600
		} else {
3449
			/* RB1 disabled, RB0 enabled */
3601
			/* RB1 disabled, RB0 enabled */
3450
			tmp = 0x00000000;
3602
			tmp = 0x00000000;
3451
		}
3603
		}
3452
	} else {
3604
	} else {
3453
		tmp = gb_addr_config & NUM_PIPES_MASK;
3605
		tmp = gb_addr_config & NUM_PIPES_MASK;
3454
		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3606
		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3455
						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3607
						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3456
	}
3608
	}
3457
	WREG32(GB_BACKEND_MAP, tmp);
3609
	WREG32(GB_BACKEND_MAP, tmp);
3458
 
3610
 
3459
	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3611
	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3460
	WREG32(CGTS_TCC_DISABLE, 0);
3612
	WREG32(CGTS_TCC_DISABLE, 0);
3461
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3613
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3462
	WREG32(CGTS_USER_TCC_DISABLE, 0);
3614
	WREG32(CGTS_USER_TCC_DISABLE, 0);
3463
 
3615
 
3464
	/* set HW defaults for 3D engine */
3616
	/* set HW defaults for 3D engine */
3465
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3617
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3466
				     ROQ_IB2_START(0x2b)));
3618
				     ROQ_IB2_START(0x2b)));
3467
 
3619
 
3468
	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3620
	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3469
 
3621
 
3470
	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3622
	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3471
			     SYNC_GRADIENT |
3623
			     SYNC_GRADIENT |
3472
			     SYNC_WALKER |
3624
			     SYNC_WALKER |
3473
			     SYNC_ALIGNER));
3625
			     SYNC_ALIGNER));
3474
 
3626
 
3475
	sx_debug_1 = RREG32(SX_DEBUG_1);
3627
	sx_debug_1 = RREG32(SX_DEBUG_1);
3476
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3628
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3477
	WREG32(SX_DEBUG_1, sx_debug_1);
3629
	WREG32(SX_DEBUG_1, sx_debug_1);
3478
 
3630
 
3479
 
3631
 
3480
	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3632
	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3481
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3633
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3482
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3634
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3483
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3635
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3484
 
3636
 
3485
	if (rdev->family <= CHIP_SUMO2)
3637
	if (rdev->family <= CHIP_SUMO2)
3486
		WREG32(SMX_SAR_CTL0, 0x00010000);
3638
		WREG32(SMX_SAR_CTL0, 0x00010000);
3487
 
3639
 
3488
	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3640
	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3489
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3641
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3490
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3642
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3491
 
3643
 
3492
	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3644
	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3493
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3645
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3494
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3646
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3495
 
3647
 
3496
	WREG32(VGT_NUM_INSTANCES, 1);
3648
	WREG32(VGT_NUM_INSTANCES, 1);
3497
	WREG32(SPI_CONFIG_CNTL, 0);
3649
	WREG32(SPI_CONFIG_CNTL, 0);
3498
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3650
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3499
	WREG32(CP_PERFMON_CNTL, 0);
3651
	WREG32(CP_PERFMON_CNTL, 0);
3500
 
3652
 
3501
	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3653
	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3502
				  FETCH_FIFO_HIWATER(0x4) |
3654
				  FETCH_FIFO_HIWATER(0x4) |
3503
				  DONE_FIFO_HIWATER(0xe0) |
3655
				  DONE_FIFO_HIWATER(0xe0) |
3504
				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3656
				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3505
 
3657
 
3506
	sq_config = RREG32(SQ_CONFIG);
3658
	sq_config = RREG32(SQ_CONFIG);
3507
	sq_config &= ~(PS_PRIO(3) |
3659
	sq_config &= ~(PS_PRIO(3) |
3508
		       VS_PRIO(3) |
3660
		       VS_PRIO(3) |
3509
		       GS_PRIO(3) |
3661
		       GS_PRIO(3) |
3510
		       ES_PRIO(3));
3662
		       ES_PRIO(3));
3511
	sq_config |= (VC_ENABLE |
3663
	sq_config |= (VC_ENABLE |
3512
		      EXPORT_SRC_C |
3664
		      EXPORT_SRC_C |
3513
		      PS_PRIO(0) |
3665
		      PS_PRIO(0) |
3514
		      VS_PRIO(1) |
3666
		      VS_PRIO(1) |
3515
		      GS_PRIO(2) |
3667
		      GS_PRIO(2) |
3516
		      ES_PRIO(3));
3668
		      ES_PRIO(3));
3517
 
3669
 
3518
	switch (rdev->family) {
3670
	switch (rdev->family) {
3519
	case CHIP_CEDAR:
3671
	case CHIP_CEDAR:
3520
	case CHIP_PALM:
3672
	case CHIP_PALM:
3521
	case CHIP_SUMO:
3673
	case CHIP_SUMO:
3522
	case CHIP_SUMO2:
3674
	case CHIP_SUMO2:
3523
	case CHIP_CAICOS:
3675
	case CHIP_CAICOS:
3524
		/* no vertex cache */
3676
		/* no vertex cache */
3525
		sq_config &= ~VC_ENABLE;
3677
		sq_config &= ~VC_ENABLE;
3526
		break;
3678
		break;
3527
	default:
3679
	default:
3528
		break;
3680
		break;
3529
	}
3681
	}
3530
 
3682
 
3531
	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3683
	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3532
 
3684
 
3533
	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3685
	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3534
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3686
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3535
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3687
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3536
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3688
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3537
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3689
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3538
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3690
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3539
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3691
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3540
 
3692
 
3541
	switch (rdev->family) {
3693
	switch (rdev->family) {
3542
	case CHIP_CEDAR:
3694
	case CHIP_CEDAR:
3543
	case CHIP_PALM:
3695
	case CHIP_PALM:
3544
	case CHIP_SUMO:
3696
	case CHIP_SUMO:
3545
	case CHIP_SUMO2:
3697
	case CHIP_SUMO2:
3546
		ps_thread_count = 96;
3698
		ps_thread_count = 96;
3547
		break;
3699
		break;
3548
	default:
3700
	default:
3549
		ps_thread_count = 128;
3701
		ps_thread_count = 128;
3550
		break;
3702
		break;
3551
	}
3703
	}
3552
 
3704
 
3553
	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3705
	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3554
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3706
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3555
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3707
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3556
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3708
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3557
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3709
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3558
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3710
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3559
 
3711
 
3560
	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3712
	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3561
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3713
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3562
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3714
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3563
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3715
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3564
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3716
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3565
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3717
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3566
 
3718
 
3567
	WREG32(SQ_CONFIG, sq_config);
3719
	WREG32(SQ_CONFIG, sq_config);
3568
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3720
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3569
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3721
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3570
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3722
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3571
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3723
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3572
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3724
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3573
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3725
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3574
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3726
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3575
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3727
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3576
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3728
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3577
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3729
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3578
 
3730
 
3579
	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3731
	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3580
					  FORCE_EOV_MAX_REZ_CNT(255)));
3732
					  FORCE_EOV_MAX_REZ_CNT(255)));
3581
 
3733
 
3582
	switch (rdev->family) {
3734
	switch (rdev->family) {
3583
	case CHIP_CEDAR:
3735
	case CHIP_CEDAR:
3584
	case CHIP_PALM:
3736
	case CHIP_PALM:
3585
	case CHIP_SUMO:
3737
	case CHIP_SUMO:
3586
	case CHIP_SUMO2:
3738
	case CHIP_SUMO2:
3587
	case CHIP_CAICOS:
3739
	case CHIP_CAICOS:
3588
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3740
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3589
		break;
3741
		break;
3590
	default:
3742
	default:
3591
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3743
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3592
		break;
3744
		break;
3593
	}
3745
	}
3594
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3746
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3595
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3747
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3596
 
3748
 
3597
	WREG32(VGT_GS_VERTEX_REUSE, 16);
3749
	WREG32(VGT_GS_VERTEX_REUSE, 16);
3598
	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3750
	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3599
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3751
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3600
 
3752
 
3601
	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3753
	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3602
	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3754
	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3603
 
3755
 
3604
	WREG32(CB_PERF_CTR0_SEL_0, 0);
3756
	WREG32(CB_PERF_CTR0_SEL_0, 0);
3605
	WREG32(CB_PERF_CTR0_SEL_1, 0);
3757
	WREG32(CB_PERF_CTR0_SEL_1, 0);
3606
	WREG32(CB_PERF_CTR1_SEL_0, 0);
3758
	WREG32(CB_PERF_CTR1_SEL_0, 0);
3607
	WREG32(CB_PERF_CTR1_SEL_1, 0);
3759
	WREG32(CB_PERF_CTR1_SEL_1, 0);
3608
	WREG32(CB_PERF_CTR2_SEL_0, 0);
3760
	WREG32(CB_PERF_CTR2_SEL_0, 0);
3609
	WREG32(CB_PERF_CTR2_SEL_1, 0);
3761
	WREG32(CB_PERF_CTR2_SEL_1, 0);
3610
	WREG32(CB_PERF_CTR3_SEL_0, 0);
3762
	WREG32(CB_PERF_CTR3_SEL_0, 0);
3611
	WREG32(CB_PERF_CTR3_SEL_1, 0);
3763
	WREG32(CB_PERF_CTR3_SEL_1, 0);
3612
 
3764
 
3613
	/* clear render buffer base addresses */
3765
	/* clear render buffer base addresses */
3614
	WREG32(CB_COLOR0_BASE, 0);
3766
	WREG32(CB_COLOR0_BASE, 0);
3615
	WREG32(CB_COLOR1_BASE, 0);
3767
	WREG32(CB_COLOR1_BASE, 0);
3616
	WREG32(CB_COLOR2_BASE, 0);
3768
	WREG32(CB_COLOR2_BASE, 0);
3617
	WREG32(CB_COLOR3_BASE, 0);
3769
	WREG32(CB_COLOR3_BASE, 0);
3618
	WREG32(CB_COLOR4_BASE, 0);
3770
	WREG32(CB_COLOR4_BASE, 0);
3619
	WREG32(CB_COLOR5_BASE, 0);
3771
	WREG32(CB_COLOR5_BASE, 0);
3620
	WREG32(CB_COLOR6_BASE, 0);
3772
	WREG32(CB_COLOR6_BASE, 0);
3621
	WREG32(CB_COLOR7_BASE, 0);
3773
	WREG32(CB_COLOR7_BASE, 0);
3622
	WREG32(CB_COLOR8_BASE, 0);
3774
	WREG32(CB_COLOR8_BASE, 0);
3623
	WREG32(CB_COLOR9_BASE, 0);
3775
	WREG32(CB_COLOR9_BASE, 0);
3624
	WREG32(CB_COLOR10_BASE, 0);
3776
	WREG32(CB_COLOR10_BASE, 0);
3625
	WREG32(CB_COLOR11_BASE, 0);
3777
	WREG32(CB_COLOR11_BASE, 0);
3626
 
3778
 
3627
	/* set the shader const cache sizes to 0 */
3779
	/* set the shader const cache sizes to 0 */
3628
	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3780
	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3629
		WREG32(i, 0);
3781
		WREG32(i, 0);
3630
	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3782
	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3631
		WREG32(i, 0);
3783
		WREG32(i, 0);
3632
 
3784
 
3633
	tmp = RREG32(HDP_MISC_CNTL);
3785
	tmp = RREG32(HDP_MISC_CNTL);
3634
	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3786
	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3635
	WREG32(HDP_MISC_CNTL, tmp);
3787
	WREG32(HDP_MISC_CNTL, tmp);
3636
 
3788
 
3637
	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3789
	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3638
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3790
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3639
 
3791
 
3640
	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3792
	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3641
 
3793
 
3642
	udelay(50);
3794
	udelay(50);
3643
 
3795
 
3644
}
3796
}
3645
 
3797
 
3646
int evergreen_mc_init(struct radeon_device *rdev)
3798
int evergreen_mc_init(struct radeon_device *rdev)
3647
{
3799
{
3648
	u32 tmp;
3800
	u32 tmp;
3649
	int chansize, numchan;
3801
	int chansize, numchan;
3650
 
3802
 
3651
	/* Get VRAM informations */
3803
	/* Get VRAM informations */
3652
	rdev->mc.vram_is_ddr = true;
3804
	rdev->mc.vram_is_ddr = true;
3653
	if ((rdev->family == CHIP_PALM) ||
3805
	if ((rdev->family == CHIP_PALM) ||
3654
	    (rdev->family == CHIP_SUMO) ||
3806
	    (rdev->family == CHIP_SUMO) ||
3655
	    (rdev->family == CHIP_SUMO2))
3807
	    (rdev->family == CHIP_SUMO2))
3656
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3808
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3657
	else
3809
	else
3658
		tmp = RREG32(MC_ARB_RAMCFG);
3810
		tmp = RREG32(MC_ARB_RAMCFG);
3659
	if (tmp & CHANSIZE_OVERRIDE) {
3811
	if (tmp & CHANSIZE_OVERRIDE) {
3660
		chansize = 16;
3812
		chansize = 16;
3661
	} else if (tmp & CHANSIZE_MASK) {
3813
	} else if (tmp & CHANSIZE_MASK) {
3662
		chansize = 64;
3814
		chansize = 64;
3663
	} else {
3815
	} else {
3664
		chansize = 32;
3816
		chansize = 32;
3665
	}
3817
	}
3666
	tmp = RREG32(MC_SHARED_CHMAP);
3818
	tmp = RREG32(MC_SHARED_CHMAP);
3667
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3819
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3668
	case 0:
3820
	case 0:
3669
	default:
3821
	default:
3670
		numchan = 1;
3822
		numchan = 1;
3671
		break;
3823
		break;
3672
	case 1:
3824
	case 1:
3673
		numchan = 2;
3825
		numchan = 2;
3674
		break;
3826
		break;
3675
	case 2:
3827
	case 2:
3676
		numchan = 4;
3828
		numchan = 4;
3677
		break;
3829
		break;
3678
	case 3:
3830
	case 3:
3679
		numchan = 8;
3831
		numchan = 8;
3680
		break;
3832
		break;
3681
	}
3833
	}
3682
	rdev->mc.vram_width = numchan * chansize;
3834
	rdev->mc.vram_width = numchan * chansize;
3683
	/* Could aper size report 0 ? */
3835
	/* Could aper size report 0 ? */
3684
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3836
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3685
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3837
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3686
	/* Setup GPU memory space */
3838
	/* Setup GPU memory space */
3687
	if ((rdev->family == CHIP_PALM) ||
3839
	if ((rdev->family == CHIP_PALM) ||
3688
	    (rdev->family == CHIP_SUMO) ||
3840
	    (rdev->family == CHIP_SUMO) ||
3689
	    (rdev->family == CHIP_SUMO2)) {
3841
	    (rdev->family == CHIP_SUMO2)) {
3690
		/* size in bytes on fusion */
3842
		/* size in bytes on fusion */
3691
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3843
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3692
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3844
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3693
	} else {
3845
	} else {
3694
		/* size in MB on evergreen/cayman/tn */
3846
		/* size in MB on evergreen/cayman/tn */
3695
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3847
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3696
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3848
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3697
	}
3849
	}
3698
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3850
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3699
	r700_vram_gtt_location(rdev, &rdev->mc);
3851
	r700_vram_gtt_location(rdev, &rdev->mc);
3700
	radeon_update_bandwidth_info(rdev);
3852
	radeon_update_bandwidth_info(rdev);
3701
 
3853
 
3702
	return 0;
3854
	return 0;
3703
}
3855
}
3704
 
3856
 
3705
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3857
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3706
{
3858
{
3707
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3859
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3708
		RREG32(GRBM_STATUS));
3860
		RREG32(GRBM_STATUS));
3709
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3861
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3710
		RREG32(GRBM_STATUS_SE0));
3862
		RREG32(GRBM_STATUS_SE0));
3711
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3863
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3712
		RREG32(GRBM_STATUS_SE1));
3864
		RREG32(GRBM_STATUS_SE1));
3713
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3865
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3714
		RREG32(SRBM_STATUS));
3866
		RREG32(SRBM_STATUS));
3715
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3867
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3716
		RREG32(SRBM_STATUS2));
3868
		RREG32(SRBM_STATUS2));
3717
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3869
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3718
		RREG32(CP_STALLED_STAT1));
3870
		RREG32(CP_STALLED_STAT1));
3719
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3871
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3720
		RREG32(CP_STALLED_STAT2));
3872
		RREG32(CP_STALLED_STAT2));
3721
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3873
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3722
		RREG32(CP_BUSY_STAT));
3874
		RREG32(CP_BUSY_STAT));
3723
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3875
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3724
		RREG32(CP_STAT));
3876
		RREG32(CP_STAT));
3725
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3877
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3726
		RREG32(DMA_STATUS_REG));
3878
		RREG32(DMA_STATUS_REG));
3727
	if (rdev->family >= CHIP_CAYMAN) {
3879
	if (rdev->family >= CHIP_CAYMAN) {
3728
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3880
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3729
			 RREG32(DMA_STATUS_REG + 0x800));
3881
			 RREG32(DMA_STATUS_REG + 0x800));
3730
	}
3882
	}
3731
}
3883
}
3732
 
3884
 
3733
bool evergreen_is_display_hung(struct radeon_device *rdev)
3885
bool evergreen_is_display_hung(struct radeon_device *rdev)
3734
{
3886
{
3735
	u32 crtc_hung = 0;
3887
	u32 crtc_hung = 0;
3736
	u32 crtc_status[6];
3888
	u32 crtc_status[6];
3737
	u32 i, j, tmp;
3889
	u32 i, j, tmp;
3738
 
3890
 
3739
	for (i = 0; i < rdev->num_crtc; i++) {
3891
	for (i = 0; i < rdev->num_crtc; i++) {
3740
		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3892
		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3741
			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3893
			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3742
			crtc_hung |= (1 << i);
3894
			crtc_hung |= (1 << i);
3743
		}
3895
		}
3744
	}
3896
	}
3745
 
3897
 
3746
	for (j = 0; j < 10; j++) {
3898
	for (j = 0; j < 10; j++) {
3747
		for (i = 0; i < rdev->num_crtc; i++) {
3899
		for (i = 0; i < rdev->num_crtc; i++) {
3748
			if (crtc_hung & (1 << i)) {
3900
			if (crtc_hung & (1 << i)) {
3749
				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3901
				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3750
				if (tmp != crtc_status[i])
3902
				if (tmp != crtc_status[i])
3751
					crtc_hung &= ~(1 << i);
3903
					crtc_hung &= ~(1 << i);
3752
			}
3904
			}
3753
		}
3905
		}
3754
		if (crtc_hung == 0)
3906
		if (crtc_hung == 0)
3755
			return false;
3907
			return false;
3756
		udelay(100);
3908
		udelay(100);
3757
	}
3909
	}
3758
 
3910
 
3759
	return true;
3911
	return true;
3760
}
3912
}
3761
 
3913
 
3762
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3914
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3763
{
3915
{
3764
	u32 reset_mask = 0;
3916
	u32 reset_mask = 0;
3765
	u32 tmp;
3917
	u32 tmp;
3766
 
3918
 
3767
	/* GRBM_STATUS */
3919
	/* GRBM_STATUS */
3768
	tmp = RREG32(GRBM_STATUS);
3920
	tmp = RREG32(GRBM_STATUS);
3769
	if (tmp & (PA_BUSY | SC_BUSY |
3921
	if (tmp & (PA_BUSY | SC_BUSY |
3770
		   SH_BUSY | SX_BUSY |
3922
		   SH_BUSY | SX_BUSY |
3771
		   TA_BUSY | VGT_BUSY |
3923
		   TA_BUSY | VGT_BUSY |
3772
		   DB_BUSY | CB_BUSY |
3924
		   DB_BUSY | CB_BUSY |
3773
		   SPI_BUSY | VGT_BUSY_NO_DMA))
3925
		   SPI_BUSY | VGT_BUSY_NO_DMA))
3774
		reset_mask |= RADEON_RESET_GFX;
3926
		reset_mask |= RADEON_RESET_GFX;
3775
 
3927
 
3776
	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3928
	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3777
		   CP_BUSY | CP_COHERENCY_BUSY))
3929
		   CP_BUSY | CP_COHERENCY_BUSY))
3778
		reset_mask |= RADEON_RESET_CP;
3930
		reset_mask |= RADEON_RESET_CP;
3779
 
3931
 
3780
	if (tmp & GRBM_EE_BUSY)
3932
	if (tmp & GRBM_EE_BUSY)
3781
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3933
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3782
 
3934
 
3783
	/* DMA_STATUS_REG */
3935
	/* DMA_STATUS_REG */
3784
	tmp = RREG32(DMA_STATUS_REG);
3936
	tmp = RREG32(DMA_STATUS_REG);
3785
	if (!(tmp & DMA_IDLE))
3937
	if (!(tmp & DMA_IDLE))
3786
		reset_mask |= RADEON_RESET_DMA;
3938
		reset_mask |= RADEON_RESET_DMA;
3787
 
3939
 
3788
	/* SRBM_STATUS2 */
3940
	/* SRBM_STATUS2 */
3789
	tmp = RREG32(SRBM_STATUS2);
3941
	tmp = RREG32(SRBM_STATUS2);
3790
	if (tmp & DMA_BUSY)
3942
	if (tmp & DMA_BUSY)
3791
		reset_mask |= RADEON_RESET_DMA;
3943
		reset_mask |= RADEON_RESET_DMA;
3792
 
3944
 
3793
	/* SRBM_STATUS */
3945
	/* SRBM_STATUS */
3794
	tmp = RREG32(SRBM_STATUS);
3946
	tmp = RREG32(SRBM_STATUS);
3795
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3947
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3796
		reset_mask |= RADEON_RESET_RLC;
3948
		reset_mask |= RADEON_RESET_RLC;
3797
 
3949
 
3798
	if (tmp & IH_BUSY)
3950
	if (tmp & IH_BUSY)
3799
		reset_mask |= RADEON_RESET_IH;
3951
		reset_mask |= RADEON_RESET_IH;
3800
 
3952
 
3801
	if (tmp & SEM_BUSY)
3953
	if (tmp & SEM_BUSY)
3802
		reset_mask |= RADEON_RESET_SEM;
3954
		reset_mask |= RADEON_RESET_SEM;
3803
 
3955
 
3804
	if (tmp & GRBM_RQ_PENDING)
3956
	if (tmp & GRBM_RQ_PENDING)
3805
		reset_mask |= RADEON_RESET_GRBM;
3957
		reset_mask |= RADEON_RESET_GRBM;
3806
 
3958
 
3807
	if (tmp & VMC_BUSY)
3959
	if (tmp & VMC_BUSY)
3808
		reset_mask |= RADEON_RESET_VMC;
3960
		reset_mask |= RADEON_RESET_VMC;
3809
 
3961
 
3810
	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3962
	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3811
		   MCC_BUSY | MCD_BUSY))
3963
		   MCC_BUSY | MCD_BUSY))
3812
		reset_mask |= RADEON_RESET_MC;
3964
		reset_mask |= RADEON_RESET_MC;
3813
 
3965
 
3814
	if (evergreen_is_display_hung(rdev))
3966
	if (evergreen_is_display_hung(rdev))
3815
		reset_mask |= RADEON_RESET_DISPLAY;
3967
		reset_mask |= RADEON_RESET_DISPLAY;
3816
 
3968
 
3817
	/* VM_L2_STATUS */
3969
	/* VM_L2_STATUS */
3818
	tmp = RREG32(VM_L2_STATUS);
3970
	tmp = RREG32(VM_L2_STATUS);
3819
	if (tmp & L2_BUSY)
3971
	if (tmp & L2_BUSY)
3820
		reset_mask |= RADEON_RESET_VMC;
3972
		reset_mask |= RADEON_RESET_VMC;
3821
 
3973
 
3822
	/* Skip MC reset as it's mostly likely not hung, just busy */
3974
	/* Skip MC reset as it's mostly likely not hung, just busy */
3823
	if (reset_mask & RADEON_RESET_MC) {
3975
	if (reset_mask & RADEON_RESET_MC) {
3824
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3976
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3825
		reset_mask &= ~RADEON_RESET_MC;
3977
		reset_mask &= ~RADEON_RESET_MC;
3826
	}
3978
	}
3827
 
3979
 
3828
	return reset_mask;
3980
	return reset_mask;
3829
}
3981
}
3830
 
3982
 
3831
/**
 * evergreen_gpu_soft_reset - soft-reset the requested GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: RADEON_RESET_* bitmask selecting which blocks to reset
 *
 * Halts the CP (and DMA engine if requested), stops the MC, then pulses
 * the corresponding bits in GRBM_SOFT_RESET / SRBM_SOFT_RESET and
 * restores the MC.  Returns immediately when @reset_mask is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM (gfx block) reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* translate the reset mask into SRBM (system block) reset bits */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC reset is only valid on discrete parts */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		/* read back to post the write before delaying */
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3944
 
4096
 
3945
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
4097
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
3946
{
4098
{
3947
	struct evergreen_mc_save save;
4099
	struct evergreen_mc_save save;
3948
	u32 tmp, i;
4100
	u32 tmp, i;
3949
 
4101
 
3950
	dev_info(rdev->dev, "GPU pci config reset\n");
4102
	dev_info(rdev->dev, "GPU pci config reset\n");
3951
 
4103
 
3952
	/* disable dpm? */
4104
	/* disable dpm? */
3953
 
4105
 
3954
	/* Disable CP parsing/prefetching */
4106
	/* Disable CP parsing/prefetching */
3955
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4107
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3956
	udelay(50);
4108
	udelay(50);
3957
	/* Disable DMA */
4109
	/* Disable DMA */
3958
	tmp = RREG32(DMA_RB_CNTL);
4110
	tmp = RREG32(DMA_RB_CNTL);
3959
	tmp &= ~DMA_RB_ENABLE;
4111
	tmp &= ~DMA_RB_ENABLE;
3960
	WREG32(DMA_RB_CNTL, tmp);
4112
	WREG32(DMA_RB_CNTL, tmp);
3961
	/* XXX other engines? */
4113
	/* XXX other engines? */
3962
 
4114
 
3963
	/* halt the rlc */
4115
	/* halt the rlc */
3964
	r600_rlc_stop(rdev);
4116
	r600_rlc_stop(rdev);
3965
 
4117
 
3966
	udelay(50);
4118
	udelay(50);
3967
 
4119
 
3968
	/* set mclk/sclk to bypass */
4120
	/* set mclk/sclk to bypass */
3969
	rv770_set_clk_bypass_mode(rdev);
4121
	rv770_set_clk_bypass_mode(rdev);
3970
	/* disable BM */
4122
	/* disable BM */
3971
	pci_clear_master(rdev->pdev);
4123
	pci_clear_master(rdev->pdev);
3972
	/* disable mem access */
4124
	/* disable mem access */
3973
	evergreen_mc_stop(rdev, &save);
4125
	evergreen_mc_stop(rdev, &save);
3974
	if (evergreen_mc_wait_for_idle(rdev)) {
4126
	if (evergreen_mc_wait_for_idle(rdev)) {
3975
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
4127
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
3976
	}
4128
	}
3977
	/* reset */
4129
	/* reset */
3978
	radeon_pci_config_reset(rdev);
4130
	radeon_pci_config_reset(rdev);
3979
	/* wait for asic to come out of reset */
4131
	/* wait for asic to come out of reset */
3980
	for (i = 0; i < rdev->usec_timeout; i++) {
4132
	for (i = 0; i < rdev->usec_timeout; i++) {
3981
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
4133
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
3982
			break;
4134
			break;
3983
		udelay(1);
4135
		udelay(1);
3984
	}
4136
	}
3985
}
4137
}
3986
 
4138
 
3987
int evergreen_asic_reset(struct radeon_device *rdev)
4139
int evergreen_asic_reset(struct radeon_device *rdev)
3988
{
4140
{
3989
	u32 reset_mask;
4141
	u32 reset_mask;
3990
 
4142
 
3991
	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4143
	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3992
 
4144
 
3993
	if (reset_mask)
4145
	if (reset_mask)
3994
		r600_set_bios_scratch_engine_hung(rdev, true);
4146
		r600_set_bios_scratch_engine_hung(rdev, true);
3995
 
4147
 
3996
	/* try soft reset */
4148
	/* try soft reset */
3997
	evergreen_gpu_soft_reset(rdev, reset_mask);
4149
	evergreen_gpu_soft_reset(rdev, reset_mask);
3998
 
4150
 
3999
	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4151
	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4000
 
4152
 
4001
	/* try pci config reset */
4153
	/* try pci config reset */
4002
	if (reset_mask && radeon_hard_reset)
4154
	if (reset_mask && radeon_hard_reset)
4003
		evergreen_gpu_pci_config_reset(rdev);
4155
		evergreen_gpu_pci_config_reset(rdev);
4004
 
4156
 
4005
	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4157
	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4006
 
4158
 
4007
	if (!reset_mask)
4159
	if (!reset_mask)
4008
		r600_set_bios_scratch_engine_hung(rdev, false);
4160
		r600_set_bios_scratch_engine_hung(rdev, false);
4009
 
4161
 
4010
	return 0;
4162
	return 0;
4011
}
4163
}
4012
 
4164
 
4013
/**
4165
/**
4014
 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4166
 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4015
 *
4167
 *
4016
 * @rdev: radeon_device pointer
4168
 * @rdev: radeon_device pointer
4017
 * @ring: radeon_ring structure holding ring information
4169
 * @ring: radeon_ring structure holding ring information
4018
 *
4170
 *
4019
 * Check if the GFX engine is locked up.
4171
 * Check if the GFX engine is locked up.
4020
 * Returns true if the engine appears to be locked up, false if not.
4172
 * Returns true if the engine appears to be locked up, false if not.
4021
 */
4173
 */
4022
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4174
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4023
{
4175
{
4024
	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4176
	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4025
 
4177
 
4026
	if (!(reset_mask & (RADEON_RESET_GFX |
4178
	if (!(reset_mask & (RADEON_RESET_GFX |
4027
			    RADEON_RESET_COMPUTE |
4179
			    RADEON_RESET_COMPUTE |
4028
			    RADEON_RESET_CP))) {
4180
			    RADEON_RESET_CP))) {
4029
		radeon_ring_lockup_update(rdev, ring);
4181
		radeon_ring_lockup_update(rdev, ring);
4030
		return false;
4182
		return false;
4031
	}
4183
	}
4032
	return radeon_ring_test_lockup(rdev, ring);
4184
	return radeon_ring_test_lockup(rdev, ring);
4033
}
4185
}
4034
 
4186
 
4035
/*
4187
/*
4036
 * RLC
4188
 * RLC
4037
 */
4189
 */
4038
#define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4190
#define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4039
#define RLC_CLEAR_STATE_END_MARKER          0x00000001
4191
#define RLC_CLEAR_STATE_END_MARKER          0x00000001
4040
 
4192
 
4041
void sumo_rlc_fini(struct radeon_device *rdev)
4193
void sumo_rlc_fini(struct radeon_device *rdev)
4042
{
4194
{
4043
	int r;
4195
	int r;
4044
 
4196
 
4045
	/* save restore block */
4197
	/* save restore block */
4046
	if (rdev->rlc.save_restore_obj) {
4198
	if (rdev->rlc.save_restore_obj) {
4047
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4199
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4048
		if (unlikely(r != 0))
4200
		if (unlikely(r != 0))
4049
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4201
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4050
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
4202
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
4051
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4203
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4052
 
4204
 
4053
		radeon_bo_unref(&rdev->rlc.save_restore_obj);
4205
		radeon_bo_unref(&rdev->rlc.save_restore_obj);
4054
		rdev->rlc.save_restore_obj = NULL;
4206
		rdev->rlc.save_restore_obj = NULL;
4055
	}
4207
	}
4056
 
4208
 
4057
	/* clear state block */
4209
	/* clear state block */
4058
	if (rdev->rlc.clear_state_obj) {
4210
	if (rdev->rlc.clear_state_obj) {
4059
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4211
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4060
		if (unlikely(r != 0))
4212
		if (unlikely(r != 0))
4061
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4213
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4062
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
4214
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
4063
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4215
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4064
 
4216
 
4065
		radeon_bo_unref(&rdev->rlc.clear_state_obj);
4217
		radeon_bo_unref(&rdev->rlc.clear_state_obj);
4066
		rdev->rlc.clear_state_obj = NULL;
4218
		rdev->rlc.clear_state_obj = NULL;
4067
	}
4219
	}
4068
 
4220
 
4069
	/* clear state block */
4221
	/* clear state block */
4070
	if (rdev->rlc.cp_table_obj) {
4222
	if (rdev->rlc.cp_table_obj) {
4071
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4223
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4072
		if (unlikely(r != 0))
4224
		if (unlikely(r != 0))
4073
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4225
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4074
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
4226
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
4075
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4227
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4076
 
4228
 
4077
		radeon_bo_unref(&rdev->rlc.cp_table_obj);
4229
		radeon_bo_unref(&rdev->rlc.cp_table_obj);
4078
		rdev->rlc.cp_table_obj = NULL;
4230
		rdev->rlc.cp_table_obj = NULL;
4079
	}
4231
	}
4080
}
4232
}
4081
 
4233
 
4082
#define CP_ME_TABLE_SIZE    96
4234
#define CP_ME_TABLE_SIZE    96
4083
 
4235
 
4084
int sumo_rlc_init(struct radeon_device *rdev)
4236
int sumo_rlc_init(struct radeon_device *rdev)
4085
{
4237
{
4086
	const u32 *src_ptr;
4238
	const u32 *src_ptr;
4087
	volatile u32 *dst_ptr;
4239
	volatile u32 *dst_ptr;
4088
	u32 dws, data, i, j, k, reg_num;
4240
	u32 dws, data, i, j, k, reg_num;
4089
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4241
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4090
	u64 reg_list_mc_addr;
4242
	u64 reg_list_mc_addr;
4091
	const struct cs_section_def *cs_data;
4243
	const struct cs_section_def *cs_data;
4092
	int r;
4244
	int r;
4093
 
4245
 
4094
	src_ptr = rdev->rlc.reg_list;
4246
	src_ptr = rdev->rlc.reg_list;
4095
	dws = rdev->rlc.reg_list_size;
4247
	dws = rdev->rlc.reg_list_size;
4096
	if (rdev->family >= CHIP_BONAIRE) {
4248
	if (rdev->family >= CHIP_BONAIRE) {
4097
		dws += (5 * 16) + 48 + 48 + 64;
4249
		dws += (5 * 16) + 48 + 48 + 64;
4098
	}
4250
	}
4099
	cs_data = rdev->rlc.cs_data;
4251
	cs_data = rdev->rlc.cs_data;
4100
 
4252
 
4101
	if (src_ptr) {
4253
	if (src_ptr) {
4102
		/* save restore block */
4254
		/* save restore block */
4103
		if (rdev->rlc.save_restore_obj == NULL) {
4255
		if (rdev->rlc.save_restore_obj == NULL) {
4104
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4256
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4105
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4257
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4106
					     NULL, &rdev->rlc.save_restore_obj);
4258
					     NULL, &rdev->rlc.save_restore_obj);
4107
			if (r) {
4259
			if (r) {
4108
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4260
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4109
				return r;
4261
				return r;
4110
			}
4262
			}
4111
		}
4263
		}
4112
 
4264
 
4113
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4265
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4114
		if (unlikely(r != 0)) {
4266
		if (unlikely(r != 0)) {
4115
			sumo_rlc_fini(rdev);
4267
			sumo_rlc_fini(rdev);
4116
			return r;
4268
			return r;
4117
		}
4269
		}
4118
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4270
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4119
				  &rdev->rlc.save_restore_gpu_addr);
4271
				  &rdev->rlc.save_restore_gpu_addr);
4120
		if (r) {
4272
		if (r) {
4121
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4273
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4122
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4274
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4123
			sumo_rlc_fini(rdev);
4275
			sumo_rlc_fini(rdev);
4124
			return r;
4276
			return r;
4125
		}
4277
		}
4126
 
4278
 
4127
		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4279
		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4128
		if (r) {
4280
		if (r) {
4129
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4281
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4130
			sumo_rlc_fini(rdev);
4282
			sumo_rlc_fini(rdev);
4131
			return r;
4283
			return r;
4132
		}
4284
		}
4133
		/* write the sr buffer */
4285
		/* write the sr buffer */
4134
		dst_ptr = rdev->rlc.sr_ptr;
4286
		dst_ptr = rdev->rlc.sr_ptr;
4135
		if (rdev->family >= CHIP_TAHITI) {
4287
		if (rdev->family >= CHIP_TAHITI) {
4136
			/* SI */
4288
			/* SI */
4137
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
4289
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
4138
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4290
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4139
		} else {
4291
		} else {
4140
			/* ON/LN/TN */
4292
			/* ON/LN/TN */
4141
			/* format:
4293
			/* format:
4142
			 * dw0: (reg2 << 16) | reg1
4294
			 * dw0: (reg2 << 16) | reg1
4143
			 * dw1: reg1 save space
4295
			 * dw1: reg1 save space
4144
			 * dw2: reg2 save space
4296
			 * dw2: reg2 save space
4145
			 */
4297
			 */
4146
			for (i = 0; i < dws; i++) {
4298
			for (i = 0; i < dws; i++) {
4147
				data = src_ptr[i] >> 2;
4299
				data = src_ptr[i] >> 2;
4148
				i++;
4300
				i++;
4149
				if (i < dws)
4301
				if (i < dws)
4150
					data |= (src_ptr[i] >> 2) << 16;
4302
					data |= (src_ptr[i] >> 2) << 16;
4151
				j = (((i - 1) * 3) / 2);
4303
				j = (((i - 1) * 3) / 2);
4152
				dst_ptr[j] = cpu_to_le32(data);
4304
				dst_ptr[j] = cpu_to_le32(data);
4153
			}
4305
			}
4154
			j = ((i * 3) / 2);
4306
			j = ((i * 3) / 2);
4155
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4307
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4156
		}
4308
		}
4157
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4309
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4158
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4310
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4159
	}
4311
	}
4160
 
4312
 
4161
	if (cs_data) {
4313
	if (cs_data) {
4162
		/* clear state block */
4314
		/* clear state block */
4163
		if (rdev->family >= CHIP_BONAIRE) {
4315
		if (rdev->family >= CHIP_BONAIRE) {
4164
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4316
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4165
		} else if (rdev->family >= CHIP_TAHITI) {
4317
		} else if (rdev->family >= CHIP_TAHITI) {
4166
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4318
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4167
			dws = rdev->rlc.clear_state_size + (256 / 4);
4319
			dws = rdev->rlc.clear_state_size + (256 / 4);
4168
		} else {
4320
		} else {
4169
			reg_list_num = 0;
4321
			reg_list_num = 0;
4170
			dws = 0;
4322
			dws = 0;
4171
			for (i = 0; cs_data[i].section != NULL; i++) {
4323
			for (i = 0; cs_data[i].section != NULL; i++) {
4172
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4324
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4173
					reg_list_num++;
4325
					reg_list_num++;
4174
					dws += cs_data[i].section[j].reg_count;
4326
					dws += cs_data[i].section[j].reg_count;
4175
				}
4327
				}
4176
			}
4328
			}
4177
			reg_list_blk_index = (3 * reg_list_num + 2);
4329
			reg_list_blk_index = (3 * reg_list_num + 2);
4178
			dws += reg_list_blk_index;
4330
			dws += reg_list_blk_index;
4179
			rdev->rlc.clear_state_size = dws;
4331
			rdev->rlc.clear_state_size = dws;
4180
		}
4332
		}
4181
 
4333
 
4182
		if (rdev->rlc.clear_state_obj == NULL) {
4334
		if (rdev->rlc.clear_state_obj == NULL) {
4183
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4335
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4184
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4336
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4185
					     NULL, &rdev->rlc.clear_state_obj);
4337
					     NULL, &rdev->rlc.clear_state_obj);
4186
			if (r) {
4338
			if (r) {
4187
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4339
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4188
				sumo_rlc_fini(rdev);
4340
				sumo_rlc_fini(rdev);
4189
				return r;
4341
				return r;
4190
			}
4342
			}
4191
		}
4343
		}
4192
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4344
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4193
		if (unlikely(r != 0)) {
4345
		if (unlikely(r != 0)) {
4194
			sumo_rlc_fini(rdev);
4346
			sumo_rlc_fini(rdev);
4195
			return r;
4347
			return r;
4196
		}
4348
		}
4197
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4349
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4198
				  &rdev->rlc.clear_state_gpu_addr);
4350
				  &rdev->rlc.clear_state_gpu_addr);
4199
		if (r) {
4351
		if (r) {
4200
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4352
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4201
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4353
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4202
			sumo_rlc_fini(rdev);
4354
			sumo_rlc_fini(rdev);
4203
			return r;
4355
			return r;
4204
		}
4356
		}
4205
 
4357
 
4206
		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4358
		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4207
		if (r) {
4359
		if (r) {
4208
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4360
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4209
			sumo_rlc_fini(rdev);
4361
			sumo_rlc_fini(rdev);
4210
			return r;
4362
			return r;
4211
		}
4363
		}
4212
		/* set up the cs buffer */
4364
		/* set up the cs buffer */
4213
		dst_ptr = rdev->rlc.cs_ptr;
4365
		dst_ptr = rdev->rlc.cs_ptr;
4214
		if (rdev->family >= CHIP_BONAIRE) {
4366
		if (rdev->family >= CHIP_BONAIRE) {
4215
			cik_get_csb_buffer(rdev, dst_ptr);
4367
			cik_get_csb_buffer(rdev, dst_ptr);
4216
		} else if (rdev->family >= CHIP_TAHITI) {
4368
		} else if (rdev->family >= CHIP_TAHITI) {
4217
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4369
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4218
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4370
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4219
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4371
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4220
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4372
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4221
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4373
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4222
		} else {
4374
		} else {
4223
			reg_list_hdr_blk_index = 0;
4375
			reg_list_hdr_blk_index = 0;
4224
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4376
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4225
			data = upper_32_bits(reg_list_mc_addr);
4377
			data = upper_32_bits(reg_list_mc_addr);
4226
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4378
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4227
			reg_list_hdr_blk_index++;
4379
			reg_list_hdr_blk_index++;
4228
			for (i = 0; cs_data[i].section != NULL; i++) {
4380
			for (i = 0; cs_data[i].section != NULL; i++) {
4229
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4381
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4230
					reg_num = cs_data[i].section[j].reg_count;
4382
					reg_num = cs_data[i].section[j].reg_count;
4231
					data = reg_list_mc_addr & 0xffffffff;
4383
					data = reg_list_mc_addr & 0xffffffff;
4232
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4384
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4233
					reg_list_hdr_blk_index++;
4385
					reg_list_hdr_blk_index++;
4234
 
4386
 
4235
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4387
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4236
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4388
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4237
					reg_list_hdr_blk_index++;
4389
					reg_list_hdr_blk_index++;
4238
 
4390
 
4239
					data = 0x08000000 | (reg_num * 4);
4391
					data = 0x08000000 | (reg_num * 4);
4240
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4392
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4241
					reg_list_hdr_blk_index++;
4393
					reg_list_hdr_blk_index++;
4242
 
4394
 
4243
					for (k = 0; k < reg_num; k++) {
4395
					for (k = 0; k < reg_num; k++) {
4244
						data = cs_data[i].section[j].extent[k];
4396
						data = cs_data[i].section[j].extent[k];
4245
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4397
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4246
					}
4398
					}
4247
					reg_list_mc_addr += reg_num * 4;
4399
					reg_list_mc_addr += reg_num * 4;
4248
					reg_list_blk_index += reg_num;
4400
					reg_list_blk_index += reg_num;
4249
				}
4401
				}
4250
			}
4402
			}
4251
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4403
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4252
		}
4404
		}
4253
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4405
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4254
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4406
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4255
	}
4407
	}
4256
 
4408
 
4257
	if (rdev->rlc.cp_table_size) {
4409
	if (rdev->rlc.cp_table_size) {
4258
		if (rdev->rlc.cp_table_obj == NULL) {
4410
		if (rdev->rlc.cp_table_obj == NULL) {
4259
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4411
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4260
					     PAGE_SIZE, true,
4412
					     PAGE_SIZE, true,
4261
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4413
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4262
					     NULL, &rdev->rlc.cp_table_obj);
4414
					     NULL, &rdev->rlc.cp_table_obj);
4263
			if (r) {
4415
			if (r) {
4264
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4416
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4265
				sumo_rlc_fini(rdev);
4417
				sumo_rlc_fini(rdev);
4266
				return r;
4418
				return r;
4267
			}
4419
			}
4268
		}
4420
		}
4269
 
4421
 
4270
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4422
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4271
		if (unlikely(r != 0)) {
4423
		if (unlikely(r != 0)) {
4272
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4424
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4273
			sumo_rlc_fini(rdev);
4425
			sumo_rlc_fini(rdev);
4274
			return r;
4426
			return r;
4275
		}
4427
		}
4276
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4428
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4277
				  &rdev->rlc.cp_table_gpu_addr);
4429
				  &rdev->rlc.cp_table_gpu_addr);
4278
		if (r) {
4430
		if (r) {
4279
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4431
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4280
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4432
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4281
			sumo_rlc_fini(rdev);
4433
			sumo_rlc_fini(rdev);
4282
			return r;
4434
			return r;
4283
		}
4435
		}
4284
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4436
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4285
		if (r) {
4437
		if (r) {
4286
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4438
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4287
			sumo_rlc_fini(rdev);
4439
			sumo_rlc_fini(rdev);
4288
			return r;
4440
			return r;
4289
		}
4441
		}
4290
 
4442
 
4291
		cik_init_cp_pg_table(rdev);
4443
		cik_init_cp_pg_table(rdev);
4292
 
4444
 
4293
		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4445
		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4294
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4446
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4295
 
4447
 
4296
	}
4448
	}
4297
 
4449
 
4298
	return 0;
4450
	return 0;
4299
}
4451
}
4300
 
4452
 
4301
static void evergreen_rlc_start(struct radeon_device *rdev)
4453
static void evergreen_rlc_start(struct radeon_device *rdev)
4302
{
4454
{
4303
	u32 mask = RLC_ENABLE;
4455
	u32 mask = RLC_ENABLE;
4304
 
4456
 
4305
	if (rdev->flags & RADEON_IS_IGP) {
4457
	if (rdev->flags & RADEON_IS_IGP) {
4306
		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4458
		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4307
	}
4459
	}
4308
 
4460
 
4309
	WREG32(RLC_CNTL, mask);
4461
	WREG32(RLC_CNTL, mask);
4310
}
4462
}
4311
 
4463
 
4312
int evergreen_rlc_resume(struct radeon_device *rdev)
4464
int evergreen_rlc_resume(struct radeon_device *rdev)
4313
{
4465
{
4314
	u32 i;
4466
	u32 i;
4315
	const __be32 *fw_data;
4467
	const __be32 *fw_data;
4316
 
4468
 
4317
	if (!rdev->rlc_fw)
4469
	if (!rdev->rlc_fw)
4318
		return -EINVAL;
4470
		return -EINVAL;
4319
 
4471
 
4320
	r600_rlc_stop(rdev);
4472
	r600_rlc_stop(rdev);
4321
 
4473
 
4322
	WREG32(RLC_HB_CNTL, 0);
4474
	WREG32(RLC_HB_CNTL, 0);
4323
 
4475
 
4324
	if (rdev->flags & RADEON_IS_IGP) {
4476
	if (rdev->flags & RADEON_IS_IGP) {
4325
		if (rdev->family == CHIP_ARUBA) {
4477
		if (rdev->family == CHIP_ARUBA) {
4326
			u32 always_on_bitmap =
4478
			u32 always_on_bitmap =
4327
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4479
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4328
			/* find out the number of active simds */
4480
			/* find out the number of active simds */
4329
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4481
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4330
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4482
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4331
			tmp = hweight32(~tmp);
4483
			tmp = hweight32(~tmp);
4332
			if (tmp == rdev->config.cayman.max_simds_per_se) {
4484
			if (tmp == rdev->config.cayman.max_simds_per_se) {
4333
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4485
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4334
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4486
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4335
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4487
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4336
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4488
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4337
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4489
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4338
			}
4490
			}
4339
		} else {
4491
		} else {
4340
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4492
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4341
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4493
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4342
		}
4494
		}
4343
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4495
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4344
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4496
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4345
	} else {
4497
	} else {
4346
		WREG32(RLC_HB_BASE, 0);
4498
		WREG32(RLC_HB_BASE, 0);
4347
		WREG32(RLC_HB_RPTR, 0);
4499
		WREG32(RLC_HB_RPTR, 0);
4348
		WREG32(RLC_HB_WPTR, 0);
4500
		WREG32(RLC_HB_WPTR, 0);
4349
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4501
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4350
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4502
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4351
	}
4503
	}
4352
	WREG32(RLC_MC_CNTL, 0);
4504
	WREG32(RLC_MC_CNTL, 0);
4353
	WREG32(RLC_UCODE_CNTL, 0);
4505
	WREG32(RLC_UCODE_CNTL, 0);
4354
 
4506
 
4355
	fw_data = (const __be32 *)rdev->rlc_fw->data;
4507
	fw_data = (const __be32 *)rdev->rlc_fw->data;
4356
	if (rdev->family >= CHIP_ARUBA) {
4508
	if (rdev->family >= CHIP_ARUBA) {
4357
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4509
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4358
			WREG32(RLC_UCODE_ADDR, i);
4510
			WREG32(RLC_UCODE_ADDR, i);
4359
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4511
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4360
		}
4512
		}
4361
	} else if (rdev->family >= CHIP_CAYMAN) {
4513
	} else if (rdev->family >= CHIP_CAYMAN) {
4362
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4514
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4363
			WREG32(RLC_UCODE_ADDR, i);
4515
			WREG32(RLC_UCODE_ADDR, i);
4364
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4516
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4365
		}
4517
		}
4366
	} else {
4518
	} else {
4367
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4519
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4368
			WREG32(RLC_UCODE_ADDR, i);
4520
			WREG32(RLC_UCODE_ADDR, i);
4369
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4521
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4370
		}
4522
		}
4371
	}
4523
	}
4372
	WREG32(RLC_UCODE_ADDR, 0);
4524
	WREG32(RLC_UCODE_ADDR, 0);
4373
 
4525
 
4374
	evergreen_rlc_start(rdev);
4526
	evergreen_rlc_start(rdev);
4375
 
4527
 
4376
	return 0;
4528
	return 0;
4377
}
4529
}
4378
 
4530
 
4379
/* Interrupts */
4531
/* Interrupts */
4380
 
4532
 
4381
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4533
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4382
{
4534
{
4383
	if (crtc >= rdev->num_crtc)
4535
	if (crtc >= rdev->num_crtc)
4384
		return 0;
4536
		return 0;
4385
	else
4537
	else
4386
		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4538
		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4387
}
4539
}
4388
 
4540
 
4389
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4541
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4390
{
4542
{
4391
	u32 tmp;
4543
	u32 tmp;
4392
 
4544
 
4393
	if (rdev->family >= CHIP_CAYMAN) {
4545
	if (rdev->family >= CHIP_CAYMAN) {
4394
		cayman_cp_int_cntl_setup(rdev, 0,
4546
		cayman_cp_int_cntl_setup(rdev, 0,
4395
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4547
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4396
		cayman_cp_int_cntl_setup(rdev, 1, 0);
4548
		cayman_cp_int_cntl_setup(rdev, 1, 0);
4397
		cayman_cp_int_cntl_setup(rdev, 2, 0);
4549
		cayman_cp_int_cntl_setup(rdev, 2, 0);
4398
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4550
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4399
		WREG32(CAYMAN_DMA1_CNTL, tmp);
4551
		WREG32(CAYMAN_DMA1_CNTL, tmp);
4400
	} else
4552
	} else
4401
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4553
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4402
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4554
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4403
	WREG32(DMA_CNTL, tmp);
4555
	WREG32(DMA_CNTL, tmp);
4404
	WREG32(GRBM_INT_CNTL, 0);
4556
	WREG32(GRBM_INT_CNTL, 0);
4405
	WREG32(SRBM_INT_CNTL, 0);
4557
	WREG32(SRBM_INT_CNTL, 0);
4406
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4558
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4407
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4559
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4408
	if (rdev->num_crtc >= 4) {
4560
	if (rdev->num_crtc >= 4) {
4409
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4561
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4410
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4562
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4411
	}
4563
	}
4412
	if (rdev->num_crtc >= 6) {
4564
	if (rdev->num_crtc >= 6) {
4413
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4565
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4414
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4566
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4415
	}
4567
	}
4416
 
4568
 
4417
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4569
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4418
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4570
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4419
	if (rdev->num_crtc >= 4) {
4571
	if (rdev->num_crtc >= 4) {
4420
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4572
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4421
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4573
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4422
	}
4574
	}
4423
	if (rdev->num_crtc >= 6) {
4575
	if (rdev->num_crtc >= 6) {
4424
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4576
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4425
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4577
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4426
	}
4578
	}
4427
 
4579
 
4428
	/* only one DAC on DCE5 */
4580
	/* only one DAC on DCE5 */
4429
	if (!ASIC_IS_DCE5(rdev))
4581
	if (!ASIC_IS_DCE5(rdev))
4430
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4582
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4431
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4583
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4432
 
4584
 
4433
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4585
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4434
	WREG32(DC_HPD1_INT_CONTROL, tmp);
4586
	WREG32(DC_HPD1_INT_CONTROL, tmp);
4435
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4587
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4436
	WREG32(DC_HPD2_INT_CONTROL, tmp);
4588
	WREG32(DC_HPD2_INT_CONTROL, tmp);
4437
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4589
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4438
	WREG32(DC_HPD3_INT_CONTROL, tmp);
4590
	WREG32(DC_HPD3_INT_CONTROL, tmp);
4439
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4591
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4440
	WREG32(DC_HPD4_INT_CONTROL, tmp);
4592
	WREG32(DC_HPD4_INT_CONTROL, tmp);
4441
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4593
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4442
	WREG32(DC_HPD5_INT_CONTROL, tmp);
4594
	WREG32(DC_HPD5_INT_CONTROL, tmp);
4443
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4595
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4444
	WREG32(DC_HPD6_INT_CONTROL, tmp);
4596
	WREG32(DC_HPD6_INT_CONTROL, tmp);
4445
 
4597
 
4446
}
4598
}
4447
 
4599
 
4448
int evergreen_irq_set(struct radeon_device *rdev)
4600
int evergreen_irq_set(struct radeon_device *rdev)
4449
{
4601
{
4450
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4602
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4451
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4603
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4452
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4604
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4453
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4605
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4454
	u32 grbm_int_cntl = 0;
4606
	u32 grbm_int_cntl = 0;
4455
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4607
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4456
	u32 dma_cntl, dma_cntl1 = 0;
4608
	u32 dma_cntl, dma_cntl1 = 0;
4457
	u32 thermal_int = 0;
4609
	u32 thermal_int = 0;
4458
 
4610
 
4459
	if (!rdev->irq.installed) {
4611
	if (!rdev->irq.installed) {
4460
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4612
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4461
		return -EINVAL;
4613
		return -EINVAL;
4462
	}
4614
	}
4463
	/* don't enable anything if the ih is disabled */
4615
	/* don't enable anything if the ih is disabled */
4464
	if (!rdev->ih.enabled) {
4616
	if (!rdev->ih.enabled) {
4465
		r600_disable_interrupts(rdev);
4617
		r600_disable_interrupts(rdev);
4466
		/* force the active interrupt state to all disabled */
4618
		/* force the active interrupt state to all disabled */
4467
		evergreen_disable_interrupt_state(rdev);
4619
		evergreen_disable_interrupt_state(rdev);
4468
		return 0;
4620
		return 0;
4469
	}
4621
	}
4470
 
4622
 
4471
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4623
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4472
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4624
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4473
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4625
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4474
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4626
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4475
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4627
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4476
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4628
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4477
	if (rdev->family == CHIP_ARUBA)
4629
	if (rdev->family == CHIP_ARUBA)
4478
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4630
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4479
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4631
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4480
	else
4632
	else
4481
		thermal_int = RREG32(CG_THERMAL_INT) &
4633
		thermal_int = RREG32(CG_THERMAL_INT) &
4482
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4634
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4483
 
4635
 
4484
	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4636
	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4485
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4637
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4486
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4638
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4487
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4639
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4488
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4640
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4489
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4641
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4490
 
4642
 
4491
	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4643
	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4492
 
4644
 
4493
	if (rdev->family >= CHIP_CAYMAN) {
4645
	if (rdev->family >= CHIP_CAYMAN) {
4494
		/* enable CP interrupts on all rings */
4646
		/* enable CP interrupts on all rings */
4495
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4647
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4496
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4648
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4497
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4649
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4498
		}
4650
		}
4499
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4651
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4500
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4652
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4501
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4653
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4502
		}
4654
		}
4503
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4655
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4504
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4656
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4505
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4657
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4506
		}
4658
		}
4507
	} else {
4659
	} else {
4508
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4660
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4509
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4661
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4510
			cp_int_cntl |= RB_INT_ENABLE;
4662
			cp_int_cntl |= RB_INT_ENABLE;
4511
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4663
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4512
		}
4664
		}
4513
	}
4665
	}
4514
 
4666
 
4515
	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4667
	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4516
		DRM_DEBUG("r600_irq_set: sw int dma\n");
4668
		DRM_DEBUG("r600_irq_set: sw int dma\n");
4517
		dma_cntl |= TRAP_ENABLE;
4669
		dma_cntl |= TRAP_ENABLE;
4518
	}
4670
	}
4519
 
4671
 
4520
	if (rdev->family >= CHIP_CAYMAN) {
4672
	if (rdev->family >= CHIP_CAYMAN) {
4521
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4673
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4522
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4674
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4523
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
4675
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
4524
			dma_cntl1 |= TRAP_ENABLE;
4676
			dma_cntl1 |= TRAP_ENABLE;
4525
		}
4677
		}
4526
	}
4678
	}
4527
 
4679
 
4528
	if (rdev->irq.dpm_thermal) {
4680
	if (rdev->irq.dpm_thermal) {
4529
		DRM_DEBUG("dpm thermal\n");
4681
		DRM_DEBUG("dpm thermal\n");
4530
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4682
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4531
	}
4683
	}
4532
 
4684
 
4533
	if (rdev->irq.crtc_vblank_int[0] ||
4685
	if (rdev->irq.crtc_vblank_int[0] ||
4534
	    atomic_read(&rdev->irq.pflip[0])) {
4686
	    atomic_read(&rdev->irq.pflip[0])) {
4535
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4687
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4536
		crtc1 |= VBLANK_INT_MASK;
4688
		crtc1 |= VBLANK_INT_MASK;
4537
	}
4689
	}
4538
	if (rdev->irq.crtc_vblank_int[1] ||
4690
	if (rdev->irq.crtc_vblank_int[1] ||
4539
	    atomic_read(&rdev->irq.pflip[1])) {
4691
	    atomic_read(&rdev->irq.pflip[1])) {
4540
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4692
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4541
		crtc2 |= VBLANK_INT_MASK;
4693
		crtc2 |= VBLANK_INT_MASK;
4542
	}
4694
	}
4543
	if (rdev->irq.crtc_vblank_int[2] ||
4695
	if (rdev->irq.crtc_vblank_int[2] ||
4544
	    atomic_read(&rdev->irq.pflip[2])) {
4696
	    atomic_read(&rdev->irq.pflip[2])) {
4545
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4697
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4546
		crtc3 |= VBLANK_INT_MASK;
4698
		crtc3 |= VBLANK_INT_MASK;
4547
	}
4699
	}
4548
	if (rdev->irq.crtc_vblank_int[3] ||
4700
	if (rdev->irq.crtc_vblank_int[3] ||
4549
	    atomic_read(&rdev->irq.pflip[3])) {
4701
	    atomic_read(&rdev->irq.pflip[3])) {
4550
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4702
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4551
		crtc4 |= VBLANK_INT_MASK;
4703
		crtc4 |= VBLANK_INT_MASK;
4552
	}
4704
	}
4553
	if (rdev->irq.crtc_vblank_int[4] ||
4705
	if (rdev->irq.crtc_vblank_int[4] ||
4554
	    atomic_read(&rdev->irq.pflip[4])) {
4706
	    atomic_read(&rdev->irq.pflip[4])) {
4555
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4707
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4556
		crtc5 |= VBLANK_INT_MASK;
4708
		crtc5 |= VBLANK_INT_MASK;
4557
	}
4709
	}
4558
	if (rdev->irq.crtc_vblank_int[5] ||
4710
	if (rdev->irq.crtc_vblank_int[5] ||
4559
	    atomic_read(&rdev->irq.pflip[5])) {
4711
	    atomic_read(&rdev->irq.pflip[5])) {
4560
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4712
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4561
		crtc6 |= VBLANK_INT_MASK;
4713
		crtc6 |= VBLANK_INT_MASK;
4562
	}
4714
	}
4563
	if (rdev->irq.hpd[0]) {
4715
	if (rdev->irq.hpd[0]) {
4564
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4716
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4565
		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4717
		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4566
	}
4718
	}
4567
	if (rdev->irq.hpd[1]) {
4719
	if (rdev->irq.hpd[1]) {
4568
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4720
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4569
		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4721
		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4570
	}
4722
	}
4571
	if (rdev->irq.hpd[2]) {
4723
	if (rdev->irq.hpd[2]) {
4572
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4724
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4573
		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4725
		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4574
	}
4726
	}
4575
	if (rdev->irq.hpd[3]) {
4727
	if (rdev->irq.hpd[3]) {
4576
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4728
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4577
		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4729
		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4578
	}
4730
	}
4579
	if (rdev->irq.hpd[4]) {
4731
	if (rdev->irq.hpd[4]) {
4580
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4732
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4581
		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4733
		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4582
	}
4734
	}
4583
	if (rdev->irq.hpd[5]) {
4735
	if (rdev->irq.hpd[5]) {
4584
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4736
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4585
		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4737
		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4586
	}
4738
	}
4587
	if (rdev->irq.afmt[0]) {
4739
	if (rdev->irq.afmt[0]) {
4588
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4740
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4589
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4741
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4590
	}
4742
	}
4591
	if (rdev->irq.afmt[1]) {
4743
	if (rdev->irq.afmt[1]) {
4592
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4744
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4593
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4745
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4594
	}
4746
	}
4595
	if (rdev->irq.afmt[2]) {
4747
	if (rdev->irq.afmt[2]) {
4596
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4748
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4597
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4749
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4598
	}
4750
	}
4599
	if (rdev->irq.afmt[3]) {
4751
	if (rdev->irq.afmt[3]) {
4600
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4752
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4601
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4753
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4602
	}
4754
	}
4603
	if (rdev->irq.afmt[4]) {
4755
	if (rdev->irq.afmt[4]) {
4604
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4756
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4605
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4757
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4606
	}
4758
	}
4607
	if (rdev->irq.afmt[5]) {
4759
	if (rdev->irq.afmt[5]) {
4608
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4760
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4609
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4761
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4610
	}
4762
	}
4611
 
4763
 
4612
	if (rdev->family >= CHIP_CAYMAN) {
4764
	if (rdev->family >= CHIP_CAYMAN) {
4613
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4765
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4614
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4766
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4615
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4767
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4616
	} else
4768
	} else
4617
		WREG32(CP_INT_CNTL, cp_int_cntl);
4769
		WREG32(CP_INT_CNTL, cp_int_cntl);
4618
 
4770
 
4619
	WREG32(DMA_CNTL, dma_cntl);
4771
	WREG32(DMA_CNTL, dma_cntl);
4620
 
4772
 
4621
	if (rdev->family >= CHIP_CAYMAN)
4773
	if (rdev->family >= CHIP_CAYMAN)
4622
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4774
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4623
 
4775
 
4624
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4776
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4625
 
4777
 
4626
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4778
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4627
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4779
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4628
	if (rdev->num_crtc >= 4) {
4780
	if (rdev->num_crtc >= 4) {
4629
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4781
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4630
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4782
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4631
	}
4783
	}
4632
	if (rdev->num_crtc >= 6) {
4784
	if (rdev->num_crtc >= 6) {
4633
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4785
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4634
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4786
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4635
	}
4787
	}
4636
 
4788
 
4637
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4789
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4638
	       GRPH_PFLIP_INT_MASK);
4790
	       GRPH_PFLIP_INT_MASK);
4639
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4791
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4640
	       GRPH_PFLIP_INT_MASK);
4792
	       GRPH_PFLIP_INT_MASK);
4641
	if (rdev->num_crtc >= 4) {
4793
	if (rdev->num_crtc >= 4) {
4642
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4794
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4643
		       GRPH_PFLIP_INT_MASK);
4795
		       GRPH_PFLIP_INT_MASK);
4644
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4796
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4645
		       GRPH_PFLIP_INT_MASK);
4797
		       GRPH_PFLIP_INT_MASK);
4646
	}
4798
	}
4647
	if (rdev->num_crtc >= 6) {
4799
	if (rdev->num_crtc >= 6) {
4648
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4800
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4649
		       GRPH_PFLIP_INT_MASK);
4801
		       GRPH_PFLIP_INT_MASK);
4650
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4802
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4651
		       GRPH_PFLIP_INT_MASK);
4803
		       GRPH_PFLIP_INT_MASK);
4652
	}
4804
	}
4653
 
4805
 
4654
	WREG32(DC_HPD1_INT_CONTROL, hpd1);
4806
	WREG32(DC_HPD1_INT_CONTROL, hpd1);
4655
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
4807
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
4656
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
4808
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
4657
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
4809
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
4658
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
4810
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
4659
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
4811
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
4660
	if (rdev->family == CHIP_ARUBA)
4812
	if (rdev->family == CHIP_ARUBA)
4661
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4813
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4662
	else
4814
	else
4663
		WREG32(CG_THERMAL_INT, thermal_int);
4815
		WREG32(CG_THERMAL_INT, thermal_int);
4664
 
4816
 
4665
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4817
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4666
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4818
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4667
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4819
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4668
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4820
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4669
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4821
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4670
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4822
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4671
 
4823
 
4672
	/* posting read */
4824
	/* posting read */
4673
	RREG32(SRBM_STATUS);
4825
	RREG32(SRBM_STATUS);
4674
 
4826
 
4675
	return 0;
4827
	return 0;
4676
}
4828
}
4677
 
4829
 
4678
static void evergreen_irq_ack(struct radeon_device *rdev)
4830
static void evergreen_irq_ack(struct radeon_device *rdev)
4679
{
4831
{
4680
	u32 tmp;
4832
	u32 tmp;
4681
 
4833
 
4682
	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4834
	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4683
	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4835
	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4684
	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4836
	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4685
	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4837
	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4686
	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4838
	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4687
	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4839
	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4688
	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4840
	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4689
	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4841
	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4690
	if (rdev->num_crtc >= 4) {
4842
	if (rdev->num_crtc >= 4) {
4691
		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4843
		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4692
		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4844
		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4693
	}
4845
	}
4694
	if (rdev->num_crtc >= 6) {
4846
	if (rdev->num_crtc >= 6) {
4695
		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4847
		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4696
		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4848
		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4697
	}
4849
	}
4698
 
4850
 
4699
	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4851
	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4700
	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4852
	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4701
	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4853
	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4702
	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4854
	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4703
	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4855
	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4704
	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4856
	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4705
 
4857
 
4706
	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4858
	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4707
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4859
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4708
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4860
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4709
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4861
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4710
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4862
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4711
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4863
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4712
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4864
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4713
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4865
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4714
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4866
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4715
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4867
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4716
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4868
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4717
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4869
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4718
 
4870
 
4719
	if (rdev->num_crtc >= 4) {
4871
	if (rdev->num_crtc >= 4) {
4720
		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4872
		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4721
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4873
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4722
		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4874
		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4723
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4875
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4724
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4876
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4725
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4877
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4726
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4878
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4727
			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4879
			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4728
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4880
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4729
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4881
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4730
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4882
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4731
			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4883
			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4732
	}
4884
	}
4733
 
4885
 
4734
	if (rdev->num_crtc >= 6) {
4886
	if (rdev->num_crtc >= 6) {
4735
		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4887
		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4736
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4888
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4737
		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4889
		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4738
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4890
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4739
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4891
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4740
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4892
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4741
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4893
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4742
			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4894
			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4743
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4895
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4744
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4896
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4745
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4897
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4746
			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4898
			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4747
	}
4899
	}
4748
 
4900
 
4749
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4901
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4750
		tmp = RREG32(DC_HPD1_INT_CONTROL);
4902
		tmp = RREG32(DC_HPD1_INT_CONTROL);
4751
		tmp |= DC_HPDx_INT_ACK;
4903
		tmp |= DC_HPDx_INT_ACK;
4752
		WREG32(DC_HPD1_INT_CONTROL, tmp);
4904
		WREG32(DC_HPD1_INT_CONTROL, tmp);
4753
	}
4905
	}
4754
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4906
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4755
		tmp = RREG32(DC_HPD2_INT_CONTROL);
4907
		tmp = RREG32(DC_HPD2_INT_CONTROL);
4756
		tmp |= DC_HPDx_INT_ACK;
4908
		tmp |= DC_HPDx_INT_ACK;
4757
		WREG32(DC_HPD2_INT_CONTROL, tmp);
4909
		WREG32(DC_HPD2_INT_CONTROL, tmp);
4758
	}
4910
	}
4759
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4911
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4760
		tmp = RREG32(DC_HPD3_INT_CONTROL);
4912
		tmp = RREG32(DC_HPD3_INT_CONTROL);
4761
		tmp |= DC_HPDx_INT_ACK;
4913
		tmp |= DC_HPDx_INT_ACK;
4762
		WREG32(DC_HPD3_INT_CONTROL, tmp);
4914
		WREG32(DC_HPD3_INT_CONTROL, tmp);
4763
	}
4915
	}
4764
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4916
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4765
		tmp = RREG32(DC_HPD4_INT_CONTROL);
4917
		tmp = RREG32(DC_HPD4_INT_CONTROL);
4766
		tmp |= DC_HPDx_INT_ACK;
4918
		tmp |= DC_HPDx_INT_ACK;
4767
		WREG32(DC_HPD4_INT_CONTROL, tmp);
4919
		WREG32(DC_HPD4_INT_CONTROL, tmp);
4768
	}
4920
	}
4769
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4921
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4770
		tmp = RREG32(DC_HPD5_INT_CONTROL);
4922
		tmp = RREG32(DC_HPD5_INT_CONTROL);
4771
		tmp |= DC_HPDx_INT_ACK;
4923
		tmp |= DC_HPDx_INT_ACK;
4772
		WREG32(DC_HPD5_INT_CONTROL, tmp);
4924
		WREG32(DC_HPD5_INT_CONTROL, tmp);
4773
	}
4925
	}
4774
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4926
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4775
		tmp = RREG32(DC_HPD5_INT_CONTROL);
4927
		tmp = RREG32(DC_HPD5_INT_CONTROL);
4776
		tmp |= DC_HPDx_INT_ACK;
4928
		tmp |= DC_HPDx_INT_ACK;
4777
		WREG32(DC_HPD6_INT_CONTROL, tmp);
4929
		WREG32(DC_HPD6_INT_CONTROL, tmp);
4778
	}
4930
	}
4779
 
4931
 
4780
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4932
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4781
		tmp = RREG32(DC_HPD1_INT_CONTROL);
4933
		tmp = RREG32(DC_HPD1_INT_CONTROL);
4782
		tmp |= DC_HPDx_RX_INT_ACK;
4934
		tmp |= DC_HPDx_RX_INT_ACK;
4783
		WREG32(DC_HPD1_INT_CONTROL, tmp);
4935
		WREG32(DC_HPD1_INT_CONTROL, tmp);
4784
	}
4936
	}
4785
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4937
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4786
		tmp = RREG32(DC_HPD2_INT_CONTROL);
4938
		tmp = RREG32(DC_HPD2_INT_CONTROL);
4787
		tmp |= DC_HPDx_RX_INT_ACK;
4939
		tmp |= DC_HPDx_RX_INT_ACK;
4788
		WREG32(DC_HPD2_INT_CONTROL, tmp);
4940
		WREG32(DC_HPD2_INT_CONTROL, tmp);
4789
	}
4941
	}
4790
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4942
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4791
		tmp = RREG32(DC_HPD3_INT_CONTROL);
4943
		tmp = RREG32(DC_HPD3_INT_CONTROL);
4792
		tmp |= DC_HPDx_RX_INT_ACK;
4944
		tmp |= DC_HPDx_RX_INT_ACK;
4793
		WREG32(DC_HPD3_INT_CONTROL, tmp);
4945
		WREG32(DC_HPD3_INT_CONTROL, tmp);
4794
	}
4946
	}
4795
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4947
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4796
		tmp = RREG32(DC_HPD4_INT_CONTROL);
4948
		tmp = RREG32(DC_HPD4_INT_CONTROL);
4797
		tmp |= DC_HPDx_RX_INT_ACK;
4949
		tmp |= DC_HPDx_RX_INT_ACK;
4798
		WREG32(DC_HPD4_INT_CONTROL, tmp);
4950
		WREG32(DC_HPD4_INT_CONTROL, tmp);
4799
	}
4951
	}
4800
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4952
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4801
		tmp = RREG32(DC_HPD5_INT_CONTROL);
4953
		tmp = RREG32(DC_HPD5_INT_CONTROL);
4802
		tmp |= DC_HPDx_RX_INT_ACK;
4954
		tmp |= DC_HPDx_RX_INT_ACK;
4803
		WREG32(DC_HPD5_INT_CONTROL, tmp);
4955
		WREG32(DC_HPD5_INT_CONTROL, tmp);
4804
	}
4956
	}
4805
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4957
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4806
		tmp = RREG32(DC_HPD5_INT_CONTROL);
4958
		tmp = RREG32(DC_HPD5_INT_CONTROL);
4807
		tmp |= DC_HPDx_RX_INT_ACK;
4959
		tmp |= DC_HPDx_RX_INT_ACK;
4808
		WREG32(DC_HPD6_INT_CONTROL, tmp);
4960
		WREG32(DC_HPD6_INT_CONTROL, tmp);
4809
	}
4961
	}
4810
 
4962
 
4811
	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4963
	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4812
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4964
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4813
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4965
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4814
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4966
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4815
	}
4967
	}
4816
	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4968
	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4817
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4969
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4818
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4970
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4819
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4971
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4820
	}
4972
	}
4821
	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4973
	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4822
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4974
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4823
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4975
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4824
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4976
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4825
	}
4977
	}
4826
	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4978
	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4827
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4979
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4828
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4980
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4829
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4981
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4830
	}
4982
	}
4831
	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4983
	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4832
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4984
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4833
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4985
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4834
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4986
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4835
	}
4987
	}
4836
	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4988
	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4837
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4989
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4838
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4990
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4839
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4991
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4840
	}
4992
	}
4841
}
4993
}
4842
 
4994
 
4843
/**
 * evergreen_irq_disable - turn off and quiesce all interrupt sources
 *
 * @rdev: radeon device structure
 *
 * Disables the interrupt controller, waits briefly for any in-flight
 * interrupt to latch, acknowledges everything still pending, and finally
 * masks all display/HPD interrupt enable bits.  Order matters: the ack
 * must happen after the delay so late-arriving interrupts are cleared.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4851
 
5003
 
4852
/**
 * evergreen_irq_suspend - prepare interrupt hardware for suspend
 *
 * @rdev: radeon device structure
 *
 * Disables/acks all interrupts, then stops the RLC (interrupt/graphics
 * microcontroller) so the ASIC is quiet before the device is suspended.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4857
 
5009
 
4858
/**
 * evergreen_get_ih_wptr - fetch the current IH ring write pointer
 *
 * @rdev: radeon device structure
 *
 * Reads the interrupt-handler (IH) ring buffer write pointer, preferring
 * the writeback copy in system memory when writeback is enabled (cheaper
 * than an MMIO read).  If the hardware flagged a ring overflow, advances
 * the software read pointer to the oldest vector that was not overwritten
 * (wptr + 16 bytes, i.e. one 16-byte vector past wptr) and clears the
 * overflow condition in IH_RB_CNTL.
 *
 * Returns the write pointer masked to the ring size (a byte offset).
 */
static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
{
	u32 wptr, tmp;

	if (rdev->wb.enabled)
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
	else
		wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
		wptr &= ~RB_OVERFLOW;
		/* When a ring buffer overflow happen start parsing interrupt
		 * from the last not overwritten vector (wptr + 16). Hopefully
		 * this should allow us to catchup.
		 */
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		/* Clear the sticky overflow bit so it can be detected again. */
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
	}
	return (wptr & rdev->ih.ptr_mask);
}
4882
 
5034
 
4883
int evergreen_irq_process(struct radeon_device *rdev)
5035
int evergreen_irq_process(struct radeon_device *rdev)
4884
{
5036
{
4885
	u32 wptr;
5037
	u32 wptr;
4886
	u32 rptr;
5038
	u32 rptr;
4887
	u32 src_id, src_data;
5039
	u32 src_id, src_data;
4888
	u32 ring_index;
5040
	u32 ring_index;
4889
	bool queue_hotplug = false;
5041
	bool queue_hotplug = false;
4890
	bool queue_hdmi = false;
5042
	bool queue_hdmi = false;
4891
	bool queue_dp = false;
5043
	bool queue_dp = false;
4892
	bool queue_thermal = false;
5044
	bool queue_thermal = false;
4893
	u32 status, addr;
5045
	u32 status, addr;
4894
 
5046
 
4895
	if (!rdev->ih.enabled || rdev->shutdown)
5047
	if (!rdev->ih.enabled || rdev->shutdown)
4896
		return IRQ_NONE;
5048
		return IRQ_NONE;
4897
 
5049
 
4898
	wptr = evergreen_get_ih_wptr(rdev);
5050
	wptr = evergreen_get_ih_wptr(rdev);
4899
 
5051
 
4900
restart_ih:
5052
restart_ih:
4901
	/* is somebody else already processing irqs? */
5053
	/* is somebody else already processing irqs? */
4902
	if (atomic_xchg(&rdev->ih.lock, 1))
5054
	if (atomic_xchg(&rdev->ih.lock, 1))
4903
		return IRQ_NONE;
5055
		return IRQ_NONE;
4904
 
5056
 
4905
	rptr = rdev->ih.rptr;
5057
	rptr = rdev->ih.rptr;
4906
	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5058
	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4907
 
5059
 
4908
	/* Order reading of wptr vs. reading of IH ring data */
5060
	/* Order reading of wptr vs. reading of IH ring data */
4909
	rmb();
5061
	rmb();
4910
 
5062
 
4911
	/* display interrupts */
5063
	/* display interrupts */
4912
	evergreen_irq_ack(rdev);
5064
	evergreen_irq_ack(rdev);
4913
 
5065
 
4914
	while (rptr != wptr) {
5066
	while (rptr != wptr) {
4915
		/* wptr/rptr are in bytes! */
5067
		/* wptr/rptr are in bytes! */
4916
		ring_index = rptr / 4;
5068
		ring_index = rptr / 4;
4917
		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5069
		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4918
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5070
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4919
 
5071
 
4920
		switch (src_id) {
5072
		switch (src_id) {
4921
		case 1: /* D1 vblank/vline */
5073
		case 1: /* D1 vblank/vline */
4922
			switch (src_data) {
5074
			switch (src_data) {
4923
			case 0: /* D1 vblank */
5075
			case 0: /* D1 vblank */
4924
				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5076
				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
4925
					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5077
					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
4926
 
5078
 
4927
				if (rdev->irq.crtc_vblank_int[0]) {
5079
				if (rdev->irq.crtc_vblank_int[0]) {
4928
					drm_handle_vblank(rdev->ddev, 0);
5080
					drm_handle_vblank(rdev->ddev, 0);
4929
					rdev->pm.vblank_sync = true;
5081
					rdev->pm.vblank_sync = true;
4930
					wake_up(&rdev->irq.vblank_queue);
5082
					wake_up(&rdev->irq.vblank_queue);
4931
				}
5083
				}
4932
				if (atomic_read(&rdev->irq.pflip[0]))
5084
				if (atomic_read(&rdev->irq.pflip[0]))
4933
					radeon_crtc_handle_vblank(rdev, 0);
5085
					radeon_crtc_handle_vblank(rdev, 0);
4934
				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5086
				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4935
				DRM_DEBUG("IH: D1 vblank\n");
5087
				DRM_DEBUG("IH: D1 vblank\n");
4936
 
5088
 
4937
				break;
5089
				break;
4938
			case 1: /* D1 vline */
5090
			case 1: /* D1 vline */
4939
				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5091
				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
4940
					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5092
					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
4941
 
5093
 
4942
				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5094
				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4943
				DRM_DEBUG("IH: D1 vline\n");
5095
				DRM_DEBUG("IH: D1 vline\n");
4944
 
5096
 
4945
				break;
5097
				break;
4946
			default:
5098
			default:
4947
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5099
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4948
				break;
5100
				break;
4949
			}
5101
			}
4950
			break;
5102
			break;
4951
		case 2: /* D2 vblank/vline */
5103
		case 2: /* D2 vblank/vline */
4952
			switch (src_data) {
5104
			switch (src_data) {
4953
			case 0: /* D2 vblank */
5105
			case 0: /* D2 vblank */
4954
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5106
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
4955
					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5107
					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
4956
 
5108
 
4957
				if (rdev->irq.crtc_vblank_int[1]) {
5109
				if (rdev->irq.crtc_vblank_int[1]) {
4958
					drm_handle_vblank(rdev->ddev, 1);
5110
					drm_handle_vblank(rdev->ddev, 1);
4959
					rdev->pm.vblank_sync = true;
5111
					rdev->pm.vblank_sync = true;
4960
					wake_up(&rdev->irq.vblank_queue);
5112
					wake_up(&rdev->irq.vblank_queue);
4961
				}
5113
				}
4962
				if (atomic_read(&rdev->irq.pflip[1]))
5114
				if (atomic_read(&rdev->irq.pflip[1]))
4963
					radeon_crtc_handle_vblank(rdev, 1);
5115
					radeon_crtc_handle_vblank(rdev, 1);
4964
				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5116
				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4965
				DRM_DEBUG("IH: D2 vblank\n");
5117
				DRM_DEBUG("IH: D2 vblank\n");
4966
 
5118
 
4967
				break;
5119
				break;
4968
			case 1: /* D2 vline */
5120
			case 1: /* D2 vline */
4969
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5121
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
4970
					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5122
					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
4971
 
5123
 
4972
				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5124
				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4973
				DRM_DEBUG("IH: D2 vline\n");
5125
				DRM_DEBUG("IH: D2 vline\n");
4974
 
5126
 
4975
				break;
5127
				break;
4976
			default:
5128
			default:
4977
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5129
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4978
				break;
5130
				break;
4979
			}
5131
			}
4980
			break;
5132
			break;
4981
		case 3: /* D3 vblank/vline */
5133
		case 3: /* D3 vblank/vline */
4982
			switch (src_data) {
5134
			switch (src_data) {
4983
			case 0: /* D3 vblank */
5135
			case 0: /* D3 vblank */
4984
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5136
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
4985
					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5137
					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
4986
 
5138
 
4987
				if (rdev->irq.crtc_vblank_int[2]) {
5139
				if (rdev->irq.crtc_vblank_int[2]) {
4988
					drm_handle_vblank(rdev->ddev, 2);
5140
					drm_handle_vblank(rdev->ddev, 2);
4989
					rdev->pm.vblank_sync = true;
5141
					rdev->pm.vblank_sync = true;
4990
					wake_up(&rdev->irq.vblank_queue);
5142
					wake_up(&rdev->irq.vblank_queue);
4991
				}
5143
				}
4992
				if (atomic_read(&rdev->irq.pflip[2]))
5144
				if (atomic_read(&rdev->irq.pflip[2]))
4993
					radeon_crtc_handle_vblank(rdev, 2);
5145
					radeon_crtc_handle_vblank(rdev, 2);
4994
				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5146
				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4995
				DRM_DEBUG("IH: D3 vblank\n");
5147
				DRM_DEBUG("IH: D3 vblank\n");
4996
 
5148
 
4997
				break;
5149
				break;
4998
			case 1: /* D3 vline */
5150
			case 1: /* D3 vline */
4999
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5151
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5000
					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5152
					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5001
 
5153
 
5002
				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5154
				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5003
				DRM_DEBUG("IH: D3 vline\n");
5155
				DRM_DEBUG("IH: D3 vline\n");
5004
 
5156
 
5005
				break;
5157
				break;
5006
			default:
5158
			default:
5007
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5159
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5008
				break;
5160
				break;
5009
			}
5161
			}
5010
			break;
5162
			break;
5011
		case 4: /* D4 vblank/vline */
5163
		case 4: /* D4 vblank/vline */
5012
			switch (src_data) {
5164
			switch (src_data) {
5013
			case 0: /* D4 vblank */
5165
			case 0: /* D4 vblank */
5014
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5166
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5015
					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5167
					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5016
 
5168
 
5017
				if (rdev->irq.crtc_vblank_int[3]) {
5169
				if (rdev->irq.crtc_vblank_int[3]) {
5018
					drm_handle_vblank(rdev->ddev, 3);
5170
					drm_handle_vblank(rdev->ddev, 3);
5019
					rdev->pm.vblank_sync = true;
5171
					rdev->pm.vblank_sync = true;
5020
					wake_up(&rdev->irq.vblank_queue);
5172
					wake_up(&rdev->irq.vblank_queue);
5021
				}
5173
				}
5022
				if (atomic_read(&rdev->irq.pflip[3]))
5174
				if (atomic_read(&rdev->irq.pflip[3]))
5023
					radeon_crtc_handle_vblank(rdev, 3);
5175
					radeon_crtc_handle_vblank(rdev, 3);
5024
				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5176
				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5025
				DRM_DEBUG("IH: D4 vblank\n");
5177
				DRM_DEBUG("IH: D4 vblank\n");
5026
 
5178
 
5027
				break;
5179
				break;
5028
			case 1: /* D4 vline */
5180
			case 1: /* D4 vline */
5029
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5181
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5030
					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5182
					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5031
 
5183
 
5032
				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5184
				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5033
				DRM_DEBUG("IH: D4 vline\n");
5185
				DRM_DEBUG("IH: D4 vline\n");
5034
 
5186
 
5035
				break;
5187
				break;
5036
			default:
5188
			default:
5037
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5189
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5038
				break;
5190
				break;
5039
			}
5191
			}
5040
			break;
5192
			break;
5041
		case 5: /* D5 vblank/vline */
5193
		case 5: /* D5 vblank/vline */
5042
			switch (src_data) {
5194
			switch (src_data) {
5043
			case 0: /* D5 vblank */
5195
			case 0: /* D5 vblank */
5044
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5196
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5045
					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5197
					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5046
 
5198
 
5047
				if (rdev->irq.crtc_vblank_int[4]) {
5199
				if (rdev->irq.crtc_vblank_int[4]) {
5048
					drm_handle_vblank(rdev->ddev, 4);
5200
					drm_handle_vblank(rdev->ddev, 4);
5049
					rdev->pm.vblank_sync = true;
5201
					rdev->pm.vblank_sync = true;
5050
					wake_up(&rdev->irq.vblank_queue);
5202
					wake_up(&rdev->irq.vblank_queue);
5051
				}
5203
				}
5052
				if (atomic_read(&rdev->irq.pflip[4]))
5204
				if (atomic_read(&rdev->irq.pflip[4]))
5053
					radeon_crtc_handle_vblank(rdev, 4);
5205
					radeon_crtc_handle_vblank(rdev, 4);
5054
				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5206
				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5055
				DRM_DEBUG("IH: D5 vblank\n");
5207
				DRM_DEBUG("IH: D5 vblank\n");
5056
 
5208
 
5057
				break;
5209
				break;
5058
			case 1: /* D5 vline */
5210
			case 1: /* D5 vline */
5059
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5211
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5060
					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5212
					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5061
 
5213
 
5062
				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5214
				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5063
				DRM_DEBUG("IH: D5 vline\n");
5215
				DRM_DEBUG("IH: D5 vline\n");
5064
 
5216
 
5065
				break;
5217
				break;
5066
			default:
5218
			default:
5067
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5219
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5068
				break;
5220
				break;
5069
			}
5221
			}
5070
			break;
5222
			break;
5071
		case 6: /* D6 vblank/vline */
5223
		case 6: /* D6 vblank/vline */
5072
			switch (src_data) {
5224
			switch (src_data) {
5073
			case 0: /* D6 vblank */
5225
			case 0: /* D6 vblank */
5074
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5226
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5075
					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5227
					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5076
 
5228
 
5077
				if (rdev->irq.crtc_vblank_int[5]) {
5229
				if (rdev->irq.crtc_vblank_int[5]) {
5078
					drm_handle_vblank(rdev->ddev, 5);
5230
					drm_handle_vblank(rdev->ddev, 5);
5079
					rdev->pm.vblank_sync = true;
5231
					rdev->pm.vblank_sync = true;
5080
					wake_up(&rdev->irq.vblank_queue);
5232
					wake_up(&rdev->irq.vblank_queue);
5081
				}
5233
				}
5082
				if (atomic_read(&rdev->irq.pflip[5]))
5234
				if (atomic_read(&rdev->irq.pflip[5]))
5083
					radeon_crtc_handle_vblank(rdev, 5);
5235
					radeon_crtc_handle_vblank(rdev, 5);
5084
				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5236
				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5085
				DRM_DEBUG("IH: D6 vblank\n");
5237
				DRM_DEBUG("IH: D6 vblank\n");
5086
 
5238
 
5087
				break;
5239
				break;
5088
			case 1: /* D6 vline */
5240
			case 1: /* D6 vline */
5089
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5241
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5090
					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5242
					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5091
 
5243
 
5092
				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5244
				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5093
				DRM_DEBUG("IH: D6 vline\n");
5245
				DRM_DEBUG("IH: D6 vline\n");
5094
 
5246
 
5095
				break;
5247
				break;
5096
			default:
5248
			default:
5097
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5249
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5098
				break;
5250
				break;
5099
			}
5251
			}
5100
			break;
5252
			break;
5101
		case 8: /* D1 page flip */
5253
		case 8: /* D1 page flip */
5102
		case 10: /* D2 page flip */
5254
		case 10: /* D2 page flip */
5103
		case 12: /* D3 page flip */
5255
		case 12: /* D3 page flip */
5104
		case 14: /* D4 page flip */
5256
		case 14: /* D4 page flip */
5105
		case 16: /* D5 page flip */
5257
		case 16: /* D5 page flip */
5106
		case 18: /* D6 page flip */
5258
		case 18: /* D6 page flip */
5107
			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5259
			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5108
			break;
5260
			break;
5109
		case 42: /* HPD hotplug */
5261
		case 42: /* HPD hotplug */
5110
			switch (src_data) {
5262
			switch (src_data) {
5111
			case 0:
5263
			case 0:
5112
				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5264
				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5113
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5265
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5114
 
5266
 
5115
				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5267
				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5116
				queue_hotplug = true;
5268
				queue_hotplug = true;
5117
				DRM_DEBUG("IH: HPD1\n");
5269
				DRM_DEBUG("IH: HPD1\n");
5118
				break;
5270
				break;
5119
			case 1:
5271
			case 1:
5120
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5272
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5121
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5273
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5122
 
5274
 
5123
				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5275
				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5124
				queue_hotplug = true;
5276
				queue_hotplug = true;
5125
				DRM_DEBUG("IH: HPD2\n");
5277
				DRM_DEBUG("IH: HPD2\n");
5126
				break;
5278
				break;
5127
			case 2:
5279
			case 2:
5128
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5280
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5129
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5281
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5130
 
5282
 
5131
				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5283
				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5132
				queue_hotplug = true;
5284
				queue_hotplug = true;
5133
				DRM_DEBUG("IH: HPD3\n");
5285
				DRM_DEBUG("IH: HPD3\n");
5134
				break;
5286
				break;
5135
			case 3:
5287
			case 3:
5136
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5288
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5137
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5289
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5138
 
5290
 
5139
				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5291
				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5140
				queue_hotplug = true;
5292
				queue_hotplug = true;
5141
				DRM_DEBUG("IH: HPD4\n");
5293
				DRM_DEBUG("IH: HPD4\n");
5142
				break;
5294
				break;
5143
			case 4:
5295
			case 4:
5144
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5296
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5145
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5297
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5146
 
5298
 
5147
				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5299
				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5148
				queue_hotplug = true;
5300
				queue_hotplug = true;
5149
				DRM_DEBUG("IH: HPD5\n");
5301
				DRM_DEBUG("IH: HPD5\n");
5150
				break;
5302
				break;
5151
			case 5:
5303
			case 5:
5152
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5304
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5153
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5305
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5154
 
5306
 
5155
				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5307
				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5156
				queue_hotplug = true;
5308
				queue_hotplug = true;
5157
				DRM_DEBUG("IH: HPD6\n");
5309
				DRM_DEBUG("IH: HPD6\n");
5158
				break;
5310
				break;
5159
			case 6:
5311
			case 6:
5160
				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5312
				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5161
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5313
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5162
 
5314
 
5163
				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5315
				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5164
				queue_dp = true;
5316
				queue_dp = true;
5165
				DRM_DEBUG("IH: HPD_RX 1\n");
5317
				DRM_DEBUG("IH: HPD_RX 1\n");
5166
				break;
5318
				break;
5167
			case 7:
5319
			case 7:
5168
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5320
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5169
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5321
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5170
 
5322
 
5171
				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5323
				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5172
				queue_dp = true;
5324
				queue_dp = true;
5173
				DRM_DEBUG("IH: HPD_RX 2\n");
5325
				DRM_DEBUG("IH: HPD_RX 2\n");
5174
				break;
5326
				break;
5175
			case 8:
5327
			case 8:
5176
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5328
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5177
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5329
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5178
 
5330
 
5179
				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5331
				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5180
				queue_dp = true;
5332
				queue_dp = true;
5181
				DRM_DEBUG("IH: HPD_RX 3\n");
5333
				DRM_DEBUG("IH: HPD_RX 3\n");
5182
				break;
5334
				break;
5183
			case 9:
5335
			case 9:
5184
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5336
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5185
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5337
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5186
 
5338
 
5187
				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5339
				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5188
				queue_dp = true;
5340
				queue_dp = true;
5189
				DRM_DEBUG("IH: HPD_RX 4\n");
5341
				DRM_DEBUG("IH: HPD_RX 4\n");
5190
				break;
5342
				break;
5191
			case 10:
5343
			case 10:
5192
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5344
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5193
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5345
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5194
 
5346
 
5195
				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5347
				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5196
				queue_dp = true;
5348
				queue_dp = true;
5197
				DRM_DEBUG("IH: HPD_RX 5\n");
5349
				DRM_DEBUG("IH: HPD_RX 5\n");
5198
				break;
5350
				break;
5199
			case 11:
5351
			case 11:
5200
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5352
				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5201
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5353
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5202
 
5354
 
5203
				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5355
				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5204
				queue_dp = true;
5356
				queue_dp = true;
5205
				DRM_DEBUG("IH: HPD_RX 6\n");
5357
				DRM_DEBUG("IH: HPD_RX 6\n");
5206
				break;
5358
				break;
5207
			default:
5359
			default:
5208
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5360
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5209
				break;
5361
				break;
5210
			}
5362
			}
5211
			break;
5363
			break;
5212
		case 44: /* hdmi */
5364
		case 44: /* hdmi */
5213
			switch (src_data) {
5365
			switch (src_data) {
5214
			case 0:
5366
			case 0:
5215
				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5367
				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5216
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5368
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5217
 
5369
 
5218
				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5370
				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5219
				queue_hdmi = true;
5371
				queue_hdmi = true;
5220
				DRM_DEBUG("IH: HDMI0\n");
5372
				DRM_DEBUG("IH: HDMI0\n");
5221
				break;
5373
				break;
5222
			case 1:
5374
			case 1:
5223
				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5375
				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5224
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5376
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5225
 
5377
 
5226
				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5378
				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5227
				queue_hdmi = true;
5379
				queue_hdmi = true;
5228
				DRM_DEBUG("IH: HDMI1\n");
5380
				DRM_DEBUG("IH: HDMI1\n");
5229
				break;
5381
				break;
5230
			case 2:
5382
			case 2:
5231
				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5383
				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5232
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5384
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5233
 
5385
 
5234
				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5386
				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5235
				queue_hdmi = true;
5387
				queue_hdmi = true;
5236
				DRM_DEBUG("IH: HDMI2\n");
5388
				DRM_DEBUG("IH: HDMI2\n");
5237
				break;
5389
				break;
5238
			case 3:
5390
			case 3:
5239
				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5391
				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5240
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5392
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5241
 
5393
 
5242
				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5394
				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5243
				queue_hdmi = true;
5395
				queue_hdmi = true;
5244
				DRM_DEBUG("IH: HDMI3\n");
5396
				DRM_DEBUG("IH: HDMI3\n");
5245
				break;
5397
				break;
5246
			case 4:
5398
			case 4:
5247
				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5399
				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5248
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5400
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5249
 
5401
 
5250
				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5402
				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5251
				queue_hdmi = true;
5403
				queue_hdmi = true;
5252
				DRM_DEBUG("IH: HDMI4\n");
5404
				DRM_DEBUG("IH: HDMI4\n");
5253
				break;
5405
				break;
5254
			case 5:
5406
			case 5:
5255
				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5407
				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5256
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5408
					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5257
 
5409
 
5258
				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5410
				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5259
				queue_hdmi = true;
5411
				queue_hdmi = true;
5260
				DRM_DEBUG("IH: HDMI5\n");
5412
				DRM_DEBUG("IH: HDMI5\n");
5261
				break;
5413
				break;
5262
			default:
5414
			default:
5263
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5415
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5264
				break;
5416
				break;
5265
			}
5417
			}
5266
		case 96:
5418
		case 96:
5267
			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5419
			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5268
			WREG32(SRBM_INT_ACK, 0x1);
5420
			WREG32(SRBM_INT_ACK, 0x1);
5269
			break;
5421
			break;
5270
		case 124: /* UVD */
5422
		case 124: /* UVD */
5271
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5423
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5272
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5424
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5273
			break;
5425
			break;
5274
		case 146:
5426
		case 146:
5275
		case 147:
5427
		case 147:
5276
			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5428
			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5277
			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5429
			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5278
			/* reset addr and status */
5430
			/* reset addr and status */
5279
			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5431
			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5280
			if (addr == 0x0 && status == 0x0)
5432
			if (addr == 0x0 && status == 0x0)
5281
				break;
5433
				break;
5282
			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5434
			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5283
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5435
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5284
				addr);
5436
				addr);
5285
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5437
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5286
				status);
5438
				status);
5287
			cayman_vm_decode_fault(rdev, status, addr);
5439
			cayman_vm_decode_fault(rdev, status, addr);
5288
			break;
5440
			break;
5289
		case 176: /* CP_INT in ring buffer */
5441
		case 176: /* CP_INT in ring buffer */
5290
		case 177: /* CP_INT in IB1 */
5442
		case 177: /* CP_INT in IB1 */
5291
		case 178: /* CP_INT in IB2 */
5443
		case 178: /* CP_INT in IB2 */
5292
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5444
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5293
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5445
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5294
			break;
5446
			break;
5295
		case 181: /* CP EOP event */
5447
		case 181: /* CP EOP event */
5296
			DRM_DEBUG("IH: CP EOP\n");
5448
			DRM_DEBUG("IH: CP EOP\n");
5297
			if (rdev->family >= CHIP_CAYMAN) {
5449
			if (rdev->family >= CHIP_CAYMAN) {
5298
				switch (src_data) {
5450
				switch (src_data) {
5299
				case 0:
5451
				case 0:
5300
					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5452
					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5301
					break;
5453
					break;
5302
				case 1:
5454
				case 1:
5303
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5455
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5304
					break;
5456
					break;
5305
				case 2:
5457
				case 2:
5306
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5458
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5307
					break;
5459
					break;
5308
				}
5460
				}
5309
			} else
5461
			} else
5310
				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5462
				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5311
			break;
5463
			break;
5312
		case 224: /* DMA trap event */
5464
		case 224: /* DMA trap event */
5313
			DRM_DEBUG("IH: DMA trap\n");
5465
			DRM_DEBUG("IH: DMA trap\n");
5314
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5466
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5315
			break;
5467
			break;
5316
		case 230: /* thermal low to high */
5468
		case 230: /* thermal low to high */
5317
			DRM_DEBUG("IH: thermal low to high\n");
5469
			DRM_DEBUG("IH: thermal low to high\n");
5318
			rdev->pm.dpm.thermal.high_to_low = false;
5470
			rdev->pm.dpm.thermal.high_to_low = false;
5319
			queue_thermal = true;
5471
			queue_thermal = true;
5320
			break;
5472
			break;
5321
		case 231: /* thermal high to low */
5473
		case 231: /* thermal high to low */
5322
			DRM_DEBUG("IH: thermal high to low\n");
5474
			DRM_DEBUG("IH: thermal high to low\n");
5323
			rdev->pm.dpm.thermal.high_to_low = true;
5475
			rdev->pm.dpm.thermal.high_to_low = true;
5324
			queue_thermal = true;
5476
			queue_thermal = true;
5325
			break;
5477
			break;
5326
		case 233: /* GUI IDLE */
5478
		case 233: /* GUI IDLE */
5327
			DRM_DEBUG("IH: GUI idle\n");
5479
			DRM_DEBUG("IH: GUI idle\n");
5328
			break;
5480
			break;
5329
		case 244: /* DMA trap event */
5481
		case 244: /* DMA trap event */
5330
			if (rdev->family >= CHIP_CAYMAN) {
5482
			if (rdev->family >= CHIP_CAYMAN) {
5331
				DRM_DEBUG("IH: DMA1 trap\n");
5483
				DRM_DEBUG("IH: DMA1 trap\n");
5332
				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5484
				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5333
			}
5485
			}
5334
			break;
5486
			break;
5335
		default:
5487
		default:
5336
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5488
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5337
			break;
5489
			break;
5338
		}
5490
		}
5339
 
5491
 
5340
		/* wptr/rptr are in bytes! */
5492
		/* wptr/rptr are in bytes! */
5341
		rptr += 16;
5493
		rptr += 16;
5342
		rptr &= rdev->ih.ptr_mask;
5494
		rptr &= rdev->ih.ptr_mask;
5343
		WREG32(IH_RB_RPTR, rptr);
5495
		WREG32(IH_RB_RPTR, rptr);
5344
	}
5496
	}
5345
	rdev->ih.rptr = rptr;
5497
	rdev->ih.rptr = rptr;
5346
	atomic_set(&rdev->ih.lock, 0);
5498
	atomic_set(&rdev->ih.lock, 0);
5347
 
5499
 
5348
	/* make sure wptr hasn't changed while processing */
5500
	/* make sure wptr hasn't changed while processing */
5349
	wptr = evergreen_get_ih_wptr(rdev);
5501
	wptr = evergreen_get_ih_wptr(rdev);
5350
	if (wptr != rptr)
5502
	if (wptr != rptr)
5351
		goto restart_ih;
5503
		goto restart_ih;
5352
 
5504
 
5353
	return IRQ_HANDLED;
5505
	return IRQ_HANDLED;
5354
}
5506
}
5355
 
5507
 
5356
static int evergreen_startup(struct radeon_device *rdev)
5508
static int evergreen_startup(struct radeon_device *rdev)
5357
{
5509
{
5358
	struct radeon_ring *ring;
5510
	struct radeon_ring *ring;
5359
	int r;
5511
	int r;
5360
 
5512
 
5361
	/* enable pcie gen2 link */
5513
	/* enable pcie gen2 link */
5362
	evergreen_pcie_gen2_enable(rdev);
5514
	evergreen_pcie_gen2_enable(rdev);
5363
	/* enable aspm */
5515
	/* enable aspm */
5364
	evergreen_program_aspm(rdev);
5516
	evergreen_program_aspm(rdev);
5365
 
5517
 
5366
	/* scratch needs to be initialized before MC */
5518
	/* scratch needs to be initialized before MC */
5367
	r = r600_vram_scratch_init(rdev);
5519
	r = r600_vram_scratch_init(rdev);
5368
	if (r)
5520
	if (r)
5369
		return r;
5521
		return r;
5370
 
5522
 
5371
	evergreen_mc_program(rdev);
5523
	evergreen_mc_program(rdev);
5372
 
5524
 
5373
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
5525
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
5374
		r = ni_mc_load_microcode(rdev);
5526
		r = ni_mc_load_microcode(rdev);
5375
		if (r) {
5527
		if (r) {
5376
			DRM_ERROR("Failed to load MC firmware!\n");
5528
			DRM_ERROR("Failed to load MC firmware!\n");
5377
			return r;
5529
			return r;
5378
		}
5530
		}
5379
	}
5531
	}
5380
 
5532
 
5381
	if (rdev->flags & RADEON_IS_AGP) {
5533
	if (rdev->flags & RADEON_IS_AGP) {
5382
		evergreen_agp_enable(rdev);
5534
		evergreen_agp_enable(rdev);
5383
	} else {
5535
	} else {
5384
		r = evergreen_pcie_gart_enable(rdev);
5536
		r = evergreen_pcie_gart_enable(rdev);
5385
		if (r)
5537
		if (r)
5386
			return r;
5538
			return r;
5387
	}
5539
	}
5388
	evergreen_gpu_init(rdev);
5540
	evergreen_gpu_init(rdev);
5389
 
5541
 
5390
	/* allocate rlc buffers */
5542
	/* allocate rlc buffers */
5391
	if (rdev->flags & RADEON_IS_IGP) {
5543
	if (rdev->flags & RADEON_IS_IGP) {
5392
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5544
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5393
		rdev->rlc.reg_list_size =
5545
		rdev->rlc.reg_list_size =
5394
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5546
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5395
		rdev->rlc.cs_data = evergreen_cs_data;
5547
		rdev->rlc.cs_data = evergreen_cs_data;
5396
		r = sumo_rlc_init(rdev);
5548
		r = sumo_rlc_init(rdev);
5397
		if (r) {
5549
		if (r) {
5398
			DRM_ERROR("Failed to init rlc BOs!\n");
5550
			DRM_ERROR("Failed to init rlc BOs!\n");
5399
			return r;
5551
			return r;
5400
		}
5552
		}
5401
	}
5553
	}
5402
 
5554
 
5403
	/* allocate wb buffer */
5555
	/* allocate wb buffer */
5404
	r = radeon_wb_init(rdev);
5556
	r = radeon_wb_init(rdev);
5405
	if (r)
5557
	if (r)
5406
		return r;
5558
		return r;
5407
 
5559
 
5408
	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5560
	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5409
	if (r) {
5561
	if (r) {
5410
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5562
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5411
		return r;
5563
		return r;
5412
	}
5564
	}
5413
 
5565
 
5414
	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5566
	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5415
	if (r) {
5567
	if (r) {
5416
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5568
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5417
		return r;
5569
		return r;
5418
	}
5570
	}
5419
 
5571
 
5420
	r = uvd_v2_2_resume(rdev);
5572
	r = uvd_v2_2_resume(rdev);
5421
	if (!r) {
5573
	if (!r) {
5422
		r = radeon_fence_driver_start_ring(rdev,
5574
		r = radeon_fence_driver_start_ring(rdev,
5423
						   R600_RING_TYPE_UVD_INDEX);
5575
						   R600_RING_TYPE_UVD_INDEX);
5424
		if (r)
5576
		if (r)
5425
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
5577
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
5426
	}
5578
	}
5427
 
5579
 
5428
	if (r)
5580
	if (r)
5429
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5581
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5430
 
5582
 
5431
	/* Enable IRQ */
5583
	/* Enable IRQ */
5432
	if (!rdev->irq.installed) {
5584
	if (!rdev->irq.installed) {
5433
		r = radeon_irq_kms_init(rdev);
5585
		r = radeon_irq_kms_init(rdev);
5434
		if (r)
5586
		if (r)
5435
			return r;
5587
			return r;
5436
	}
5588
	}
5437
 
5589
 
5438
	r = r600_irq_init(rdev);
5590
	r = r600_irq_init(rdev);
5439
	if (r) {
5591
	if (r) {
5440
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
5592
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
5441
		radeon_irq_kms_fini(rdev);
5593
		radeon_irq_kms_fini(rdev);
5442
		return r;
5594
		return r;
5443
	}
5595
	}
5444
	evergreen_irq_set(rdev);
5596
	evergreen_irq_set(rdev);
5445
 
5597
 
5446
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5598
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5447
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5599
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5448
			     RADEON_CP_PACKET2);
5600
			     RADEON_CP_PACKET2);
5449
	if (r)
5601
	if (r)
5450
		return r;
5602
		return r;
5451
 
5603
 
5452
	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5604
	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5453
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5605
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5454
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5606
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5455
	if (r)
5607
	if (r)
5456
		return r;
5608
		return r;
5457
 
5609
 
5458
	r = evergreen_cp_load_microcode(rdev);
5610
	r = evergreen_cp_load_microcode(rdev);
5459
	if (r)
5611
	if (r)
5460
		return r;
5612
		return r;
5461
	r = evergreen_cp_resume(rdev);
5613
	r = evergreen_cp_resume(rdev);
5462
	if (r)
5614
	if (r)
5463
		return r;
5615
		return r;
5464
	r = r600_dma_resume(rdev);
5616
	r = r600_dma_resume(rdev);
5465
	if (r)
5617
	if (r)
5466
		return r;
5618
		return r;
5467
 
5619
 
5468
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5620
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5469
	if (ring->ring_size) {
5621
	if (ring->ring_size) {
5470
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
5622
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
5471
				     RADEON_CP_PACKET2);
5623
				     RADEON_CP_PACKET2);
5472
		if (!r)
5624
		if (!r)
5473
			r = uvd_v1_0_init(rdev);
5625
			r = uvd_v1_0_init(rdev);
5474
 
5626
 
5475
		if (r)
5627
		if (r)
5476
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
5628
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
5477
	}
5629
	}
5478
 
5630
 
5479
	r = radeon_ib_pool_init(rdev);
5631
	r = radeon_ib_pool_init(rdev);
5480
	if (r) {
5632
	if (r) {
5481
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5633
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5482
		return r;
5634
		return r;
5483
	}
5635
	}
5484
 
5636
 
5485
	return 0;
5637
	return 0;
5486
}
5638
}
5487
 
5639
 
5488
 
5640
 
5489
 
5641
 
5490
 
5642
 
5491
/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 */
/**
 * evergreen_init - one-time driver init for an evergreen-family GPU
 * @rdev: radeon_device pointer
 *
 * Brings the ASIC from cold to a working state: reads/validates the
 * (ATOM) BIOS, resets and optionally posts the card, initializes
 * clocks, fences, AGP, the memory controller, the buffer manager,
 * loads microcode, sets up the GFX/DMA/UVD/IH rings and the PCIE GART,
 * then runs evergreen_startup().  The call order is significant.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			/* AGP init failure is non-fatal: fall back to non-AGP */
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* Load CP/RLC (and for DCE5 parts also MC) microcode if not cached */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional: only set up its ring if UVD init succeeded */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failed: tear down what it set up and continue
		 * without acceleration instead of failing the whole init
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not suffient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5625
 
5777
 
5626
/**
 * evergreen_fini - tear down an evergreen-family GPU
 * @rdev: radeon_device pointer
 *
 * Releases everything evergreen_init()/evergreen_startup() set up:
 * PM, audio, CP/DMA/IRQ, RLC (IGP parts only), writeback, IB pool,
 * KMS interrupts, UVD, PCIE GART, VRAM scratch, GEM, fences, AGP,
 * the buffer manager, the atombios context and the cached BIOS image.
 * The order roughly mirrors init in reverse and should be preserved.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	/* clear the dangling pointer; kfree(NULL) is safe on re-entry */
	rdev->bios = NULL;
}
5650
 
5802
 
5651
/**
 * evergreen_pcie_gen2_enable - try to switch the PCIE link to gen2 speed
 * @rdev: radeon_device pointer
 *
 * No-op when disabled via the radeon.pcie_gen2=0 module parameter, on
 * IGP or non-PCIE parts, on X2 boards, when the upstream bus does not
 * support 5.0/8.0 GT/s, or when gen2 speed is already active.
 * Otherwise programs PCIE_LC_SPEED_CNTL / PCIE_LC_LINK_WIDTH_CNTL to
 * request the speed change; the read-modify-write sequence and its
 * order follow the hardware programming procedure and must not be
 * reordered.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	/* module parameter override: radeon.pcie_gen2=0 disables this */
	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	/* only attempt when the bus can actually run 5.0 or 8.0 GT/s */
	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	/* only proceed if the other end of the link has advertised gen2 */
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* pulse LC_CLR_FAILED_SPD_CHANGE_CNT (set then clear) to
		 * reset the failed-speed-change counter
		 */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
5713
 
5865
 
5714
/**
 * evergreen_program_aspm - program PCIE Active State Power Management
 * @rdev: radeon_device pointer
 *
 * No-op when disabled via the radeon.aspm=0 module parameter or on
 * non-PCIE parts.  Configures the L0s/L1 inactivity timers in
 * PCIE_LC_CNTL and, when PLL power-down in L1 is allowed, the PIF PHY
 * power-down and ramp-up registers.  Several register values differ
 * between pre-BARTS (evergreen) and BARTS+ (northern islands) parts.
 * The read-modify-write order follows the hardware programming
 * sequence and must not be reordered.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	/* module parameter override: radeon.aspm=0 disables this */
	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families keep L0s disabled; all others allow it */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing: cleared on fusion platforms, set otherwise;
	 * writes below are skipped when the value is already correct
	 */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		/* L0s inactivity timer: 7 on BARTS+, 3 on older parts */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		/* L1 inactivity timer: 7 on BARTS+, 8 on older parts */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			/* allow PLL power-down in L1 on both PHYs/lanes */
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS+ also need the PLL ramp-up time programmed */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
5863
>
6015
>
5864
>
6016
>