Subversion Repositories Kolibri OS


Rev 2175 Rev 2997
Line 22... Line 22...
22
 * Authors: Alex Deucher
22
 * Authors: Alex Deucher
23
 */
23
 */
24
#include <linux/firmware.h>
24
#include <linux/firmware.h>
25
//#include <linux/platform_device.h>
25
//#include <linux/platform_device.h>
26
#include <linux/slab.h>
26
#include <linux/slab.h>
27
#include "drmP.h"
27
#include <drm/drmP.h>
28
#include "radeon.h"
28
#include "radeon.h"
29
#include "radeon_asic.h"
29
#include "radeon_asic.h"
30
#include "radeon_drm.h"
30
#include <drm/radeon_drm.h>
31
#include "evergreend.h"
31
#include "evergreend.h"
32
#include "atom.h"
32
#include "atom.h"
33
#include "avivod.h"
33
#include "avivod.h"
34
#include "evergreen_reg.h"
34
#include "evergreen_reg.h"
35
#include "evergreen_blit_shaders.h"
35
#include "evergreen_blit_shaders.h"
Line 36... Line 36...
36
 
36
 
37
#define EVERGREEN_PFP_UCODE_SIZE 1120
37
#define EVERGREEN_PFP_UCODE_SIZE 1120
Line -... Line 38...
-
 
38
#define EVERGREEN_PM4_UCODE_SIZE 1376
-
 
39
 
-
 
40
static const u32 crtc_offsets[6] =
-
 
41
{
-
 
42
	EVERGREEN_CRTC0_REGISTER_OFFSET,
-
 
43
	EVERGREEN_CRTC1_REGISTER_OFFSET,
-
 
44
	EVERGREEN_CRTC2_REGISTER_OFFSET,
-
 
45
	EVERGREEN_CRTC3_REGISTER_OFFSET,
-
 
46
	EVERGREEN_CRTC4_REGISTER_OFFSET,
-
 
47
	EVERGREEN_CRTC5_REGISTER_OFFSET
38
#define EVERGREEN_PM4_UCODE_SIZE 1376
48
};
39
 
49
 
40
static void evergreen_gpu_init(struct radeon_device *rdev);
50
static void evergreen_gpu_init(struct radeon_device *rdev);
-
 
51
void evergreen_fini(struct radeon_device *rdev);
-
 
52
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
-
 
53
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
-
 
54
				     int ring, u32 cp_int_cntl);
-
 
55
 
-
 
56
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
-
 
57
			     unsigned *bankh, unsigned *mtaspect,
-
 
58
			     unsigned *tile_split)
-
 
59
{
-
 
60
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
-
 
61
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
-
 
62
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
-
 
63
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
-
 
64
	switch (*bankw) {
-
 
65
	default:
-
 
66
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
-
 
67
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
-
 
68
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
-
 
69
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
-
 
70
	}
-
 
71
	switch (*bankh) {
-
 
72
	default:
-
 
73
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
-
 
74
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
-
 
75
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
-
 
76
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
-
 
77
	}
-
 
78
	switch (*mtaspect) {
-
 
79
	default:
-
 
80
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
-
 
81
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
-
 
82
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
-
 
83
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
-
 
84
	}
-
 
85
}
-
 
86
 
-
 
87
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
-
 
88
{
-
 
89
	u16 ctl, v;
-
 
90
	int err;
-
 
91
 
-
 
92
	err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
-
 
93
	if (err)
-
 
94
		return;
-
 
95
 
-
 
96
	v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
-
 
97
 
-
 
98
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
-
 
99
	 * to avoid hangs or performance issues
-
 
100
	 */
-
 
101
	if ((v == 0) || (v == 6) || (v == 7)) {
-
 
102
		ctl &= ~PCI_EXP_DEVCTL_READRQ;
-
 
103
		ctl |= (2 << 12);
-
 
104
		pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
-
 
105
	}
-
 
106
}
-
 
107
 
-
 
108
/**
-
 
109
 * dce4_wait_for_vblank - vblank wait asic callback.
-
 
110
 *
-
 
111
 * @rdev: radeon_device pointer
-
 
112
 * @crtc: crtc to wait for vblank on
-
 
113
 *
-
 
114
 * Wait for vblank on the requested crtc (evergreen+).
-
 
115
 */
-
 
116
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
-
 
117
{
-
 
118
	int i;
-
 
119
 
Line -... Line 120...
-
 
120
	if (crtc >= rdev->num_crtc)
-
 
121
		return;
-
 
122
 
-
 
123
	if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN) {
-
 
124
		for (i = 0; i < rdev->usec_timeout; i++) {
-
 
125
			if (!(RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK))
-
 
126
				break;
-
 
127
			udelay(1);
-
 
128
		}
-
 
129
		for (i = 0; i < rdev->usec_timeout; i++) {
-
 
130
			if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
-
 
131
				break;
-
 
132
			udelay(1);
Line -... Line 133...
-
 
133
		}
-
 
134
	}
-
 
135
}
-
 
136
 
-
 
137
 
-
 
138
/**
-
 
139
 * evergreen_page_flip - pageflip callback.
-
 
140
 *
-
 
141
 * @rdev: radeon_device pointer
-
 
142
 * @crtc_id: crtc to cleanup pageflip on
-
 
143
 * @crtc_base: new address of the crtc (GPU MC address)
-
 
144
 *
-
 
145
 * Does the actual pageflip (evergreen+).
41
void evergreen_fini(struct radeon_device *rdev);
146
 * During vblank we take the crtc lock and wait for the update_pending
42
static void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
147
 * bit to go high, when it does, we release the lock, and allow the
43
 
148
 * double buffered update to take place.
44
 
149
 * Returns the current update pending status.
-
 
150
 */
Line 45... Line 151...
45
 
151
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
46
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
152
{
47
{
153
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
Line 62... Line 168...
62
	       upper_32_bits(crtc_base));
168
	       upper_32_bits(crtc_base));
63
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
169
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
64
	       (u32)crtc_base);
170
	       (u32)crtc_base);
Line 65... Line 171...
65
 
171
 
-
 
172
	/* Wait for update_pending to go high. */
66
	/* Wait for update_pending to go high. */
173
	for (i = 0; i < rdev->usec_timeout; i++) {
-
 
174
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
-
 
175
			break;
-
 
176
		udelay(1);
67
	while (!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING));
177
	}
Line 68... Line 178...
68
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
178
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
69
 
179
 
70
	/* Unlock the lock, so double-buffering can take place inside vblank */
180
	/* Unlock the lock, so double-buffering can take place inside vblank */
Line 120... Line 230...
120
	int actual_temp = temp - 49;
230
	int actual_temp = temp - 49;
Line 121... Line 231...
121
 
231
 
122
	return actual_temp * 1000;
232
	return actual_temp * 1000;
Line -... Line 233...
-
 
233
}
-
 
234
 
-
 
235
/**
-
 
236
 * sumo_pm_init_profile - Initialize power profiles callback.
-
 
237
 *
-
 
238
 * @rdev: radeon_device pointer
-
 
239
 *
-
 
240
 * Initialize the power states used in profile mode
-
 
241
 * (sumo, trinity, SI).
-
 
242
 * Used for profile mode only.
-
 
243
 */
-
 
244
void sumo_pm_init_profile(struct radeon_device *rdev)
-
 
245
{
-
 
246
	int idx;
-
 
247
 
-
 
248
	/* default */
-
 
249
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
-
 
250
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
-
 
251
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
-
 
252
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
-
 
253
 
-
 
254
	/* low,mid sh/mh */
-
 
255
	if (rdev->flags & RADEON_IS_MOBILITY)
-
 
256
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
-
 
257
	else
-
 
258
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
-
 
259
 
-
 
260
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
-
 
261
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
-
 
262
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
-
 
263
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
-
 
264
 
-
 
265
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
-
 
266
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
-
 
267
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
-
 
268
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
-
 
269
 
-
 
270
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
-
 
271
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
-
 
272
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
-
 
273
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
-
 
274
 
-
 
275
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
-
 
276
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
-
 
277
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
-
 
278
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
-
 
279
 
-
 
280
	/* high sh/mh */
-
 
281
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
-
 
282
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
-
 
283
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
-
 
284
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
-
 
285
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
-
 
286
		rdev->pm.power_state[idx].num_clock_modes - 1;
-
 
287
 
-
 
288
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
-
 
289
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
-
 
290
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
-
 
291
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
-
 
292
		rdev->pm.power_state[idx].num_clock_modes - 1;
-
 
293
}
-
 
294
 
-
 
295
/**
-
 
296
 * evergreen_pm_misc - set additional pm hw parameters callback.
-
 
297
 *
-
 
298
 * @rdev: radeon_device pointer
-
 
299
 *
-
 
300
 * Set non-clock parameters associated with a power state
123
}
301
 * (voltage, etc.) (evergreen+).
124
 
302
 */
125
void evergreen_pm_misc(struct radeon_device *rdev)
303
void evergreen_pm_misc(struct radeon_device *rdev)
126
{
304
{
127
	int req_ps_idx = rdev->pm.requested_power_state_index;
305
	int req_ps_idx = rdev->pm.requested_power_state_index;
Line 147... Line 325...
147
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
325
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
148
		}
326
		}
149
	}
327
	}
150
}
328
}
Line -... Line 329...
-
 
329
 
-
 
330
/**
-
 
331
 * evergreen_pm_prepare - pre-power state change callback.
-
 
332
 *
-
 
333
 * @rdev: radeon_device pointer
-
 
334
 *
-
 
335
 * Prepare for a power state change (evergreen+).
151
 
336
 */
152
void evergreen_pm_prepare(struct radeon_device *rdev)
337
void evergreen_pm_prepare(struct radeon_device *rdev)
153
{
338
{
154
	struct drm_device *ddev = rdev->ddev;
339
	struct drm_device *ddev = rdev->ddev;
155
	struct drm_crtc *crtc;
340
	struct drm_crtc *crtc;
Line 165... Line 350...
165
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
350
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
166
		}
351
		}
167
	}
352
	}
168
}
353
}
Line -... Line 354...
-
 
354
 
-
 
355
/**
-
 
356
 * evergreen_pm_finish - post-power state change callback.
-
 
357
 *
-
 
358
 * @rdev: radeon_device pointer
-
 
359
 *
-
 
360
 * Clean up after a power state change (evergreen+).
169
 
361
 */
170
void evergreen_pm_finish(struct radeon_device *rdev)
362
void evergreen_pm_finish(struct radeon_device *rdev)
171
{
363
{
172
	struct drm_device *ddev = rdev->ddev;
364
	struct drm_device *ddev = rdev->ddev;
173
	struct drm_crtc *crtc;
365
	struct drm_crtc *crtc;
Line 183... Line 375...
183
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
375
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
184
		}
376
		}
185
	}
377
	}
186
}
378
}
Line -... Line 379...
-
 
379
 
-
 
380
/**
-
 
381
 * evergreen_hpd_sense - hpd sense callback.
-
 
382
 *
-
 
383
 * @rdev: radeon_device pointer
-
 
384
 * @hpd: hpd (hotplug detect) pin
-
 
385
 *
-
 
386
 * Checks if a digital monitor is connected (evergreen+).
-
 
387
 * Returns true if connected, false if not connected.
187
 
388
 */
188
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
389
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
189
{
390
{
Line 190... Line 391...
190
	bool connected = false;
391
	bool connected = false;
Line 219... Line 420...
219
	}
420
	}
Line 220... Line 421...
220
 
421
 
221
	return connected;
422
	return connected;
Line -... Line 423...
-
 
423
}
-
 
424
 
-
 
425
/**
-
 
426
 * evergreen_hpd_set_polarity - hpd set polarity callback.
-
 
427
 *
-
 
428
 * @rdev: radeon_device pointer
-
 
429
 * @hpd: hpd (hotplug detect) pin
-
 
430
 *
222
}
431
 * Set the polarity of the hpd pin (evergreen+).
223
 
432
 */
224
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
433
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
225
				enum radeon_hpd_id hpd)
434
				enum radeon_hpd_id hpd)
226
{
435
{
Line 279... Line 488...
279
	default:
488
	default:
280
		break;
489
		break;
281
	}
490
	}
282
}
491
}
Line -... Line 492...
-
 
492
 
-
 
493
/**
-
 
494
 * evergreen_hpd_init - hpd setup callback.
-
 
495
 *
-
 
496
 * @rdev: radeon_device pointer
-
 
497
 *
-
 
498
 * Setup the hpd pins used by the card (evergreen+).
-
 
499
 * Enable the pin, set the polarity, and enable the hpd interrupts.
283
 
500
 */
284
void evergreen_hpd_init(struct radeon_device *rdev)
501
void evergreen_hpd_init(struct radeon_device *rdev)
285
{
502
{
286
	struct drm_device *dev = rdev->ddev;
503
	struct drm_device *dev = rdev->ddev;
-
 
504
	struct drm_connector *connector;
287
	struct drm_connector *connector;
505
	unsigned enabled = 0;
288
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
506
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
Line 289... Line 507...
289
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
507
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
290
 
508
 
291
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
509
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
292
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
510
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
293
		switch (radeon_connector->hpd.hpd) {
511
		switch (radeon_connector->hpd.hpd) {
294
		case RADEON_HPD_1:
-
 
295
			WREG32(DC_HPD1_CONTROL, tmp);
512
		case RADEON_HPD_1:
296
			rdev->irq.hpd[0] = true;
513
			WREG32(DC_HPD1_CONTROL, tmp);
297
			break;
514
			break;
298
		case RADEON_HPD_2:
-
 
299
			WREG32(DC_HPD2_CONTROL, tmp);
515
		case RADEON_HPD_2:
300
			rdev->irq.hpd[1] = true;
516
			WREG32(DC_HPD2_CONTROL, tmp);
301
			break;
517
			break;
302
		case RADEON_HPD_3:
-
 
303
			WREG32(DC_HPD3_CONTROL, tmp);
518
		case RADEON_HPD_3:
304
			rdev->irq.hpd[2] = true;
519
			WREG32(DC_HPD3_CONTROL, tmp);
305
			break;
520
			break;
306
		case RADEON_HPD_4:
-
 
307
			WREG32(DC_HPD4_CONTROL, tmp);
521
		case RADEON_HPD_4:
308
			rdev->irq.hpd[3] = true;
522
			WREG32(DC_HPD4_CONTROL, tmp);
309
			break;
523
			break;
310
		case RADEON_HPD_5:
-
 
311
			WREG32(DC_HPD5_CONTROL, tmp);
524
		case RADEON_HPD_5:
312
			rdev->irq.hpd[4] = true;
525
			WREG32(DC_HPD5_CONTROL, tmp);
313
			break;
526
			break;
314
		case RADEON_HPD_6:
-
 
315
			WREG32(DC_HPD6_CONTROL, tmp);
527
		case RADEON_HPD_6:
316
			rdev->irq.hpd[5] = true;
528
			WREG32(DC_HPD6_CONTROL, tmp);
317
			break;
529
			break;
318
		default:
530
		default:
-
 
531
			break;
-
 
532
		}
319
			break;
533
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
320
		}
-
 
321
	}
534
		enabled |= 1 << radeon_connector->hpd.hpd;
322
	if (rdev->irq.installed)
535
	}
Line -... Line 536...
-
 
536
//   radeon_irq_kms_enable_hpd(rdev, enabled);
-
 
537
}
-
 
538
 
-
 
539
/**
-
 
540
 * evergreen_hpd_fini - hpd tear down callback.
-
 
541
 *
-
 
542
 * @rdev: radeon_device pointer
-
 
543
 *
323
		evergreen_irq_set(rdev);
544
 * Tear down the hpd pins used by the card (evergreen+).
324
}
545
 * Disable the hpd interrupts.
325
 
546
 */
326
void evergreen_hpd_fini(struct radeon_device *rdev)
547
void evergreen_hpd_fini(struct radeon_device *rdev)
-
 
548
{
Line 327... Line 549...
327
{
549
	struct drm_device *dev = rdev->ddev;
328
	struct drm_device *dev = rdev->ddev;
550
	struct drm_connector *connector;
329
	struct drm_connector *connector;
551
	unsigned disabled = 0;
330
 
552
 
331
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
553
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
332
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
-
 
333
		switch (radeon_connector->hpd.hpd) {
554
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
334
		case RADEON_HPD_1:
555
		switch (radeon_connector->hpd.hpd) {
335
			WREG32(DC_HPD1_CONTROL, 0);
556
		case RADEON_HPD_1:
336
			rdev->irq.hpd[0] = false;
-
 
337
			break;
557
			WREG32(DC_HPD1_CONTROL, 0);
338
		case RADEON_HPD_2:
558
			break;
339
			WREG32(DC_HPD2_CONTROL, 0);
559
		case RADEON_HPD_2:
340
			rdev->irq.hpd[1] = false;
-
 
341
			break;
560
			WREG32(DC_HPD2_CONTROL, 0);
342
		case RADEON_HPD_3:
561
			break;
343
			WREG32(DC_HPD3_CONTROL, 0);
562
		case RADEON_HPD_3:
344
			rdev->irq.hpd[2] = false;
-
 
345
			break;
563
			WREG32(DC_HPD3_CONTROL, 0);
346
		case RADEON_HPD_4:
564
			break;
347
			WREG32(DC_HPD4_CONTROL, 0);
565
		case RADEON_HPD_4:
348
			rdev->irq.hpd[3] = false;
-
 
349
			break;
566
			WREG32(DC_HPD4_CONTROL, 0);
350
		case RADEON_HPD_5:
567
			break;
351
			WREG32(DC_HPD5_CONTROL, 0);
568
		case RADEON_HPD_5:
352
			rdev->irq.hpd[4] = false;
-
 
353
			break;
569
			WREG32(DC_HPD5_CONTROL, 0);
354
		case RADEON_HPD_6:
570
			break;
355
			WREG32(DC_HPD6_CONTROL, 0);
571
		case RADEON_HPD_6:
356
			rdev->irq.hpd[5] = false;
572
			WREG32(DC_HPD6_CONTROL, 0);
-
 
573
			break;
357
			break;
574
		default:
-
 
575
			break;
358
		default:
576
		}
Line 359... Line 577...
359
			break;
577
		disabled |= 1 << radeon_connector->hpd.hpd;
Line 360... Line 578...
360
		}
578
	}
Line 435... Line 653...
435
 
653
 
436
	/* controller not enabled, so no lb used */
654
	/* controller not enabled, so no lb used */
437
	return 0;
655
	return 0;
Line 438... Line 656...
438
}
656
}
439
 
657
 
440
static u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
658
u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
Line 441... Line 659...
441
{
659
{
442
	u32 tmp = RREG32(MC_SHARED_CHMAP);
660
	u32 tmp = RREG32(MC_SHARED_CHMAP);
Line 787... Line 1005...
787
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
1005
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
788
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
1006
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
Line 789... Line 1007...
789
 
1007
 
Line -... Line 1008...
-
 
1008
}
-
 
1009
 
-
 
1010
/**
-
 
1011
 * evergreen_bandwidth_update - update display watermarks callback.
-
 
1012
 *
-
 
1013
 * @rdev: radeon_device pointer
-
 
1014
 *
-
 
1015
 * Update the display watermarks based on the requested mode(s)
790
}
1016
 * (evergreen+).
791
 
1017
 */
792
void evergreen_bandwidth_update(struct radeon_device *rdev)
1018
void evergreen_bandwidth_update(struct radeon_device *rdev)
793
{
1019
{
794
	struct drm_display_mode *mode0 = NULL;
1020
	struct drm_display_mode *mode0 = NULL;
Line 810... Line 1036...
810
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
1036
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
811
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
1037
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
812
	}
1038
	}
813
}
1039
}
Line -... Line 1040...
-
 
1040
 
-
 
1041
/**
-
 
1042
 * evergreen_mc_wait_for_idle - wait for MC idle callback.
-
 
1043
 *
-
 
1044
 * @rdev: radeon_device pointer
-
 
1045
 *
-
 
1046
 * Wait for the MC (memory controller) to be idle.
-
 
1047
 * (evergreen+).
-
 
1048
 * Returns 0 if the MC is idle, -1 if not.
814
 
1049
 */
815
int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
1050
int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
816
{
1051
{
817
	unsigned i;
1052
	unsigned i;
Line 851... Line 1086...
851
		}
1086
		}
852
		udelay(1);
1087
		udelay(1);
853
	}
1088
	}
854
}
1089
}
Line 855... Line 1090...
855
 
1090
 
856
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
1091
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
857
{
1092
{
858
	u32 tmp;
1093
	u32 tmp;
Line 859... Line 1094...
859
	int r;
1094
	int r;
860
 
1095
 
861
	if (rdev->gart.table.vram.robj == NULL) {
1096
	if (rdev->gart.robj == NULL) {
862
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
1097
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
863
		return -EINVAL;
1098
		return -EINVAL;
864
	}
1099
	}
Line 883... Line 1118...
883
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
1118
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
884
	} else {
1119
	} else {
885
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
1120
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
886
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
1121
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
887
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
1122
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
-
 
1123
		if ((rdev->family == CHIP_JUNIPER) ||
-
 
1124
		    (rdev->family == CHIP_CYPRESS) ||
-
 
1125
		    (rdev->family == CHIP_HEMLOCK) ||
-
 
1126
		    (rdev->family == CHIP_BARTS))
-
 
1127
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
888
	}
1128
	}
889
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
1129
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
890
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
1130
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
891
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
1131
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
892
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
1132
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
Line 898... Line 1138...
898
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
1138
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
899
			(u32)(rdev->dummy_page.addr >> 12));
1139
			(u32)(rdev->dummy_page.addr >> 12));
900
	WREG32(VM_CONTEXT1_CNTL, 0);
1140
	WREG32(VM_CONTEXT1_CNTL, 0);
Line 901... Line 1141...
901
 
1141
 
-
 
1142
	evergreen_pcie_gart_tlb_flush(rdev);
-
 
1143
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
-
 
1144
		 (unsigned)(rdev->mc.gtt_size >> 20),
902
	evergreen_pcie_gart_tlb_flush(rdev);
1145
		 (unsigned long long)rdev->gart.table_addr);
903
	rdev->gart.ready = true;
1146
	rdev->gart.ready = true;
904
	return 0;
1147
	return 0;
Line 905... Line 1148...
905
}
1148
}
906
 
1149
 
907
void evergreen_pcie_gart_disable(struct radeon_device *rdev)
1150
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
908
{
-
 
Line 909... Line 1151...
909
	u32 tmp;
1151
{
910
	int r;
1152
	u32 tmp;
911
 
1153
 
Line 925... Line 1167...
925
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
1167
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
926
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
1168
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
927
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
1169
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
928
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
1170
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
929
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
1171
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
930
	if (rdev->gart.table.vram.robj) {
1172
	radeon_gart_table_vram_unpin(rdev);
931
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
-
 
932
		if (likely(r == 0)) {
-
 
933
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
-
 
934
			radeon_bo_unpin(rdev->gart.table.vram.robj);
-
 
935
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
-
 
936
		}
-
 
937
	}
-
 
938
}
1173
}
Line 939... Line 1174...
939
 
1174
 
940
void evergreen_pcie_gart_fini(struct radeon_device *rdev)
1175
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
941
{
1176
{
942
	evergreen_pcie_gart_disable(rdev);
1177
	evergreen_pcie_gart_disable(rdev);
943
	radeon_gart_table_vram_free(rdev);
1178
	radeon_gart_table_vram_free(rdev);
944
	radeon_gart_fini(rdev);
1179
	radeon_gart_fini(rdev);
Line 945... Line 1180...
945
}
1180
}
946
 
1181
 
947
 
1182
 
Line 948... Line 1183...
948
void evergreen_agp_enable(struct radeon_device *rdev)
1183
static void evergreen_agp_enable(struct radeon_device *rdev)
949
{
1184
{
Line 971... Line 1206...
971
	WREG32(VM_CONTEXT1_CNTL, 0);
1206
	WREG32(VM_CONTEXT1_CNTL, 0);
972
}
1207
}
Line 973... Line 1208...
973
 
1208
 
974
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
1209
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
975
{
1210
{
976
	save->vga_control[0] = RREG32(D1VGA_CONTROL);
1211
	u32 crtc_enabled, tmp, frame_count, blackout;
-
 
1212
	int i, j;
977
	save->vga_control[1] = RREG32(D2VGA_CONTROL);
1213
 
978
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
1214
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
979
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
-
 
980
	save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
-
 
981
	save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
-
 
982
	if (rdev->num_crtc >= 4) {
-
 
983
		save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
-
 
984
		save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
-
 
985
	save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
-
 
986
	save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
-
 
987
	}
-
 
988
	if (rdev->num_crtc >= 6) {
-
 
989
		save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
-
 
990
		save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
-
 
991
	save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
-
 
992
	save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
-
 
Line 993... Line 1215...
993
	}
1215
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
994
 
1216
 
-
 
1217
	/* disable VGA render */
-
 
1218
	WREG32(VGA_RENDER_CONTROL, 0);
995
	/* Stop all video */
1219
	/* blank the display controllers */
-
 
1220
	for (i = 0; i < rdev->num_crtc; i++) {
-
 
1221
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
-
 
1222
		if (crtc_enabled) {
996
	WREG32(VGA_RENDER_CONTROL, 0);
1223
			save->crtc_enabled[i] = true;
-
 
1224
			if (ASIC_IS_DCE6(rdev)) {
997
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
1225
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
998
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
1226
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
999
	if (rdev->num_crtc >= 4) {
1227
					radeon_wait_for_vblank(rdev, i);
1000
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
1228
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
-
 
1229
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
-
 
1230
	}
-
 
1231
			} else {
1001
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
1232
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
1002
	}
1233
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
1003
	if (rdev->num_crtc >= 6) {
1234
					radeon_wait_for_vblank(rdev, i);
1004
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
1235
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1005
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
-
 
1006
	}
-
 
1007
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
-
 
1008
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
-
 
1009
	if (rdev->num_crtc >= 4) {
-
 
1010
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1236
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
1011
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1237
	}
1012
	}
1238
	}
-
 
1239
			/* wait for the next frame */
1013
	if (rdev->num_crtc >= 6) {
1240
			frame_count = radeon_get_vblank_counter(rdev, i);
-
 
1241
			for (j = 0; j < rdev->usec_timeout; j++) {
-
 
1242
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
1014
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1243
					break;
1015
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
-
 
1016
	}
-
 
1017
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
-
 
1018
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
-
 
1019
	if (rdev->num_crtc >= 4) {
-
 
1020
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1244
				udelay(1);
1021
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
-
 
1022
	}
-
 
1023
	if (rdev->num_crtc >= 6) {
-
 
1024
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1245
	}
Line 1025... Line 1246...
1025
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1246
	}
-
 
1247
	}
1026
	}
1248
 
1027
 
1249
	radeon_mc_wait_for_idle(rdev);
1028
	WREG32(D1VGA_CONTROL, 0);
1250
 
1029
	WREG32(D2VGA_CONTROL, 0);
1251
	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
1030
	if (rdev->num_crtc >= 4) {
-
 
1031
	WREG32(EVERGREEN_D3VGA_CONTROL, 0);
1252
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
1032
	WREG32(EVERGREEN_D4VGA_CONTROL, 0);
1253
		/* Block CPU access */
1033
	}
1254
		WREG32(BIF_FB_EN, 0);
1034
	if (rdev->num_crtc >= 6) {
1255
		/* blackout the MC */
1035
	WREG32(EVERGREEN_D5VGA_CONTROL, 0);
1256
		blackout &= ~BLACKOUT_MODE_MASK;
Line 1036... Line 1257...
1036
	WREG32(EVERGREEN_D6VGA_CONTROL, 0);
1257
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
1037
	}
1258
	}
1038
}
-
 
1039
 
-
 
1040
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
-
 
1041
{
-
 
1042
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
-
 
1043
	       upper_32_bits(rdev->mc.vram_start));
1259
}
1044
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
-
 
1045
	       upper_32_bits(rdev->mc.vram_start));
-
 
1046
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
-
 
1047
	       (u32)rdev->mc.vram_start);
-
 
1048
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
-
 
1049
	       (u32)rdev->mc.vram_start);
-
 
1050
 
-
 
1051
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
-
 
1052
	       upper_32_bits(rdev->mc.vram_start));
-
 
1053
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
-
 
1054
	       upper_32_bits(rdev->mc.vram_start));
-
 
1055
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
-
 
1056
	       (u32)rdev->mc.vram_start);
-
 
1057
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
-
 
1058
	       (u32)rdev->mc.vram_start);
-
 
1059
 
-
 
1060
	if (rdev->num_crtc >= 4) {
-
 
1061
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
-
 
1062
	       upper_32_bits(rdev->mc.vram_start));
-
 
1063
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
-
 
1064
	       upper_32_bits(rdev->mc.vram_start));
-
 
1065
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
-
 
1066
	       (u32)rdev->mc.vram_start);
-
 
1067
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
-
 
1068
	       (u32)rdev->mc.vram_start);
-
 
1069
 
-
 
1070
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
-
 
1071
	       upper_32_bits(rdev->mc.vram_start));
-
 
1072
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
-
 
1073
	       upper_32_bits(rdev->mc.vram_start));
-
 
1074
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
-
 
1075
	       (u32)rdev->mc.vram_start);
1260
 
1076
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
-
 
1077
	       (u32)rdev->mc.vram_start);
-
 
1078
	}
-
 
1079
	if (rdev->num_crtc >= 6) {
-
 
1080
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
-
 
1081
	       upper_32_bits(rdev->mc.vram_start));
-
 
1082
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
-
 
1083
	       upper_32_bits(rdev->mc.vram_start));
-
 
Line -... Line 1261...
-
 
1261
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
-
 
1262
{
1084
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
1263
	u32 tmp, frame_count;
1085
	       (u32)rdev->mc.vram_start);
1264
	int i, j;
1086
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
1265
 
1087
	       (u32)rdev->mc.vram_start);
1266
	/* update crtc base addresses */
1088
 
1267
	for (i = 0; i < rdev->num_crtc; i++) {
1089
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
1268
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
1090
	       upper_32_bits(rdev->mc.vram_start));
1269
	       upper_32_bits(rdev->mc.vram_start));
1091
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
1270
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
1092
	       upper_32_bits(rdev->mc.vram_start));
1271
	       upper_32_bits(rdev->mc.vram_start));
1093
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
-
 
1094
	       (u32)rdev->mc.vram_start);
1272
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
1095
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
1273
	       (u32)rdev->mc.vram_start);
-
 
1274
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
1096
	       (u32)rdev->mc.vram_start);
1275
	       (u32)rdev->mc.vram_start);
1097
	}
1276
	}
1098
 
-
 
1099
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
1277
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
1100
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
-
 
1101
	/* Unlock host access */
1278
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
1102
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
1279
 
1103
	mdelay(1);
-
 
1104
	/* Restore video state */
1280
	/* unblackout the MC */
1105
	WREG32(D1VGA_CONTROL, save->vga_control[0]);
1281
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
1106
	WREG32(D2VGA_CONTROL, save->vga_control[1]);
1282
	tmp &= ~BLACKOUT_MODE_MASK;
1107
	if (rdev->num_crtc >= 4) {
-
 
1108
	WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
1283
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
1109
	WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
-
 
1110
	}
-
 
1111
	if (rdev->num_crtc >= 6) {
-
 
1112
	WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
1284
	/* allow CPU access */
1113
	WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
-
 
1114
	}
1285
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
1115
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
-
 
1116
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
-
 
1117
	if (rdev->num_crtc >= 4) {
1286
 
1118
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
1287
	for (i = 0; i < rdev->num_crtc; i++) {
1119
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
1288
		if (save->crtc_enabled) {
1120
	}
-
 
1121
	if (rdev->num_crtc >= 6) {
1289
			if (ASIC_IS_DCE6(rdev)) {
1122
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
-
 
1123
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
1290
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
1124
	}
1291
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
1125
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
1292
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
1126
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
1293
			} else {
1127
	if (rdev->num_crtc >= 4) {
1294
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
-
 
1295
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1128
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
1296
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
-
 
1297
			}
-
 
1298
			/* wait for the next frame */
1129
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
1299
			frame_count = radeon_get_vblank_counter(rdev, i);
1130
	}
-
 
1131
	if (rdev->num_crtc >= 6) {
-
 
1132
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
-
 
1133
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
-
 
1134
	}
-
 
1135
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
1300
			for (j = 0; j < rdev->usec_timeout; j++) {
1136
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
-
 
1137
	if (rdev->num_crtc >= 4) {
-
 
1138
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
-
 
1139
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1301
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
-
 
1302
					break;
-
 
1303
				udelay(1);
-
 
1304
			}
1140
	}
1305
	}
1141
	if (rdev->num_crtc >= 6) {
1306
	}
Line 1142... Line 1307...
1142
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1307
	/* Unlock vga access */
1143
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1308
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
Line 1186... Line 1351...
1186
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
1351
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
1187
			rdev->mc.vram_start >> 12);
1352
			rdev->mc.vram_start >> 12);
1188
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
1353
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
1189
			rdev->mc.vram_end >> 12);
1354
			rdev->mc.vram_end >> 12);
1190
	}
1355
	}
1191
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
1356
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
-
 
1357
	/* llano/ontario only */
1192
	if (rdev->flags & RADEON_IS_IGP) {
1358
	if ((rdev->family == CHIP_PALM) ||
-
 
1359
	    (rdev->family == CHIP_SUMO) ||
-
 
1360
	    (rdev->family == CHIP_SUMO2)) {
1193
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
1361
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
1194
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
1362
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
1195
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
1363
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
1196
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
1364
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
1197
	}
1365
	}
Line 1222... Line 1390...
1222
/*
1390
/*
1223
 * CP.
1391
 * CP.
1224
 */
1392
 */
1225
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
1393
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
1226
{
1394
{
-
 
1395
	struct radeon_ring *ring = &rdev->ring[ib->ring];
-
 
1396
	u32 next_rptr;
-
 
1397
 
1227
	/* set to DX10/11 mode */
1398
	/* set to DX10/11 mode */
1228
	radeon_ring_write(rdev, PACKET3(PACKET3_MODE_CONTROL, 0));
1399
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
1229
	radeon_ring_write(rdev, 1);
1400
	radeon_ring_write(ring, 1);
-
 
1401
 
1230
	/* FIXME: implement */
1402
	if (ring->rptr_save_reg) {
-
 
1403
		next_rptr = ring->wptr + 3 + 4;
-
 
1404
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
-
 
1405
		radeon_ring_write(ring, ((ring->rptr_save_reg -
-
 
1406
					  PACKET3_SET_CONFIG_REG_START) >> 2));
-
 
1407
		radeon_ring_write(ring, next_rptr);
-
 
1408
	} else if (rdev->wb.enabled) {
-
 
1409
		next_rptr = ring->wptr + 5 + 4;
-
 
1410
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
-
 
1411
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
-
 
1412
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
-
 
1413
		radeon_ring_write(ring, next_rptr);
-
 
1414
		radeon_ring_write(ring, 0);
-
 
1415
	}
-
 
1416
 
1231
	radeon_ring_write(rdev, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
1417
	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
1232
	radeon_ring_write(rdev,
1418
	radeon_ring_write(ring,
1233
#ifdef __BIG_ENDIAN
1419
#ifdef __BIG_ENDIAN
1234
			  (2 << 0) |
1420
			  (2 << 0) |
1235
#endif
1421
#endif
1236
			  (ib->gpu_addr & 0xFFFFFFFC));
1422
			  (ib->gpu_addr & 0xFFFFFFFC));
1237
	radeon_ring_write(rdev, upper_32_bits(ib->gpu_addr) & 0xFF);
1423
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
1238
	radeon_ring_write(rdev, ib->length_dw);
1424
	radeon_ring_write(ring, ib->length_dw);
1239
}
1425
}
Line 1240... Line 1426...
1240
 
1426
 
1241
 
1427
 
Line 1271... Line 1457...
1271
	return 0;
1457
	return 0;
1272
}
1458
}
Line 1273... Line 1459...
1273
 
1459
 
1274
static int evergreen_cp_start(struct radeon_device *rdev)
1460
static int evergreen_cp_start(struct radeon_device *rdev)
-
 
1461
{
1275
{
1462
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1276
	int r, i;
1463
	int r, i;
Line 1277... Line 1464...
1277
	uint32_t cp_me;
1464
	uint32_t cp_me;
1278
 
1465
 
1279
	r = radeon_ring_lock(rdev, 7);
1466
	r = radeon_ring_lock(rdev, ring, 7);
1280
	if (r) {
1467
	if (r) {
1281
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
1468
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
1282
		return r;
1469
		return r;
1283
	}
1470
	}
1284
	radeon_ring_write(rdev, PACKET3(PACKET3_ME_INITIALIZE, 5));
1471
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
1285
	radeon_ring_write(rdev, 0x1);
1472
	radeon_ring_write(ring, 0x1);
1286
	radeon_ring_write(rdev, 0x0);
1473
	radeon_ring_write(ring, 0x0);
1287
	radeon_ring_write(rdev, rdev->config.evergreen.max_hw_contexts - 1);
1474
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
1288
	radeon_ring_write(rdev, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
1475
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
1289
	radeon_ring_write(rdev, 0);
1476
	radeon_ring_write(ring, 0);
Line 1290... Line 1477...
1290
	radeon_ring_write(rdev, 0);
1477
	radeon_ring_write(ring, 0);
1291
	radeon_ring_unlock_commit(rdev);
1478
	radeon_ring_unlock_commit(rdev, ring);
Line 1292... Line 1479...
1292
 
1479
 
1293
	cp_me = 0xff;
1480
	cp_me = 0xff;
1294
	WREG32(CP_ME_CNTL, cp_me);
1481
	WREG32(CP_ME_CNTL, cp_me);
1295
 
1482
 
1296
	r = radeon_ring_lock(rdev, evergreen_default_size + 19);
1483
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
Line 1297... Line 1484...
1297
	if (r) {
1484
	if (r) {
1298
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
1485
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
1299
		return r;
1486
		return r;
Line 1300... Line 1487...
1300
	}
1487
	}
1301
 
1488
 
Line 1302... Line 1489...
1302
	/* setup clear context state */
1489
	/* setup clear context state */
1303
	radeon_ring_write(rdev, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
1490
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
Line 1304... Line 1491...
1304
	radeon_ring_write(rdev, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
1491
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
1305
 
1492
 
1306
	for (i = 0; i < evergreen_default_size; i++)
1493
	for (i = 0; i < evergreen_default_size; i++)
Line 1307... Line 1494...
1307
		radeon_ring_write(rdev, evergreen_default_state[i]);
1494
		radeon_ring_write(ring, evergreen_default_state[i]);
1308
 
1495
 
1309
	radeon_ring_write(rdev, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
1496
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
1310
	radeon_ring_write(rdev, PACKET3_PREAMBLE_END_CLEAR_STATE);
1497
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
1311
 
1498
 
Line 1312... Line 1499...
1312
	/* set clear context state */
1499
	/* set clear context state */
1313
	radeon_ring_write(rdev, PACKET3(PACKET3_CLEAR_STATE, 0));
1500
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
1314
	radeon_ring_write(rdev, 0);
1501
	radeon_ring_write(ring, 0);
1315
 
1502
 
1316
	/* SQ_VTX_BASE_VTX_LOC */
1503
	/* SQ_VTX_BASE_VTX_LOC */
1317
	radeon_ring_write(rdev, 0xc0026f00);
1504
	radeon_ring_write(ring, 0xc0026f00);
1318
	radeon_ring_write(rdev, 0x00000000);
1505
	radeon_ring_write(ring, 0x00000000);
1319
	radeon_ring_write(rdev, 0x00000000);
1506
	radeon_ring_write(ring, 0x00000000);
1320
	radeon_ring_write(rdev, 0x00000000);
1507
	radeon_ring_write(ring, 0x00000000);
1321
 
1508
 
1322
	/* Clear consts */
1509
	/* Clear consts */
Line 1323... Line 1510...
1323
	radeon_ring_write(rdev, 0xc0036f00);
1510
	radeon_ring_write(ring, 0xc0036f00);
Line 1324... Line 1511...
1324
	radeon_ring_write(rdev, 0x00000bc4);
1511
	radeon_ring_write(ring, 0x00000bc4);
1325
	radeon_ring_write(rdev, 0xffffffff);
1512
	radeon_ring_write(ring, 0xffffffff);
Line 1326... Line 1513...
1326
	radeon_ring_write(rdev, 0xffffffff);
1513
	radeon_ring_write(ring, 0xffffffff);
1327
	radeon_ring_write(rdev, 0xffffffff);
1514
	radeon_ring_write(ring, 0xffffffff);
-
 
1515
 
1328
 
1516
	radeon_ring_write(ring, 0xc0026900);
1329
	radeon_ring_write(rdev, 0xc0026900);
1517
	radeon_ring_write(ring, 0x00000316);
1330
	radeon_ring_write(rdev, 0x00000316);
1518
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
Line 1331... Line 1519...
1331
	radeon_ring_write(rdev, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
1519
	radeon_ring_write(ring, 0x00000010); /*  */
Line 1353... Line 1541...
1353
	mdelay(15);
1541
	mdelay(15);
1354
	WREG32(GRBM_SOFT_RESET, 0);
1542
	WREG32(GRBM_SOFT_RESET, 0);
1355
	RREG32(GRBM_SOFT_RESET);
1543
	RREG32(GRBM_SOFT_RESET);
Line 1356... Line 1544...
1356
 
1544
 
1357
	/* Set ring buffer size */
1545
	/* Set ring buffer size */
1358
	rb_bufsz = drm_order(rdev->cp.ring_size / 8);
1546
	rb_bufsz = drm_order(ring->ring_size / 8);
1359
	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
1547
	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
1360
#ifdef __BIG_ENDIAN
1548
#ifdef __BIG_ENDIAN
1361
	tmp |= BUF_SWAP_32BIT;
1549
	tmp |= BUF_SWAP_32BIT;
1362
#endif
1550
#endif
1363
	WREG32(CP_RB_CNTL, tmp);
1551
	WREG32(CP_RB_CNTL, tmp);
-
 
1552
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
Line 1364... Line 1553...
1364
	WREG32(CP_SEM_WAIT_TIMER, 0x4);
1553
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
1365
 
1554
 
Line 1366... Line 1555...
1366
	/* Set the write pointer delay */
1555
	/* Set the write pointer delay */
1367
	WREG32(CP_RB_WPTR_DELAY, 0);
1556
	WREG32(CP_RB_WPTR_DELAY, 0);
1368
 
1557
 
-
 
1558
	/* Initialize the ring buffer's read and write pointers */
1369
	/* Initialize the ring buffer's read and write pointers */
1559
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
Line 1370... Line 1560...
1370
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
1560
	WREG32(CP_RB_RPTR_WR, 0);
1371
	WREG32(CP_RB_RPTR_WR, 0);
1561
	ring->wptr = 0;
1372
	WREG32(CP_RB_WPTR, 0);
1562
	WREG32(CP_RB_WPTR, ring->wptr);
1373
 
1563
 
Line 1385... Line 1575...
1385
	}
1575
	}
Line 1386... Line 1576...
1386
 
1576
 
1387
	mdelay(1);
1577
	mdelay(1);
Line 1388... Line 1578...
1388
	WREG32(CP_RB_CNTL, tmp);
1578
	WREG32(CP_RB_CNTL, tmp);
1389
 
1579
 
Line 1390... Line 1580...
1390
	WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
1580
	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
1391
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
-
 
Line 1392... Line 1581...
1392
 
1581
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
1393
	rdev->cp.rptr = RREG32(CP_RB_RPTR);
1582
 
1394
	rdev->cp.wptr = RREG32(CP_RB_WPTR);
1583
	ring->rptr = RREG32(CP_RB_RPTR);
1395
 
1584
 
1396
	evergreen_cp_start(rdev);
1585
	evergreen_cp_start(rdev);
1397
	rdev->cp.ready = true;
1586
	ring->ready = true;
1398
	r = radeon_ring_test(rdev);
1587
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
1399
	if (r) {
1588
	if (r) {
1400
		rdev->cp.ready = false;
1589
		ring->ready = false;
Line 1401... Line 1590...
1401
		return r;
1590
		return r;
1402
	}
1591
	}
1403
	return 0;
1592
	return 0;
1404
}
-
 
1405
 
-
 
1406
/*
-
 
1407
 * Core functions
-
 
1408
 */
-
 
1409
static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
-
 
1410
						  u32 num_tile_pipes,
-
 
1411
						  u32 num_backends,
-
 
1412
						  u32 backend_disable_mask)
-
 
1413
{
-
 
1414
	u32 backend_map = 0;
-
 
1415
	u32 enabled_backends_mask = 0;
-
 
1416
	u32 enabled_backends_count = 0;
-
 
1417
	u32 cur_pipe;
-
 
1418
	u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
-
 
1419
	u32 cur_backend = 0;
-
 
1420
	u32 i;
-
 
1421
	bool force_no_swizzle;
-
 
1422
 
-
 
1423
	if (num_tile_pipes > EVERGREEN_MAX_PIPES)
-
 
1424
		num_tile_pipes = EVERGREEN_MAX_PIPES;
-
 
1425
	if (num_tile_pipes < 1)
-
 
1426
		num_tile_pipes = 1;
-
 
1427
	if (num_backends > EVERGREEN_MAX_BACKENDS)
-
 
1428
		num_backends = EVERGREEN_MAX_BACKENDS;
-
 
1429
	if (num_backends < 1)
-
 
1430
		num_backends = 1;
-
 
1431
 
-
 
1432
	for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
-
 
1433
		if (((backend_disable_mask >> i) & 1) == 0) {
-
 
1434
			enabled_backends_mask |= (1 << i);
-
 
1435
			++enabled_backends_count;
-
 
1436
		}
-
 
1437
		if (enabled_backends_count == num_backends)
-
 
1438
			break;
-
 
1439
	}
-
 
1440
 
-
 
1441
	if (enabled_backends_count == 0) {
-
 
1442
		enabled_backends_mask = 1;
-
 
1443
		enabled_backends_count = 1;
-
 
1444
	}
-
 
1445
 
-
 
1446
	if (enabled_backends_count != num_backends)
-
 
1447
		num_backends = enabled_backends_count;
-
 
1448
 
-
 
1449
	memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
-
 
1450
	switch (rdev->family) {
-
 
1451
	case CHIP_CEDAR:
-
 
1452
	case CHIP_REDWOOD:
-
 
1453
	case CHIP_PALM:
-
 
1454
	case CHIP_SUMO:
-
 
1455
	case CHIP_SUMO2:
-
 
1456
	case CHIP_TURKS:
-
 
1457
	case CHIP_CAICOS:
-
 
1458
		force_no_swizzle = false;
-
 
1459
		break;
-
 
1460
	case CHIP_CYPRESS:
-
 
1461
	case CHIP_HEMLOCK:
-
 
1462
	case CHIP_JUNIPER:
-
 
1463
	case CHIP_BARTS:
-
 
1464
	default:
-
 
1465
		force_no_swizzle = true;
-
 
1466
		break;
-
 
1467
	}
-
 
1468
	if (force_no_swizzle) {
-
 
1469
		bool last_backend_enabled = false;
-
 
1470
 
-
 
1471
		force_no_swizzle = false;
-
 
1472
		for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
-
 
1473
			if (((enabled_backends_mask >> i) & 1) == 1) {
-
 
1474
				if (last_backend_enabled)
-
 
1475
					force_no_swizzle = true;
-
 
1476
				last_backend_enabled = true;
-
 
1477
			} else
-
 
1478
				last_backend_enabled = false;
-
 
1479
		}
-
 
1480
	}
-
 
1481
 
-
 
1482
	switch (num_tile_pipes) {
-
 
1483
	case 1:
-
 
1484
	case 3:
-
 
1485
	case 5:
-
 
1486
	case 7:
-
 
1487
		DRM_ERROR("odd number of pipes!\n");
-
 
1488
		break;
-
 
1489
	case 2:
-
 
1490
		swizzle_pipe[0] = 0;
-
 
1491
		swizzle_pipe[1] = 1;
-
 
1492
		break;
-
 
1493
	case 4:
-
 
1494
		if (force_no_swizzle) {
-
 
1495
			swizzle_pipe[0] = 0;
-
 
1496
			swizzle_pipe[1] = 1;
-
 
1497
			swizzle_pipe[2] = 2;
-
 
1498
			swizzle_pipe[3] = 3;
-
 
1499
		} else {
-
 
1500
			swizzle_pipe[0] = 0;
-
 
1501
			swizzle_pipe[1] = 2;
-
 
1502
			swizzle_pipe[2] = 1;
-
 
1503
			swizzle_pipe[3] = 3;
-
 
1504
		}
-
 
1505
		break;
-
 
1506
	case 6:
-
 
1507
		if (force_no_swizzle) {
-
 
1508
			swizzle_pipe[0] = 0;
-
 
1509
			swizzle_pipe[1] = 1;
-
 
1510
			swizzle_pipe[2] = 2;
-
 
1511
			swizzle_pipe[3] = 3;
-
 
1512
			swizzle_pipe[4] = 4;
-
 
1513
			swizzle_pipe[5] = 5;
-
 
1514
		} else {
-
 
1515
			swizzle_pipe[0] = 0;
-
 
1516
			swizzle_pipe[1] = 2;
-
 
1517
			swizzle_pipe[2] = 4;
-
 
1518
			swizzle_pipe[3] = 1;
-
 
1519
			swizzle_pipe[4] = 3;
-
 
1520
			swizzle_pipe[5] = 5;
-
 
1521
		}
-
 
1522
		break;
-
 
1523
	case 8:
-
 
1524
		if (force_no_swizzle) {
-
 
1525
			swizzle_pipe[0] = 0;
-
 
1526
			swizzle_pipe[1] = 1;
-
 
1527
			swizzle_pipe[2] = 2;
-
 
1528
			swizzle_pipe[3] = 3;
-
 
1529
			swizzle_pipe[4] = 4;
-
 
1530
			swizzle_pipe[5] = 5;
-
 
1531
			swizzle_pipe[6] = 6;
-
 
1532
			swizzle_pipe[7] = 7;
-
 
1533
		} else {
-
 
1534
			swizzle_pipe[0] = 0;
-
 
1535
			swizzle_pipe[1] = 2;
-
 
1536
			swizzle_pipe[2] = 4;
-
 
1537
			swizzle_pipe[3] = 6;
-
 
1538
			swizzle_pipe[4] = 1;
-
 
1539
			swizzle_pipe[5] = 3;
-
 
1540
			swizzle_pipe[6] = 5;
-
 
1541
			swizzle_pipe[7] = 7;
-
 
1542
		}
-
 
1543
		break;
-
 
1544
	}
-
 
1545
 
-
 
1546
	for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
-
 
1547
		while (((1 << cur_backend) & enabled_backends_mask) == 0)
-
 
1548
			cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
-
 
1549
 
-
 
1550
		backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));
-
 
1551
 
-
 
1552
		cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
-
 
1553
	}
-
 
1554
 
-
 
1555
	return backend_map;
-
 
1556
}
-
 
1557
 
-
 
1558
static void evergreen_program_channel_remap(struct radeon_device *rdev)
-
 
1559
{
-
 
1560
	u32 tcp_chan_steer_lo, tcp_chan_steer_hi, mc_shared_chremap, tmp;
-
 
1561
 
-
 
1562
	tmp = RREG32(MC_SHARED_CHMAP);
-
 
1563
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
-
 
1564
	case 0:
-
 
1565
	case 1:
-
 
1566
	case 2:
-
 
1567
	case 3:
-
 
1568
	default:
-
 
1569
		/* default mapping */
-
 
1570
		mc_shared_chremap = 0x00fac688;
-
 
1571
		break;
-
 
1572
	}
-
 
1573
 
-
 
1574
	switch (rdev->family) {
-
 
1575
	case CHIP_HEMLOCK:
-
 
1576
	case CHIP_CYPRESS:
-
 
1577
	case CHIP_BARTS:
-
 
1578
		tcp_chan_steer_lo = 0x54763210;
-
 
1579
		tcp_chan_steer_hi = 0x0000ba98;
-
 
1580
		break;
-
 
1581
	case CHIP_JUNIPER:
-
 
1582
	case CHIP_REDWOOD:
-
 
1583
	case CHIP_CEDAR:
-
 
1584
	case CHIP_PALM:
-
 
1585
	case CHIP_SUMO:
-
 
1586
	case CHIP_SUMO2:
-
 
1587
	case CHIP_TURKS:
-
 
1588
	case CHIP_CAICOS:
-
 
1589
	default:
-
 
1590
		tcp_chan_steer_lo = 0x76543210;
-
 
1591
		tcp_chan_steer_hi = 0x0000ba98;
-
 
1592
		break;
-
 
1593
	}
-
 
1594
 
-
 
1595
	WREG32(TCP_CHAN_STEER_LO, tcp_chan_steer_lo);
1593
}
1596
	WREG32(TCP_CHAN_STEER_HI, tcp_chan_steer_hi);
1594
 
1597
	WREG32(MC_SHARED_CHREMAP, mc_shared_chremap);
-
 
1598
}
-
 
1599
 
1595
/*
1600
static void evergreen_gpu_init(struct radeon_device *rdev)
1596
 * Core functions
1601
{
-
 
1602
	u32 cc_rb_backend_disable = 0;
-
 
1603
	u32 cc_gc_shader_pipe_config;
1597
 */
1604
	u32 gb_addr_config = 0;
1598
static void evergreen_gpu_init(struct radeon_device *rdev)
1605
	u32 mc_shared_chmap, mc_arb_ramcfg;
1599
{
1606
	u32 gb_backend_map;
1600
	u32 gb_addr_config;
1607
	u32 grbm_gfx_index;
1601
	u32 mc_shared_chmap, mc_arb_ramcfg;
Line 1617... Line 1611...
1617
	u32 sq_stack_resource_mgmt_1;
1611
	u32 sq_stack_resource_mgmt_1;
1618
	u32 sq_stack_resource_mgmt_2;
1612
	u32 sq_stack_resource_mgmt_2;
1619
	u32 sq_stack_resource_mgmt_3;
1613
	u32 sq_stack_resource_mgmt_3;
1620
	u32 vgt_cache_invalidation;
1614
	u32 vgt_cache_invalidation;
1621
	u32 hdp_host_path_cntl, tmp;
1615
	u32 hdp_host_path_cntl, tmp;
-
 
1616
	u32 disabled_rb_mask;
1622
	int i, j, num_shader_engines, ps_thread_count;
1617
	int i, j, num_shader_engines, ps_thread_count;
Line 1623... Line 1618...
1623
 
1618
 
1624
	switch (rdev->family) {
1619
	switch (rdev->family) {
1625
	case CHIP_CYPRESS:
1620
	case CHIP_CYPRESS:
Line 1641... Line 1636...
1641
		rdev->config.evergreen.sq_num_cf_insts = 2;
1636
		rdev->config.evergreen.sq_num_cf_insts = 2;
Line 1642... Line 1637...
1642
 
1637
 
1643
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1638
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1644
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1639
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
-
 
1640
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1645
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1641
		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
1646
		break;
1642
		break;
1647
	case CHIP_JUNIPER:
1643
	case CHIP_JUNIPER:
1648
		rdev->config.evergreen.num_ses = 1;
1644
		rdev->config.evergreen.num_ses = 1;
1649
		rdev->config.evergreen.max_pipes = 4;
1645
		rdev->config.evergreen.max_pipes = 4;
Line 1662... Line 1658...
1662
		rdev->config.evergreen.sq_num_cf_insts = 2;
1658
		rdev->config.evergreen.sq_num_cf_insts = 2;
Line 1663... Line 1659...
1663
 
1659
 
1664
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1660
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1665
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1661
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
-
 
1662
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1666
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1663
		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
1667
		break;
1664
		break;
1668
	case CHIP_REDWOOD:
1665
	case CHIP_REDWOOD:
1669
		rdev->config.evergreen.num_ses = 1;
1666
		rdev->config.evergreen.num_ses = 1;
1670
		rdev->config.evergreen.max_pipes = 4;
1667
		rdev->config.evergreen.max_pipes = 4;
Line 1683... Line 1680...
1683
		rdev->config.evergreen.sq_num_cf_insts = 2;
1680
		rdev->config.evergreen.sq_num_cf_insts = 2;
Line 1684... Line 1681...
1684
 
1681
 
1685
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1682
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1686
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1683
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
-
 
1684
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1687
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1685
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
1688
		break;
1686
		break;
1689
	case CHIP_CEDAR:
1687
	case CHIP_CEDAR:
1690
	default:
1688
	default:
1691
		rdev->config.evergreen.num_ses = 1;
1689
		rdev->config.evergreen.num_ses = 1;
Line 1705... Line 1703...
1705
		rdev->config.evergreen.sq_num_cf_insts = 1;
1703
		rdev->config.evergreen.sq_num_cf_insts = 1;
Line 1706... Line 1704...
1706
 
1704
 
1707
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1705
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1708
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1706
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
-
 
1707
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1709
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1708
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
1710
		break;
1709
		break;
1711
	case CHIP_PALM:
1710
	case CHIP_PALM:
1712
		rdev->config.evergreen.num_ses = 1;
1711
		rdev->config.evergreen.num_ses = 1;
1713
		rdev->config.evergreen.max_pipes = 2;
1712
		rdev->config.evergreen.max_pipes = 2;
Line 1726... Line 1725...
1726
		rdev->config.evergreen.sq_num_cf_insts = 1;
1725
		rdev->config.evergreen.sq_num_cf_insts = 1;
Line 1727... Line 1726...
1727
 
1726
 
1728
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1727
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1729
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1728
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
-
 
1729
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1730
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1730
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
1731
		break;
1731
		break;
1732
	case CHIP_SUMO:
1732
	case CHIP_SUMO:
1733
		rdev->config.evergreen.num_ses = 1;
1733
		rdev->config.evergreen.num_ses = 1;
1734
		rdev->config.evergreen.max_pipes = 4;
1734
		rdev->config.evergreen.max_pipes = 4;
Line 1753... Line 1753...
1753
		rdev->config.evergreen.sq_num_cf_insts = 2;
1753
		rdev->config.evergreen.sq_num_cf_insts = 2;
Line 1754... Line 1754...
1754
 
1754
 
1755
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1755
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1756
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1756
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
-
 
1757
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1757
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1758
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
1758
		break;
1759
		break;
1759
	case CHIP_SUMO2:
1760
	case CHIP_SUMO2:
1760
		rdev->config.evergreen.num_ses = 1;
1761
		rdev->config.evergreen.num_ses = 1;
1761
		rdev->config.evergreen.max_pipes = 4;
1762
		rdev->config.evergreen.max_pipes = 4;
Line 1774... Line 1775...
1774
		rdev->config.evergreen.sq_num_cf_insts = 2;
1775
		rdev->config.evergreen.sq_num_cf_insts = 2;
Line 1775... Line 1776...
1775
 
1776
 
1776
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1777
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1777
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1778
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
-
 
1779
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1778
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1780
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
1779
		break;
1781
		break;
1780
	case CHIP_BARTS:
1782
	case CHIP_BARTS:
1781
		rdev->config.evergreen.num_ses = 2;
1783
		rdev->config.evergreen.num_ses = 2;
1782
		rdev->config.evergreen.max_pipes = 4;
1784
		rdev->config.evergreen.max_pipes = 4;
Line 1795... Line 1797...
1795
		rdev->config.evergreen.sq_num_cf_insts = 2;
1797
		rdev->config.evergreen.sq_num_cf_insts = 2;
Line 1796... Line 1798...
1796
 
1798
 
1797
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1799
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1798
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1800
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
-
 
1801
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1799
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1802
		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
1800
		break;
1803
		break;
1801
	case CHIP_TURKS:
1804
	case CHIP_TURKS:
1802
		rdev->config.evergreen.num_ses = 1;
1805
		rdev->config.evergreen.num_ses = 1;
1803
		rdev->config.evergreen.max_pipes = 4;
1806
		rdev->config.evergreen.max_pipes = 4;
Line 1816... Line 1819...
1816
		rdev->config.evergreen.sq_num_cf_insts = 2;
1819
		rdev->config.evergreen.sq_num_cf_insts = 2;
Line 1817... Line 1820...
1817
 
1820
 
1818
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1821
		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1819
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1822
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
-
 
1823
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1820
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1824
		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
1821
		break;
1825
		break;
1822
	case CHIP_CAICOS:
1826
	case CHIP_CAICOS:
1823
		rdev->config.evergreen.num_ses = 1;
1827
		rdev->config.evergreen.num_ses = 1;
1824
		rdev->config.evergreen.max_pipes = 4;
1828
		rdev->config.evergreen.max_pipes = 4;
Line 1837... Line 1841...
1837
		rdev->config.evergreen.sq_num_cf_insts = 1;
1841
		rdev->config.evergreen.sq_num_cf_insts = 1;
Line 1838... Line 1842...
1838
 
1842
 
1839
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1843
		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1840
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1844
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
-
 
1845
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1841
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1846
		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
1842
		break;
1847
		break;
Line 1843... Line 1848...
1843
	}
1848
	}
1844
 
1849
 
Line 1851... Line 1856...
1851
		WREG32((0x2c24 + j), 0x00000000);
1856
		WREG32((0x2c24 + j), 0x00000000);
1852
	}
1857
	}
Line 1853... Line 1858...
1853
 
1858
 
Line 1854... Line -...
1854
	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
-
 
1855
 
-
 
1856
	cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;
-
 
1857
 
-
 
1858
	cc_gc_shader_pipe_config |=
-
 
1859
		INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
-
 
1860
				  & EVERGREEN_MAX_PIPES_MASK);
-
 
1861
	cc_gc_shader_pipe_config |=
1859
	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
1862
		INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
-
 
1863
			       & EVERGREEN_MAX_SIMDS_MASK);
-
 
1864
 
-
 
1865
	cc_rb_backend_disable =
-
 
1866
		BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
-
 
Line 1867... Line 1860...
1867
				& EVERGREEN_MAX_BACKENDS_MASK);
1860
 
1868
 
1861
	evergreen_fix_pci_max_read_req_size(rdev);
-
 
1862
 
-
 
1863
	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
1869
 
1864
	if ((rdev->family == CHIP_PALM) ||
1870
	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
1865
	    (rdev->family == CHIP_SUMO) ||
1871
	if (rdev->flags & RADEON_IS_IGP)
1866
	    (rdev->family == CHIP_SUMO2))
Line 1872... Line -...
1872
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
-
 
1873
	else
-
 
1874
	mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
-
 
1875
 
-
 
1876
	switch (rdev->config.evergreen.max_tile_pipes) {
-
 
1877
	case 1:
-
 
1878
	default:
-
 
1879
		gb_addr_config |= NUM_PIPES(0);
-
 
1880
		break;
-
 
1881
	case 2:
-
 
1882
		gb_addr_config |= NUM_PIPES(1);
-
 
1883
		break;
-
 
1884
	case 4:
-
 
1885
		gb_addr_config |= NUM_PIPES(2);
-
 
1886
		break;
-
 
1887
	case 8:
-
 
1888
		gb_addr_config |= NUM_PIPES(3);
-
 
1889
		break;
-
 
1890
	}
-
 
1891
 
-
 
1892
	gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
-
 
1893
	gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
-
 
1894
	gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
-
 
1895
	gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
-
 
1896
	gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
-
 
1897
	gb_addr_config |= MULTI_GPU_TILE_SIZE(2);
-
 
1898
 
-
 
1899
	if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
-
 
1900
		gb_addr_config |= ROW_SIZE(2);
-
 
1901
	else
-
 
1902
		gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);
-
 
1903
 
-
 
1904
	if (rdev->ddev->pdev->device == 0x689e) {
-
 
1905
		u32 efuse_straps_4;
-
 
1906
		u32 efuse_straps_3;
-
 
1907
		u8 efuse_box_bit_131_124;
-
 
1908
 
-
 
1909
		WREG32(RCU_IND_INDEX, 0x204);
-
 
1910
		efuse_straps_4 = RREG32(RCU_IND_DATA);
-
 
1911
		WREG32(RCU_IND_INDEX, 0x203);
-
 
1912
		efuse_straps_3 = RREG32(RCU_IND_DATA);
-
 
1913
		efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));
-
 
1914
 
-
 
1915
		switch(efuse_box_bit_131_124) {
-
 
1916
		case 0x00:
-
 
1917
			gb_backend_map = 0x76543210;
-
 
1918
			break;
-
 
1919
		case 0x55:
-
 
1920
			gb_backend_map = 0x77553311;
-
 
1921
			break;
-
 
1922
		case 0x56:
-
 
1923
			gb_backend_map = 0x77553300;
-
 
1924
			break;
-
 
1925
		case 0x59:
-
 
1926
			gb_backend_map = 0x77552211;
-
 
1927
			break;
-
 
1928
		case 0x66:
-
 
1929
			gb_backend_map = 0x77443300;
-
 
1930
			break;
-
 
1931
		case 0x99:
-
 
1932
			gb_backend_map = 0x66552211;
-
 
1933
			break;
-
 
1934
		case 0x5a:
-
 
1935
			gb_backend_map = 0x77552200;
-
 
1936
			break;
-
 
1937
		case 0xaa:
-
 
1938
			gb_backend_map = 0x66442200;
-
 
1939
			break;
-
 
1940
		case 0x95:
-
 
1941
			gb_backend_map = 0x66553311;
-
 
1942
			break;
-
 
1943
		default:
-
 
1944
			DRM_ERROR("bad backend map, using default\n");
-
 
1945
			gb_backend_map =
-
 
1946
				evergreen_get_tile_pipe_to_backend_map(rdev,
-
 
1947
								       rdev->config.evergreen.max_tile_pipes,
-
 
1948
								       rdev->config.evergreen.max_backends,
-
 
1949
								       ((EVERGREEN_MAX_BACKENDS_MASK <<
-
 
1950
								   rdev->config.evergreen.max_backends) &
-
 
1951
									EVERGREEN_MAX_BACKENDS_MASK));
-
 
1952
			break;
-
 
1953
		}
-
 
1954
	} else if (rdev->ddev->pdev->device == 0x68b9) {
-
 
1955
		u32 efuse_straps_3;
-
 
1956
		u8 efuse_box_bit_127_124;
-
 
1957
 
-
 
1958
		WREG32(RCU_IND_INDEX, 0x203);
-
 
1959
		efuse_straps_3 = RREG32(RCU_IND_DATA);
-
 
1960
		efuse_box_bit_127_124 = (u8)((efuse_straps_3 & 0xF0000000) >> 28);
-
 
1961
 
-
 
1962
		switch(efuse_box_bit_127_124) {
-
 
1963
		case 0x0:
-
 
1964
			gb_backend_map = 0x00003210;
-
 
1965
			break;
-
 
1966
		case 0x5:
-
 
1967
		case 0x6:
-
 
1968
		case 0x9:
-
 
1969
		case 0xa:
-
 
1970
			gb_backend_map = 0x00003311;
-
 
1971
			break;
-
 
1972
		default:
-
 
1973
			DRM_ERROR("bad backend map, using default\n");
-
 
1974
			gb_backend_map =
-
 
1975
				evergreen_get_tile_pipe_to_backend_map(rdev,
-
 
1976
								       rdev->config.evergreen.max_tile_pipes,
-
 
1977
								       rdev->config.evergreen.max_backends,
-
 
1978
								       ((EVERGREEN_MAX_BACKENDS_MASK <<
-
 
1979
								   rdev->config.evergreen.max_backends) &
-
 
1980
									EVERGREEN_MAX_BACKENDS_MASK));
-
 
1981
			break;
-
 
1982
		}
-
 
1983
	} else {
-
 
1984
		switch (rdev->family) {
-
 
1985
		case CHIP_CYPRESS:
-
 
1986
		case CHIP_HEMLOCK:
-
 
1987
		case CHIP_BARTS:
-
 
1988
			gb_backend_map = 0x66442200;
-
 
1989
			break;
-
 
1990
		case CHIP_JUNIPER:
-
 
1991
			gb_backend_map = 0x00002200;
-
 
1992
			break;
-
 
1993
		default:
-
 
1994
			gb_backend_map =
-
 
1995
				evergreen_get_tile_pipe_to_backend_map(rdev,
-
 
1996
								       rdev->config.evergreen.max_tile_pipes,
-
 
1997
								       rdev->config.evergreen.max_backends,
-
 
1998
								       ((EVERGREEN_MAX_BACKENDS_MASK <<
-
 
1999
									 rdev->config.evergreen.max_backends) &
-
 
2000
									EVERGREEN_MAX_BACKENDS_MASK));
1867
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
2001
		}
1868
	else
2002
	}
1869
	mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
2003
 
1870
 
2004
	/* setup tiling info dword.  gb_addr_config is not adequate since it does
1871
	/* setup tiling info dword.  gb_addr_config is not adequate since it does
Line 2025... Line 1892...
2025
		break;
1892
		break;
2026
	}
1893
	}
2027
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
1894
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
2028
	if (rdev->flags & RADEON_IS_IGP)
1895
	if (rdev->flags & RADEON_IS_IGP)
2029
		rdev->config.evergreen.tile_config |= 1 << 4;
1896
		rdev->config.evergreen.tile_config |= 1 << 4;
2030
	else
1897
	else {
-
 
1898
		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
-
 
1899
		case 0: /* four banks */
2031
		rdev->config.evergreen.tile_config |=
1900
			rdev->config.evergreen.tile_config |= 0 << 4;
-
 
1901
			break;
-
 
1902
		case 1: /* eight banks */
2032
			((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) << 4;
1903
			rdev->config.evergreen.tile_config |= 1 << 4;
-
 
1904
			break;
-
 
1905
		case 2: /* sixteen banks */
-
 
1906
		default:
2033
	rdev->config.evergreen.tile_config |=
1907
			rdev->config.evergreen.tile_config |= 2 << 4;
-
 
1908
			break;
-
 
1909
		}
-
 
1910
	}
2034
		((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT) << 8;
1911
	rdev->config.evergreen.tile_config |= 0 << 8;
2035
	rdev->config.evergreen.tile_config |=
1912
	rdev->config.evergreen.tile_config |=
2036
		((gb_addr_config & 0x30000000) >> 28) << 12;
1913
		((gb_addr_config & 0x30000000) >> 28) << 12;
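
Both revisions above pack the same tiling fields into rdev->config.evergreen.tile_config; only where the bank and burst values come from differs between them. A minimal sketch of the field layout implied by the shifts used above (the 0x3 masks are an assumption about field width):

static u32 pack_tile_config_fields(u32 banks, u32 burst, u32 row_size)
{
	u32 tile_config = 0;

	tile_config |= (banks    & 0x3) << 4;	/* 0 = 4, 1 = 8, 2 = 16 banks */
	tile_config |= (burst    & 0x3) << 8;	/* burst length field */
	tile_config |= (row_size & 0x3) << 12;	/* from GB_ADDR_CONFIG bits 28-29 */
	return tile_config;
}
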
Line 2037... Line -...
2037
 
-
 
2038
	rdev->config.evergreen.backend_map = gb_backend_map;
-
 
2039
	WREG32(GB_BACKEND_MAP, gb_backend_map);
-
 
2040
	WREG32(GB_ADDR_CONFIG, gb_addr_config);
-
 
2041
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
1914
 
Line 2042... Line 1915...
2042
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
1915
	num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
2043
 
-
 
2044
	evergreen_program_channel_remap(rdev);
1916
 
2045
 
1917
	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
Line -... Line 1918...
-
 
1918
		u32 efuse_straps_4;
-
 
1919
		u32 efuse_straps_3;
-
 
1920
 
2046
	num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
1921
		WREG32(RCU_IND_INDEX, 0x204);
2047
	grbm_gfx_index = INSTANCE_BROADCAST_WRITES;
1922
		efuse_straps_4 = RREG32(RCU_IND_DATA);
2048
 
1923
		WREG32(RCU_IND_INDEX, 0x203);
-
 
1924
		efuse_straps_3 = RREG32(RCU_IND_DATA);
-
 
1925
		tmp = (((efuse_straps_4 & 0xf) << 4) |
-
 
1926
		      ((efuse_straps_3 & 0xf0000000) >> 28));
2049
	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
1927
	} else {
Line 2050... Line 1928...
2050
		u32 rb = cc_rb_backend_disable | (0xf0 << 16);
1928
		tmp = 0;
2051
		u32 sp = cc_gc_shader_pipe_config;
1929
		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
2052
		u32 gfx = grbm_gfx_index | SE_INDEX(i);
1930
			u32 rb_disable_bitmap;
-
 
1931
 
-
 
1932
			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
-
 
1933
			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
2053
 
1934
			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
-
 
1935
			tmp <<= 4;
-
 
1936
			tmp |= rb_disable_bitmap;
Line 2054... Line 1937...
2054
		if (i == num_shader_engines) {
1937
		}
2055
			rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
1938
	}
Line 2056... Line -...
2056
			sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
-
 
2057
		}
1939
	/* enabled rb are just the one not disabled :) */
2058
 
1940
	disabled_rb_mask = tmp;
2059
		WREG32(GRBM_GFX_INDEX, gfx);
1941
 
2060
		WREG32(RLC_GFX_INDEX, gfx);
-
 
Line 2061... Line 1942...
2061
 
1942
	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
-
 
1943
	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
2062
		WREG32(CC_RB_BACKEND_DISABLE, rb);
1944
 
2063
		WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
1945
	WREG32(GB_ADDR_CONFIG, gb_addr_config);
Line 2064... Line 1946...
2064
		WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
1946
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
2065
		WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
1947
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
2066
        }
1948
 
2067
 
1949
	tmp = gb_addr_config & NUM_PIPES_MASK;
Line 2093... Line 1975...
2093
	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
1975
	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
2094
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
1976
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
2095
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
1977
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
2096
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
1978
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
Line -... Line 1979...
-
 
1979
 
-
 
1980
	if (rdev->family <= CHIP_SUMO2)
-
 
1981
		WREG32(SMX_SAR_CTL0, 0x00010000);
2097
 
1982
 
2098
	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
1983
	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
2099
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
1984
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
Line 2100... Line 1985...
2100
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
1985
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
Line 2258... Line 2143...
2258
	u32 tmp;
2143
	u32 tmp;
2259
	int chansize, numchan;
2144
	int chansize, numchan;
Line 2260... Line 2145...
2260
 
2145
 
2261
	/* Get VRAM informations */
2146
	/* Get VRAM informations */
2262
	rdev->mc.vram_is_ddr = true;
2147
	rdev->mc.vram_is_ddr = true;
-
 
2148
	if ((rdev->family == CHIP_PALM) ||
-
 
2149
	    (rdev->family == CHIP_SUMO) ||
2263
	if (rdev->flags & RADEON_IS_IGP)
2150
	    (rdev->family == CHIP_SUMO2))
2264
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
2151
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
2265
	else
2152
	else
2266
	tmp = RREG32(MC_ARB_RAMCFG);
2153
	tmp = RREG32(MC_ARB_RAMCFG);
2267
	if (tmp & CHANSIZE_OVERRIDE) {
2154
	if (tmp & CHANSIZE_OVERRIDE) {
Line 2290... Line 2177...
2290
	rdev->mc.vram_width = numchan * chansize;
2177
	rdev->mc.vram_width = numchan * chansize;
2291
	/* Could aper size report 0 ? */
2178
	/* Could aper size report 0 ? */
2292
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
2179
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
2293
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
2180
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
2294
	/* Setup GPU memory space */
2181
	/* Setup GPU memory space */
2295
	if (rdev->flags & RADEON_IS_IGP) {
2182
	if ((rdev->family == CHIP_PALM) ||
-
 
2183
	    (rdev->family == CHIP_SUMO) ||
-
 
2184
	    (rdev->family == CHIP_SUMO2)) {
2296
		/* size in bytes on fusion */
2185
		/* size in bytes on fusion */
2297
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
2186
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
2298
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
2187
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
2299
	} else {
2188
	} else {
2300
	/* size in MB on evergreen */
2189
		/* size in MB on evergreen/cayman/tn */
2301
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
2190
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
2302
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
2191
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
2303
	}
2192
	}
2304
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
2193
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
2305
	r700_vram_gtt_location(rdev, &rdev->mc);
2194
	r700_vram_gtt_location(rdev, &rdev->mc);
2306
	radeon_update_bandwidth_info(rdev);
2195
	radeon_update_bandwidth_info(rdev);
Line 2307... Line 2196...
2307
 
2196
 
2308
	return 0;
2197
	return 0;
Line 2309... Line 2198...
2309
}
2198
}
2310
 
2199
 
2311
bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
2200
bool evergreen_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
2312
{
2201
{
2313
	u32 srbm_status;
2202
	u32 srbm_status;
2314
	u32 grbm_status;
-
 
2315
	u32 grbm_status_se0, grbm_status_se1;
-
 
Line 2316... Line 2203...
2316
	struct r100_gpu_lockup *lockup = &rdev->config.evergreen.lockup;
2203
	u32 grbm_status;
2317
	int r;
2204
	u32 grbm_status_se0, grbm_status_se1;
2318
 
2205
 
2319
	srbm_status = RREG32(SRBM_STATUS);
2206
	srbm_status = RREG32(SRBM_STATUS);
2320
	grbm_status = RREG32(GRBM_STATUS);
2207
	grbm_status = RREG32(GRBM_STATUS);
2321
	grbm_status_se0 = RREG32(GRBM_STATUS_SE0);
2208
	grbm_status_se0 = RREG32(GRBM_STATUS_SE0);
2322
	grbm_status_se1 = RREG32(GRBM_STATUS_SE1);
2209
	grbm_status_se1 = RREG32(GRBM_STATUS_SE1);
2323
	if (!(grbm_status & GUI_ACTIVE)) {
2210
	if (!(grbm_status & GUI_ACTIVE)) {
2324
		r100_gpu_lockup_update(lockup, &rdev->cp);
2211
		radeon_ring_lockup_update(ring);
2325
	return false;
-
 
2326
	}
-
 
2327
	/* force CP activities */
-
 
2328
	r = radeon_ring_lock(rdev, 2);
-
 
2329
	if (!r) {
-
 
2330
		/* PACKET2 NOP */
2212
	return false;
2331
		radeon_ring_write(rdev, 0x80000000);
-
 
2332
		radeon_ring_write(rdev, 0x80000000);
-
 
2333
		radeon_ring_unlock_commit(rdev);
2213
	}
2334
	}
2214
	/* force CP activities */
Line 2335... Line 2215...
2335
	rdev->cp.rptr = RREG32(CP_RB_RPTR);
2215
	radeon_ring_force_activity(rdev, ring);
2336
	return r100_gpu_cp_is_lockup(rdev, lockup, &rdev->cp);
2216
	return radeon_ring_test_lockup(rdev, ring);
2337
}
2217
}
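
The rewritten lockup test above routes everything through the per-ring helpers instead of the r100 CP-specific ones. A condensed sketch of the new flow, using only the calls visible in the right-hand column:

static bool lockup_check_sketch(struct radeon_device *rdev, struct radeon_ring *ring)
{
	if (!(RREG32(GRBM_STATUS) & GUI_ACTIVE)) {
		/* GPU is idle: refresh the tracker and report no lockup */
		radeon_ring_lockup_update(ring);
		return false;
	}
	/* GPU busy: poke the CP, then let the generic per-ring test decide */
	radeon_ring_force_activity(rdev, ring);
	return radeon_ring_test_lockup(rdev, ring);
}
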
Line 2351... Line 2231...
2351
		RREG32(GRBM_STATUS_SE0));
2231
		RREG32(GRBM_STATUS_SE0));
2352
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
2232
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
2353
		RREG32(GRBM_STATUS_SE1));
2233
		RREG32(GRBM_STATUS_SE1));
2354
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
2234
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
2355
		RREG32(SRBM_STATUS));
2235
		RREG32(SRBM_STATUS));
-
 
2236
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
-
 
2237
		RREG32(CP_STALLED_STAT1));
-
 
2238
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
-
 
2239
		RREG32(CP_STALLED_STAT2));
-
 
2240
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
-
 
2241
		RREG32(CP_BUSY_STAT));
-
 
2242
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
-
 
2243
		RREG32(CP_STAT));
2356
	evergreen_mc_stop(rdev, &save);
2244
	evergreen_mc_stop(rdev, &save);
2357
	if (evergreen_mc_wait_for_idle(rdev)) {
2245
	if (evergreen_mc_wait_for_idle(rdev)) {
2358
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2246
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2359
	}
2247
	}
2360
	/* Disable CP parsing/prefetching */
2248
	/* Disable CP parsing/prefetching */
Line 2388... Line 2276...
2388
		RREG32(GRBM_STATUS_SE0));
2276
		RREG32(GRBM_STATUS_SE0));
2389
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
2277
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
2390
		RREG32(GRBM_STATUS_SE1));
2278
		RREG32(GRBM_STATUS_SE1));
2391
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
2279
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
2392
		RREG32(SRBM_STATUS));
2280
		RREG32(SRBM_STATUS));
-
 
2281
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
-
 
2282
		RREG32(CP_STALLED_STAT1));
-
 
2283
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
-
 
2284
		RREG32(CP_STALLED_STAT2));
-
 
2285
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
-
 
2286
		RREG32(CP_BUSY_STAT));
-
 
2287
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
-
 
2288
		RREG32(CP_STAT));
2393
	evergreen_mc_resume(rdev, &save);
2289
	evergreen_mc_resume(rdev, &save);
2394
	return 0;
2290
	return 0;
2395
}
2291
}
Line 2396... Line 2292...
2396
 
2292
 
Line 2401... Line 2297...
2401
 
2297
 
Line 2402... Line 2298...
2402
/* Interrupts */
2298
/* Interrupts */
2403
 
2299
 
2404
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
2300
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
2405
{
-
 
2406
	switch (crtc) {
-
 
2407
	case 0:
-
 
2408
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
-
 
2409
	case 1:
-
 
2410
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
-
 
2411
	case 2:
-
 
2412
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
-
 
2413
	case 3:
-
 
2414
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
-
 
2415
	case 4:
-
 
2416
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
-
 
2417
	case 5:
-
 
2418
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
2301
{
2419
	default:
2302
	if (crtc >= rdev->num_crtc)
-
 
2303
	return 0;
2420
	return 0;
2304
	else
Line 2421... Line 2305...
2421
	}
2305
		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
2422
}
2306
}
2423
 
2307
 
Line -... Line 2308...
-
 
2308
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
-
 
2309
{
-
 
2310
	u32 tmp;
-
 
2311
 
-
 
2312
	if (rdev->family >= CHIP_CAYMAN) {
-
 
2313
		cayman_cp_int_cntl_setup(rdev, 0,
2424
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
2314
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
2425
{
2315
		cayman_cp_int_cntl_setup(rdev, 1, 0);
2426
	u32 tmp;
2316
		cayman_cp_int_cntl_setup(rdev, 2, 0);
2427
 
2317
	} else
2428
	WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
2318
	WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
Line 2447... Line 2337...
2447
	if (rdev->num_crtc >= 6) {
2337
	if (rdev->num_crtc >= 6) {
2448
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
2338
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
2449
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
2339
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
2450
	}
2340
	}
Line -... Line 2341...
-
 
2341
 
-
 
2342
	/* only one DAC on DCE6 */
2451
 
2343
	if (!ASIC_IS_DCE6(rdev))
2452
	WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
2344
	WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
Line 2453... Line 2345...
2453
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
2345
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
2454
 
2346
 
Line 2468... Line 2360...
2468
}
2360
}
Line 2469... Line 2361...
2469
 
2361
 
2470
int evergreen_irq_set(struct radeon_device *rdev)
2362
int evergreen_irq_set(struct radeon_device *rdev)
2471
{
2363
{
-
 
2364
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
2472
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
2365
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
2473
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
2366
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
2474
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
2367
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
2475
	u32 grbm_int_cntl = 0;
2368
	u32 grbm_int_cntl = 0;
-
 
2369
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
Line 2476... Line 2370...
2476
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
2370
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
2477
 
2371
 
2478
	if (!rdev->irq.installed) {
2372
	if (!rdev->irq.installed) {
2479
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
2373
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
Line 2492... Line 2386...
2492
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
2386
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
2493
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
2387
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
2494
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
2388
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
2495
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
2389
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
Line -... Line 2390...
-
 
2390
 
-
 
2391
	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
-
 
2392
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
-
 
2393
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
-
 
2394
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
-
 
2395
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
-
 
2396
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
2496
 
2397
 
-
 
2398
	if (rdev->family >= CHIP_CAYMAN) {
-
 
2399
		/* enable CP interrupts on all rings */
-
 
2400
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
-
 
2401
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
-
 
2402
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
-
 
2403
		}
-
 
2404
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
-
 
2405
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
-
 
2406
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
-
 
2407
		}
-
 
2408
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
-
 
2409
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
-
 
2410
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
-
 
2411
		}
-
 
2412
	} else {
2497
	if (rdev->irq.sw_int) {
2413
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
2498
		DRM_DEBUG("evergreen_irq_set: sw int\n");
2414
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
2499
		cp_int_cntl |= RB_INT_ENABLE;
2415
		cp_int_cntl |= RB_INT_ENABLE;
2500
		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
2416
		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
-
 
2417
	}
-
 
2418
	}
2501
	}
2419
 
2502
	if (rdev->irq.crtc_vblank_int[0] ||
2420
	if (rdev->irq.crtc_vblank_int[0] ||
2503
	    rdev->irq.pflip[0]) {
2421
	    atomic_read(&rdev->irq.pflip[0])) {
2504
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
2422
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
2505
		crtc1 |= VBLANK_INT_MASK;
2423
		crtc1 |= VBLANK_INT_MASK;
2506
	}
2424
	}
2507
	if (rdev->irq.crtc_vblank_int[1] ||
2425
	if (rdev->irq.crtc_vblank_int[1] ||
2508
	    rdev->irq.pflip[1]) {
2426
	    atomic_read(&rdev->irq.pflip[1])) {
2509
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
2427
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
2510
		crtc2 |= VBLANK_INT_MASK;
2428
		crtc2 |= VBLANK_INT_MASK;
2511
	}
2429
	}
2512
	if (rdev->irq.crtc_vblank_int[2] ||
2430
	if (rdev->irq.crtc_vblank_int[2] ||
2513
	    rdev->irq.pflip[2]) {
2431
	    atomic_read(&rdev->irq.pflip[2])) {
2514
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
2432
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
2515
		crtc3 |= VBLANK_INT_MASK;
2433
		crtc3 |= VBLANK_INT_MASK;
2516
	}
2434
	}
2517
	if (rdev->irq.crtc_vblank_int[3] ||
2435
	if (rdev->irq.crtc_vblank_int[3] ||
2518
	    rdev->irq.pflip[3]) {
2436
	    atomic_read(&rdev->irq.pflip[3])) {
2519
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
2437
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
2520
		crtc4 |= VBLANK_INT_MASK;
2438
		crtc4 |= VBLANK_INT_MASK;
2521
	}
2439
	}
2522
	if (rdev->irq.crtc_vblank_int[4] ||
2440
	if (rdev->irq.crtc_vblank_int[4] ||
2523
	    rdev->irq.pflip[4]) {
2441
	    atomic_read(&rdev->irq.pflip[4])) {
2524
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
2442
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
2525
		crtc5 |= VBLANK_INT_MASK;
2443
		crtc5 |= VBLANK_INT_MASK;
2526
	}
2444
	}
2527
	if (rdev->irq.crtc_vblank_int[5] ||
2445
	if (rdev->irq.crtc_vblank_int[5] ||
2528
	    rdev->irq.pflip[5]) {
2446
	    atomic_read(&rdev->irq.pflip[5])) {
2529
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
2447
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
2530
		crtc6 |= VBLANK_INT_MASK;
2448
		crtc6 |= VBLANK_INT_MASK;
2531
	}
2449
	}
2532
	if (rdev->irq.hpd[0]) {
2450
	if (rdev->irq.hpd[0]) {
Line 2551... Line 2469...
2551
	}
2469
	}
2552
	if (rdev->irq.hpd[5]) {
2470
	if (rdev->irq.hpd[5]) {
2553
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
2471
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
2554
		hpd6 |= DC_HPDx_INT_EN;
2472
		hpd6 |= DC_HPDx_INT_EN;
2555
	}
2473
	}
2556
	if (rdev->irq.gui_idle) {
2474
	if (rdev->irq.afmt[0]) {
-
 
2475
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
-
 
2476
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
-
 
2477
	}
-
 
2478
	if (rdev->irq.afmt[1]) {
-
 
2479
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
-
 
2480
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
-
 
2481
	}
-
 
2482
	if (rdev->irq.afmt[2]) {
2557
		DRM_DEBUG("gui idle\n");
2483
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
-
 
2484
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
-
 
2485
	}
-
 
2486
	if (rdev->irq.afmt[3]) {
-
 
2487
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
-
 
2488
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
-
 
2489
	}
-
 
2490
	if (rdev->irq.afmt[4]) {
-
 
2491
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
-
 
2492
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
-
 
2493
	}
-
 
2494
	if (rdev->irq.afmt[5]) {
-
 
2495
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
2558
		grbm_int_cntl |= GUI_IDLE_INT_ENABLE;
2496
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
2559
	}
2497
	}
2560
 
2498
 
-
 
2499
	if (rdev->family >= CHIP_CAYMAN) {
-
 
2500
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
-
 
2501
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
-
 
2502
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
-
 
2503
	} else
2561
	WREG32(CP_INT_CNTL, cp_int_cntl);
2504
	WREG32(CP_INT_CNTL, cp_int_cntl);
2562
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
2505
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
Line 2563... Line 2506...
2563
 
2506
 
2564
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
2507
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
Line 2588... Line 2531...
2588
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
2531
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
2589
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
2532
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
2590
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
2533
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
2591
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
2534
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
Line -... Line 2535...
-
 
2535
 
-
 
2536
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
-
 
2537
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
-
 
2538
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
-
 
2539
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
-
 
2540
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
-
 
2541
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
2592
 
2542
 
2593
	return 0;
2543
	return 0;
Line 2594... Line 2544...
2594
}
2544
}
2595
 
2545
 
2596
static inline void evergreen_irq_ack(struct radeon_device *rdev)
2546
static void evergreen_irq_ack(struct radeon_device *rdev)
Line 2597... Line 2547...
2597
{
2547
{
2598
	u32 tmp;
2548
	u32 tmp;
Line 2612... Line 2562...
2612
	if (rdev->num_crtc >= 6) {
2562
	if (rdev->num_crtc >= 6) {
2613
		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
2563
		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
2614
		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
2564
		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
2615
	}
2565
	}
Line -... Line 2566...
-
 
2566
 
-
 
2567
	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
-
 
2568
	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
-
 
2569
	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
-
 
2570
	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
-
 
2571
	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
-
 
2572
	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
2616
 
2573
 
2617
	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
2574
	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
2618
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2575
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2619
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
2576
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
2620
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2577
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
Line 2685... Line 2642...
2685
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
2642
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
2686
		tmp = RREG32(DC_HPD5_INT_CONTROL);
2643
		tmp = RREG32(DC_HPD5_INT_CONTROL);
2687
		tmp |= DC_HPDx_INT_ACK;
2644
		tmp |= DC_HPDx_INT_ACK;
2688
		WREG32(DC_HPD6_INT_CONTROL, tmp);
2645
		WREG32(DC_HPD6_INT_CONTROL, tmp);
2689
	}
2646
	}
-
 
2647
	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2648
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
-
 
2649
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
-
 
2650
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
-
 
2651
	}
-
 
2652
	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2653
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
-
 
2654
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
-
 
2655
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
-
 
2656
	}
-
 
2657
	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2658
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
-
 
2659
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
-
 
2660
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
-
 
2661
	}
-
 
2662
	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2663
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
-
 
2664
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
-
 
2665
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
-
 
2666
	}
-
 
2667
	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2668
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
-
 
2669
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
-
 
2670
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
-
 
2671
	}
-
 
2672
	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2673
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
-
 
2674
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
-
 
2675
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
-
 
2676
	}
-
 
2677
}
-
 
2678
 
-
 
2679
static void evergreen_irq_disable(struct radeon_device *rdev)
-
 
2680
{
-
 
2681
	r600_disable_interrupts(rdev);
-
 
2682
	/* Wait and acknowledge irq */
-
 
2683
	mdelay(1);
-
 
2684
	evergreen_irq_ack(rdev);
-
 
2685
	evergreen_disable_interrupt_state(rdev);
-
 
2686
}
-
 
2687
 
-
 
2688
void evergreen_irq_suspend(struct radeon_device *rdev)
-
 
2689
{
-
 
2690
	evergreen_irq_disable(rdev);
-
 
2691
	r600_rlc_stop(rdev);
2690
}
2692
}
-
 
2693
 
2691
static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
2694
static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
2692
{
2695
{
2693
	u32 wptr, tmp;
2696
	u32 wptr, tmp;
Line 2694... Line 2697...
2694
 
2697
 
2695
	if (rdev->wb.enabled)
2698
	if (rdev->wb.enabled)
Line 2716... Line 2719...
2716
{
2719
{
2717
	u32 wptr;
2720
	u32 wptr;
2718
	u32 rptr;
2721
	u32 rptr;
2719
	u32 src_id, src_data;
2722
	u32 src_id, src_data;
2720
	u32 ring_index;
2723
	u32 ring_index;
2721
	unsigned long flags;
-
 
2722
	bool queue_hotplug = false;
2724
	bool queue_hotplug = false;
-
 
2725
	bool queue_hdmi = false;
Line 2723... Line 2726...
2723
 
2726
 
2724
	if (!rdev->ih.enabled || rdev->shutdown)
2727
	if (!rdev->ih.enabled || rdev->shutdown)
Line 2725... Line 2728...
2725
		return IRQ_NONE;
2728
		return IRQ_NONE;
-
 
2729
 
-
 
2730
	wptr = evergreen_get_ih_wptr(rdev);
-
 
2731
 
-
 
2732
restart_ih:
-
 
2733
	/* is somebody else already processing irqs? */
-
 
2734
	if (atomic_xchg(&rdev->ih.lock, 1))
2726
 
2735
		return IRQ_NONE;
2727
	wptr = evergreen_get_ih_wptr(rdev);
2736
 
Line 2728... Line -...
2728
	rptr = rdev->ih.rptr;
-
 
2729
	DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
-
 
2730
 
-
 
2731
	spin_lock_irqsave(&rdev->ih.lock, flags);
-
 
2732
	if (rptr == wptr) {
-
 
2733
		spin_unlock_irqrestore(&rdev->ih.lock, flags);
-
 
2734
		return IRQ_NONE;
2737
	rptr = rdev->ih.rptr;
2735
	}
2738
	DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
Line 2736... Line 2739...
2736
restart_ih:
2739
 
2737
	/* Order reading of wptr vs. reading of IH ring data */
2740
	/* Order reading of wptr vs. reading of IH ring data */
Line 2738... Line -...
2738
	rmb();
-
 
2739
 
2741
	rmb();
2740
	/* display interrupts */
2742
 
2741
	evergreen_irq_ack(rdev);
2743
	/* display interrupts */
2742
 
2744
	evergreen_irq_ack(rdev);
2743
	rdev->ih.wptr = wptr;
2745
 
Line 2951... Line 2953...
2951
			default:
2953
			default:
2952
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2954
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2953
				break;
2955
				break;
2954
			}
2956
			}
2955
			break;
2957
			break;
-
 
2958
		case 44: /* hdmi */
-
 
2959
			switch (src_data) {
-
 
2960
			case 0:
-
 
2961
				if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2962
					rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
-
 
2963
					queue_hdmi = true;
-
 
2964
					DRM_DEBUG("IH: HDMI0\n");
-
 
2965
				}
-
 
2966
				break;
-
 
2967
			case 1:
-
 
2968
				if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2969
					rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
-
 
2970
					queue_hdmi = true;
-
 
2971
					DRM_DEBUG("IH: HDMI1\n");
-
 
2972
				}
-
 
2973
				break;
-
 
2974
			case 2:
-
 
2975
				if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2976
					rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
-
 
2977
					queue_hdmi = true;
-
 
2978
					DRM_DEBUG("IH: HDMI2\n");
-
 
2979
				}
-
 
2980
				break;
-
 
2981
			case 3:
-
 
2982
				if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2983
					rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
-
 
2984
					queue_hdmi = true;
-
 
2985
					DRM_DEBUG("IH: HDMI3\n");
-
 
2986
				}
-
 
2987
				break;
-
 
2988
			case 4:
-
 
2989
				if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2990
					rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
-
 
2991
					queue_hdmi = true;
-
 
2992
					DRM_DEBUG("IH: HDMI4\n");
-
 
2993
				}
-
 
2994
				break;
-
 
2995
			case 5:
-
 
2996
				if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
-
 
2997
					rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
-
 
2998
					queue_hdmi = true;
-
 
2999
					DRM_DEBUG("IH: HDMI5\n");
-
 
3000
				}
-
 
3001
				break;
-
 
3002
			default:
-
 
3003
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
-
 
3004
				break;
-
 
3005
			}
-
 
3006
			break;
2956
		case 176: /* CP_INT in ring buffer */
3007
		case 176: /* CP_INT in ring buffer */
2957
		case 177: /* CP_INT in IB1 */
3008
		case 177: /* CP_INT in IB1 */
2958
		case 178: /* CP_INT in IB2 */
3009
		case 178: /* CP_INT in IB2 */
2959
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
3010
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
2960
			radeon_fence_process(rdev);
3011
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
2961
			break;
3012
			break;
2962
		case 181: /* CP EOP event */
3013
		case 181: /* CP EOP event */
2963
			DRM_DEBUG("IH: CP EOP\n");
3014
			DRM_DEBUG("IH: CP EOP\n");
-
 
3015
			if (rdev->family >= CHIP_CAYMAN) {
-
 
3016
				switch (src_data) {
-
 
3017
				case 0:
-
 
3018
					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
-
 
3019
					break;
-
 
3020
				case 1:
-
 
3021
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
-
 
3022
					break;
-
 
3023
				case 2:
-
 
3024
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
-
 
3025
					break;
-
 
3026
				}
-
 
3027
			} else
2964
			radeon_fence_process(rdev);
3028
				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
2965
			break;
3029
			break;
2966
		case 233: /* GUI IDLE */
3030
		case 233: /* GUI IDLE */
2967
			DRM_DEBUG("IH: GUI idle\n");
3031
			DRM_DEBUG("IH: GUI idle\n");
2968
			rdev->pm.gui_idle = true;
-
 
2969
//			wake_up(&rdev->irq.idle_queue);
-
 
2970
			break;
3032
			break;
2971
		default:
3033
		default:
2972
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
3034
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2973
			break;
3035
			break;
2974
		}
3036
		}
Line 2975... Line 3037...
2975
 
3037
 
2976
		/* wptr/rptr are in bytes! */
3038
		/* wptr/rptr are in bytes! */
2977
		rptr += 16;
3039
		rptr += 16;
2978
		rptr &= rdev->ih.ptr_mask;
3040
		rptr &= rdev->ih.ptr_mask;
-
 
3041
	}
-
 
3042
	rdev->ih.rptr = rptr;
-
 
3043
	WREG32(IH_RB_RPTR, rdev->ih.rptr);
-
 
3044
	atomic_set(&rdev->ih.lock, 0);
2979
	}
3045
 
2980
	/* make sure wptr hasn't changed while processing */
3046
	/* make sure wptr hasn't changed while processing */
2981
	wptr = evergreen_get_ih_wptr(rdev);
3047
	wptr = evergreen_get_ih_wptr(rdev);
2982
	if (wptr != rdev->ih.wptr)
3048
	if (wptr != rptr)
2983
		goto restart_ih;
-
 
2984
//	if (queue_hotplug)
-
 
2985
//		schedule_work(&rdev->hotplug_work);
-
 
2986
	rdev->ih.rptr = rptr;
-
 
2987
	WREG32(IH_RB_RPTR, rdev->ih.rptr);
-
 
-
 
3049
		goto restart_ih;
2988
	spin_unlock_irqrestore(&rdev->ih.lock, flags);
3050
 
2989
	return IRQ_HANDLED;
3051
	return IRQ_HANDLED;
Line 2990... Line 3052...
2990
}
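
The reworked interrupt handler above replaces the spinlock with an atomic_xchg() guard and re-reads the write pointer after draining the ring. A compact sketch of that control flow (per-vector decoding elided; a sketch, not the full handler):

static int ih_process_sketch(struct radeon_device *rdev)
{
	u32 wptr = evergreen_get_ih_wptr(rdev);
	u32 rptr;

restart_ih:
	/* is somebody else already processing irqs? */
	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;

	rptr = rdev->ih.rptr;
	rmb();			/* order wptr read vs. reading ring data */

	while (rptr != wptr) {
		/* ... decode one 16-byte IH vector at rptr ... */
		rptr = (rptr + 16) & rdev->ih.ptr_mask;
	}

	rdev->ih.rptr = rptr;
	WREG32(IH_RB_RPTR, rptr);
	atomic_set(&rdev->ih.lock, 0);

	/* make sure wptr hasn't changed while processing */
	wptr = evergreen_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}
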
3052
}
2991
 
3053
 
-
 
3054
static int evergreen_startup(struct radeon_device *rdev)
2992
static int evergreen_startup(struct radeon_device *rdev)
3055
{
Line 2993... Line 3056...
2993
{
3056
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2994
	int r;
-
 
2995
 
3057
	int r;
Line 2996... Line 3058...
2996
	/* enable pcie gen2 link */
3058
 
2997
	if (!ASIC_IS_DCE5(rdev))
3059
	/* enable pcie gen2 link */
2998
		evergreen_pcie_gen2_enable(rdev);
3060
		evergreen_pcie_gen2_enable(rdev);
Line 3018... Line 3080...
3018
			return r;
3080
			return r;
3019
		}
3081
		}
3020
	}
3082
	}
3021
	}
3083
	}
Line -... Line 3084...
-
 
3084
 
-
 
3085
	r = r600_vram_scratch_init(rdev);
-
 
3086
	if (r)
-
 
3087
		return r;
3022
 
3088
 
3023
	evergreen_mc_program(rdev);
3089
	evergreen_mc_program(rdev);
3024
	if (rdev->flags & RADEON_IS_AGP) {
3090
	if (rdev->flags & RADEON_IS_AGP) {
3025
		evergreen_agp_enable(rdev);
3091
		evergreen_agp_enable(rdev);
3026
	} else {
3092
	} else {
Line 3030... Line 3096...
3030
	}
3096
	}
3031
	evergreen_gpu_init(rdev);
3097
	evergreen_gpu_init(rdev);
Line 3032... Line 3098...
3032
 
3098
 
3033
	r = evergreen_blit_init(rdev);
3099
	r = evergreen_blit_init(rdev);
3034
	if (r) {
3100
	if (r) {
3035
		evergreen_blit_fini(rdev);
3101
//       r600_blit_fini(rdev);
3036
		rdev->asic->copy = NULL;
3102
		rdev->asic->copy.copy = NULL;
3037
		dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
3103
		dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
Line 3038... Line 3104...
3038
	}
3104
	}
3039
 
3105
 
Line 3049... Line 3115...
3049
//		radeon_irq_kms_fini(rdev);
3115
//		radeon_irq_kms_fini(rdev);
3050
		return r;
3116
		return r;
3051
	}
3117
	}
3052
	evergreen_irq_set(rdev);
3118
	evergreen_irq_set(rdev);
Line 3053... Line 3119...
3053
 
3119
 
-
 
3120
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
-
 
3121
			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
3054
    r = radeon_ring_init(rdev, rdev->cp.ring_size);
3122
			     0, 0xfffff, RADEON_CP_PACKET2);
3055
	if (r)
3123
	if (r)
3056
		return r;
3124
		return r;
3057
	r = evergreen_cp_load_microcode(rdev);
3125
	r = evergreen_cp_load_microcode(rdev);
3058
	if (r)
3126
	if (r)
Line 3064... Line 3132...
3064
	return 0;
3132
	return 0;
3065
}
3133
}
Line 3066... Line 3134...
3066
 
3134
 
Line 3067... Line 3135...
3067
 
3135
 
3068
 
3136
 
3069
 
3137
#if 0
3070
 
3138
 
Line 3086... Line 3154...
3086
	evergreen_kms_blit_copy(rdev, src_offset, dst_offset, num_pages * RADEON_GPU_PAGE_SIZE);
3154
	evergreen_kms_blit_copy(rdev, src_offset, dst_offset, num_pages * RADEON_GPU_PAGE_SIZE);
3087
	evergreen_blit_done_copy(rdev, fence);
3155
	evergreen_blit_done_copy(rdev, fence);
3088
	mutex_unlock(&rdev->r600_blit.mutex);
3156
	mutex_unlock(&rdev->r600_blit.mutex);
3089
	return 0;
3157
	return 0;
3090
}
3158
}
-
 
3159
#endif
Line 3091... Line 3160...
3091
 
3160
 
3092
/* Plan is to move initialization in that function and use
3161
/* Plan is to move initialization in that function and use
3093
 * helper function so that radeon_device_init pretty much
3162
 * helper function so that radeon_device_init pretty much
3094
 * do nothing more than calling asic specific function. This
3163
 * do nothing more than calling asic specific function. This
Line 3097... Line 3166...
3097
 */
3166
 */
3098
int evergreen_init(struct radeon_device *rdev)
3167
int evergreen_init(struct radeon_device *rdev)
3099
{
3168
{
3100
	int r;
3169
	int r;
Line 3101... Line -...
3101
 
-
 
3102
	/* This don't do much */
-
 
3103
	r = radeon_gem_init(rdev);
-
 
3104
	if (r)
-
 
3105
		return r;
3170
 
3106
	/* Read BIOS */
3171
	/* Read BIOS */
3107
	if (!radeon_get_bios(rdev)) {
3172
	if (!radeon_get_bios(rdev)) {
3108
		if (ASIC_IS_AVIVO(rdev))
3173
		if (ASIC_IS_AVIVO(rdev))
3109
			return -EINVAL;
3174
			return -EINVAL;
Line 3157... Line 3222...
3157
 
3222
 
3158
	r = radeon_irq_kms_init(rdev);
3223
	r = radeon_irq_kms_init(rdev);
3159
	if (r)
3224
	if (r)
Line 3160... Line 3225...
3160
		return r;
3225
		return r;
3161
 
3226
 
Line 3162... Line 3227...
3162
	rdev->cp.ring_obj = NULL;
3227
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
3163
	r600_ring_init(rdev, 1024 * 1024);
3228
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
Line 3164... Line 3229...
3164
 
3229
 
Line 3173... Line 3238...
3173
	r = evergreen_startup(rdev);
3238
	r = evergreen_startup(rdev);
3174
	if (r) {
3239
	if (r) {
3175
		dev_err(rdev->dev, "disabling GPU acceleration\n");
3240
		dev_err(rdev->dev, "disabling GPU acceleration\n");
3176
		rdev->accel_working = false;
3241
		rdev->accel_working = false;
3177
	}
3242
	}
-
 
3243
 
3178
	if (rdev->accel_working) {
3244
	/* Don't start up if the MC ucode is missing on BTC parts.
3179
		r = radeon_ib_pool_init(rdev);
3245
	 * The default clocks and voltages before the MC ucode
3180
		if (r) {
-
 
3181
			DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
3246
	 * is loaded are not suffient for advanced operations.
3182
			rdev->accel_working = false;
-
 
3183
		}
3247
	 */
3184
		r = r600_ib_test(rdev);
3248
	if (ASIC_IS_DCE5(rdev)) {
3185
		if (r) {
3249
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
3186
			DRM_ERROR("radeon: failed testing IB (%d).\n", r);
3250
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
3187
			rdev->accel_working = false;
3251
			return -EINVAL;
3188
		}
3252
		}
3189
	}
3253
	}
-
 
3254
 
3190
	return 0;
3255
	return 0;
3191
}
3256
}
Line 3192... Line 3257...
3192
 
3257
 
3193
 
3258
 
3194
static void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
3259
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
-
 
3260
{
Line 3195... Line 3261...
3195
{
3261
	u32 link_width_cntl, speed_cntl, mask;
3196
	u32 link_width_cntl, speed_cntl;
3262
	int ret;
Line 3197... Line 3263...
3197
 
3263
 
Line 3206... Line 3272...
3206
 
3272
 
3207
	/* x2 cards have a special sequence */
3273
	/* x2 cards have a special sequence */
3208
	if (ASIC_IS_X2(rdev))
3274
	if (ASIC_IS_X2(rdev))
Line -... Line 3275...
-
 
3275
		return;
-
 
3276
 
-
 
3277
	ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
-
 
3278
	if (ret != 0)
-
 
3279
		return;
-
 
3280
 
-
 
3281
	if (!(mask & DRM_PCIE_SPEED_50))
3209
		return;
3282
		return;
-
 
3283
 
-
 
3284
	speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
-
 
3285
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
-
 
3286
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
-
 
3287
		return;
-
 
3288
	}
-
 
3289
 
3210
 
3290
	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
3211
	speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3291
 
Line 3212... Line 3292...
3212
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
3292
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
3213
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3293
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {