Subversion Repositories Kolibri OS


Rev 1128 → Rev 1129 — the two revisions differ only in radeon_atombios_init_crtc (see the note near the end of the listing).
/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include "radeon_drm.h"
#include "radeon_fixed.h"
#include "radeon.h"
#include "atom.h"
#include "atom-bits.h"

static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
{
    struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
    struct drm_device *dev = crtc->dev;
    struct radeon_device *rdev = dev->dev_private;
    int index =
        GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
    ENABLE_CRTC_PS_ALLOCATION args;

    memset(&args, 0, sizeof(args));

    args.ucCRTC = radeon_crtc->crtc_id;
    args.ucEnable = lock;

    atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
{
    struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
    struct drm_device *dev = crtc->dev;
    struct radeon_device *rdev = dev->dev_private;
    int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
    ENABLE_CRTC_PS_ALLOCATION args;

    memset(&args, 0, sizeof(args));

    args.ucCRTC = radeon_crtc->crtc_id;
    args.ucEnable = state;

    atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
{
    struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
    struct drm_device *dev = crtc->dev;
    struct radeon_device *rdev = dev->dev_private;
    int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
    ENABLE_CRTC_PS_ALLOCATION args;

    memset(&args, 0, sizeof(args));

    args.ucCRTC = radeon_crtc->crtc_id;
    args.ucEnable = state;

    atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
{
    struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
    struct drm_device *dev = crtc->dev;
    struct radeon_device *rdev = dev->dev_private;
    int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
    BLANK_CRTC_PS_ALLOCATION args;

    memset(&args, 0, sizeof(args));

    args.ucCRTC = radeon_crtc->crtc_id;
    args.ucBlanking = state;

    atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
{
    struct drm_device *dev = crtc->dev;
    struct radeon_device *rdev = dev->dev_private;

    switch (mode) {
    case DRM_MODE_DPMS_ON:
        if (ASIC_IS_DCE3(rdev))
            atombios_enable_crtc_memreq(crtc, 1);
        atombios_enable_crtc(crtc, 1);
        atombios_blank_crtc(crtc, 0);
        break;
    case DRM_MODE_DPMS_STANDBY:
    case DRM_MODE_DPMS_SUSPEND:
    case DRM_MODE_DPMS_OFF:
        atombios_blank_crtc(crtc, 1);
        atombios_enable_crtc(crtc, 0);
        if (ASIC_IS_DCE3(rdev))
            atombios_enable_crtc_memreq(crtc, 0);
        break;
    }

    if (mode != DRM_MODE_DPMS_OFF) {
        radeon_crtc_load_lut(crtc);
    }
}
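
The ordering in atombios_crtc_dpms is deliberate: powering up re-enables memory requests first (DCE3 parts only), then the CRTC, and unblanks last; powering down blanks first and mirrors the sequence in reverse. A trivial standalone trace of that ordering, with printfs standing in for the atombios_* helpers (illustrative sketch only, not driver code):

#include <stdio.h>

/* "on" mimics the DPMS mode, "dce3" mimics ASIC_IS_DCE3(). */
static void example_dpms(int on, int dce3)
{
    if (on) {
        if (dce3)
            puts("enable memreq");
        puts("enable crtc");
        puts("unblank crtc");
    } else {
        puts("blank crtc");
        puts("disable crtc");
        if (dce3)
            puts("disable memreq");
    }
}

int main(void)
{
    example_dpms(1, 1);    /* DRM_MODE_DPMS_ON on a DCE3 part */
    example_dpms(0, 1);    /* DRM_MODE_DPMS_OFF */
    return 0;
}
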

static void
atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
                 SET_CRTC_USING_DTD_TIMING_PARAMETERS * crtc_param)
{
    struct drm_device *dev = crtc->dev;
    struct radeon_device *rdev = dev->dev_private;
    SET_CRTC_USING_DTD_TIMING_PARAMETERS conv_param;
    int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);

    conv_param.usH_Size = cpu_to_le16(crtc_param->usH_Size);
    conv_param.usH_Blanking_Time =
        cpu_to_le16(crtc_param->usH_Blanking_Time);
    conv_param.usV_Size = cpu_to_le16(crtc_param->usV_Size);
    conv_param.usV_Blanking_Time =
        cpu_to_le16(crtc_param->usV_Blanking_Time);
    conv_param.usH_SyncOffset = cpu_to_le16(crtc_param->usH_SyncOffset);
    conv_param.usH_SyncWidth = cpu_to_le16(crtc_param->usH_SyncWidth);
    conv_param.usV_SyncOffset = cpu_to_le16(crtc_param->usV_SyncOffset);
    conv_param.usV_SyncWidth = cpu_to_le16(crtc_param->usV_SyncWidth);
    conv_param.susModeMiscInfo.usAccess =
        cpu_to_le16(crtc_param->susModeMiscInfo.usAccess);
    conv_param.ucCRTC = crtc_param->ucCRTC;

    printk("executing set crtc dtd timing\n");
    atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&conv_param);
}

void atombios_crtc_set_timing(struct drm_crtc *crtc,
                  SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION *
                  crtc_param)
{
    struct drm_device *dev = crtc->dev;
    struct radeon_device *rdev = dev->dev_private;
    SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION conv_param;
    int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);

    conv_param.usH_Total = cpu_to_le16(crtc_param->usH_Total);
    conv_param.usH_Disp = cpu_to_le16(crtc_param->usH_Disp);
    conv_param.usH_SyncStart = cpu_to_le16(crtc_param->usH_SyncStart);
    conv_param.usH_SyncWidth = cpu_to_le16(crtc_param->usH_SyncWidth);
    conv_param.usV_Total = cpu_to_le16(crtc_param->usV_Total);
    conv_param.usV_Disp = cpu_to_le16(crtc_param->usV_Disp);
    conv_param.usV_SyncStart = cpu_to_le16(crtc_param->usV_SyncStart);
    conv_param.usV_SyncWidth = cpu_to_le16(crtc_param->usV_SyncWidth);
    conv_param.susModeMiscInfo.usAccess =
        cpu_to_le16(crtc_param->susModeMiscInfo.usAccess);
    conv_param.ucCRTC = crtc_param->ucCRTC;
    conv_param.ucOverscanRight = crtc_param->ucOverscanRight;
    conv_param.ucOverscanLeft = crtc_param->ucOverscanLeft;
    conv_param.ucOverscanBottom = crtc_param->ucOverscanBottom;
    conv_param.ucOverscanTop = crtc_param->ucOverscanTop;
    conv_param.ucReserved = crtc_param->ucReserved;

    printk("executing set crtc timing\n");
    atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&conv_param);
}
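
Both timing helpers above copy every 16-bit field through cpu_to_le16() before handing the block to atom_execute_table(), because the AtomBIOS parameter space is little-endian regardless of host byte order. A minimal standalone sketch of what such a conversion does (illustrative only; the driver relies on the kernel's byteorder macros, not this code):

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for cpu_to_le16(): byte-swap on big-endian hosts,
 * pass through unchanged on little-endian ones. */
static uint16_t example_cpu_to_le16(uint16_t v)
{
#if defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
    return (uint16_t)((v >> 8) | (v << 8));
#else
    return v;
#endif
}

int main(void)
{
    uint16_t h_total = 1344;    /* hypothetical horizontal total */
    printf("stored in parameter block as 0x%04x\n",
           example_cpu_to_le16(h_total));
    return 0;
}
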

void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
{
    struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
    struct drm_device *dev = crtc->dev;
    struct radeon_device *rdev = dev->dev_private;
    struct drm_encoder *encoder = NULL;
    struct radeon_encoder *radeon_encoder = NULL;
    uint8_t frev, crev;
    int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
    SET_PIXEL_CLOCK_PS_ALLOCATION args;
    PIXEL_CLOCK_PARAMETERS *spc1_ptr;
    PIXEL_CLOCK_PARAMETERS_V2 *spc2_ptr;
    PIXEL_CLOCK_PARAMETERS_V3 *spc3_ptr;
    uint32_t sclock = mode->clock;
    uint32_t ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
    struct radeon_pll *pll;
    int pll_flags = 0;

    memset(&args, 0, sizeof(args));

    if (ASIC_IS_AVIVO(rdev)) {
        uint32_t ss_cntl;

        if (ASIC_IS_DCE32(rdev) && mode->clock > 200000)    /* range limits??? */
            pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
        else
            pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;

        /* disable spread spectrum clocking for now -- thanks Hedy Lamarr */
        if (radeon_crtc->crtc_id == 0) {
            ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
            WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl & ~1);
        } else {
            ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
            WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl & ~1);
        }
    } else {
        pll_flags |= RADEON_PLL_LEGACY;

        if (mode->clock > 200000)    /* range limits??? */
            pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
        else
            pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;

    }

    list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
        if (encoder->crtc == crtc) {
            if (!ASIC_IS_AVIVO(rdev)) {
                if (encoder->encoder_type !=
                    DRM_MODE_ENCODER_DAC)
                    pll_flags |= RADEON_PLL_NO_ODD_POST_DIV;
                if (!ASIC_IS_AVIVO(rdev)
                    && (encoder->encoder_type ==
                    DRM_MODE_ENCODER_LVDS))
                    pll_flags |= RADEON_PLL_USE_REF_DIV;
            }
            radeon_encoder = to_radeon_encoder(encoder);
        }
    }

    if (radeon_crtc->crtc_id == 0)
        pll = &rdev->clock.p1pll;
    else
        pll = &rdev->clock.p2pll;

    radeon_compute_pll(pll, mode->clock, &sclock, &fb_div, &frac_fb_div,
               &ref_div, &post_div, pll_flags);

    atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
                  &crev);

    switch (frev) {
    case 1:
        switch (crev) {
        case 1:
            spc1_ptr = (PIXEL_CLOCK_PARAMETERS *) & args.sPCLKInput;
            spc1_ptr->usPixelClock = cpu_to_le16(sclock);
            spc1_ptr->usRefDiv = cpu_to_le16(ref_div);
            spc1_ptr->usFbDiv = cpu_to_le16(fb_div);
            spc1_ptr->ucFracFbDiv = frac_fb_div;
            spc1_ptr->ucPostDiv = post_div;
            spc1_ptr->ucPpll =
                radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1;
            spc1_ptr->ucCRTC = radeon_crtc->crtc_id;
            spc1_ptr->ucRefDivSrc = 1;
            break;
        case 2:
            spc2_ptr =
                (PIXEL_CLOCK_PARAMETERS_V2 *) & args.sPCLKInput;
            spc2_ptr->usPixelClock = cpu_to_le16(sclock);
            spc2_ptr->usRefDiv = cpu_to_le16(ref_div);
            spc2_ptr->usFbDiv = cpu_to_le16(fb_div);
            spc2_ptr->ucFracFbDiv = frac_fb_div;
            spc2_ptr->ucPostDiv = post_div;
            spc2_ptr->ucPpll =
                radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1;
            spc2_ptr->ucCRTC = radeon_crtc->crtc_id;
            spc2_ptr->ucRefDivSrc = 1;
            break;
        case 3:
            if (!encoder)
                return;
            spc3_ptr =
                (PIXEL_CLOCK_PARAMETERS_V3 *) & args.sPCLKInput;
            spc3_ptr->usPixelClock = cpu_to_le16(sclock);
            spc3_ptr->usRefDiv = cpu_to_le16(ref_div);
            spc3_ptr->usFbDiv = cpu_to_le16(fb_div);
            spc3_ptr->ucFracFbDiv = frac_fb_div;
            spc3_ptr->ucPostDiv = post_div;
            spc3_ptr->ucPpll =
                radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1;
            spc3_ptr->ucMiscInfo = (radeon_crtc->crtc_id << 2);
            spc3_ptr->ucTransmitterId = radeon_encoder->encoder_id;
            spc3_ptr->ucEncoderMode =
                atombios_get_encoder_mode(encoder);
            break;
        default:
            DRM_ERROR("Unknown table version %d %d\n", frev, crev);
            return;
        }
        break;
    default:
        DRM_ERROR("Unknown table version %d %d\n", frev, crev);
        return;
    }

    printk("executing set pll\n");
    atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}
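
For all three parameter revisions, the divider set produced by radeon_compute_pll() encodes, in outline, the same relationship: the reference clock is divided by ref_div, multiplied up by the feedback divider, then divided by post_div to give the pixel clock (frac_fb_div refines the feedback value). A standalone sketch of that arithmetic with made-up numbers (the 27 MHz reference and the dividers below are hypothetical, not values taken from this driver):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* Hypothetical divider set; real values come from radeon_compute_pll(). */
    uint32_t ref_freq_khz = 27000;    /* assumed 27 MHz reference clock */
    uint32_t ref_div = 2, fb_div = 120, post_div = 12;

    /* vco = ref / ref_div * fb_div; pixel clock = vco / post_div */
    uint32_t pclk_khz = (uint32_t)((uint64_t)ref_freq_khz * fb_div /
                                   (ref_div * post_div));
    printf("pixel clock ~ %u kHz\n", pclk_khz);    /* 135000 kHz here */
    return 0;
}
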

int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
               struct drm_framebuffer *old_fb)
{
    struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
    struct drm_device *dev = crtc->dev;
    struct radeon_device *rdev = dev->dev_private;
    struct radeon_framebuffer *radeon_fb;
    struct drm_gem_object *obj;
    struct drm_radeon_gem_object *obj_priv;
    uint64_t fb_location;
    uint32_t fb_format, fb_pitch_pixels;

    ENTRY();

    if (!crtc->fb)
        return -EINVAL;

    radeon_fb = to_radeon_framebuffer(crtc->fb);

    obj = radeon_fb->obj;
    obj_priv = obj->driver_private;

    //if (radeon_gem_object_pin(obj, RADEON_GEM_DOMAIN_VRAM, &fb_location)) {
    //   return -EINVAL;
    //}

    fb_location = 0; //rdev->mc.vram_location;

    dbgprintf("fb_location %x\n", fb_location);
    dbgprintf("bpp %x\n", crtc->fb->bits_per_pixel);

    switch (crtc->fb->bits_per_pixel) {
    case 15:
        fb_format =
            AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
            AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
        break;
    case 16:
        fb_format =
            AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
            AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
        break;
    case 24:
    case 32:
        fb_format =
            AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
            AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
        break;
    default:
        DRM_ERROR("Unsupported screen depth %d\n",
              crtc->fb->bits_per_pixel);
        return -EINVAL;
    }

    /* TODO tiling */
    if (radeon_crtc->crtc_id == 0)
        WREG32(AVIVO_D1VGA_CONTROL, 0);
    else
        WREG32(AVIVO_D2VGA_CONTROL, 0);
    WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
           (u32) fb_location);
    WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
           radeon_crtc->crtc_offset, (u32) fb_location);
    WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);

    WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
    WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
    WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
    WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
    WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, crtc->fb->width);
    WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, crtc->fb->height);

    fb_pitch_pixels = crtc->fb->pitch / (crtc->fb->bits_per_pixel / 8);
    WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
    WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);

    WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
           crtc->mode.vdisplay);
    x &= ~3;
    y &= ~1;
    WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
           (x << 16) | y);
    WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
           (crtc->mode.hdisplay << 16) | crtc->mode.vdisplay);

    if (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE)
        WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset,
               AVIVO_D1MODE_INTERLEAVE_EN);
    else
        WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset, 0);

    if (old_fb && old_fb != crtc->fb) {
        radeon_fb = to_radeon_framebuffer(old_fb);
//       radeon_gem_object_unpin(radeon_fb->obj);
    }
    LEAVE();
    return 0;
}
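
Two small conversions in atombios_crtc_set_base are easy to get wrong: the pitch register takes the surface pitch in pixels rather than bytes, and the viewport start register packs x into the high 16 bits and y into the low 16 bits after the x &= ~3 / y &= ~1 alignment above. A standalone illustration with hypothetical framebuffer values:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* Hypothetical framebuffer: 32 bpp with 4096-byte rows. */
    uint32_t pitch_bytes = 4096, bits_per_pixel = 32;
    int x = 5, y = 3;    /* requested scanout origin */

    uint32_t fb_pitch_pixels = pitch_bytes / (bits_per_pixel / 8);

    x &= ~3;    /* mirror the driver's alignment of the origin */
    y &= ~1;
    uint32_t viewport_start = ((uint32_t)x << 16) | (uint32_t)y;

    printf("pitch = %u pixels, viewport_start = 0x%08x\n",
           fb_pitch_pixels, viewport_start);    /* 1024 pixels, 0x00040002 */
    return 0;
}
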

int atombios_crtc_mode_set(struct drm_crtc *crtc,
               struct drm_display_mode *mode,
               struct drm_display_mode *adjusted_mode,
               int x, int y, struct drm_framebuffer *old_fb)
{
    struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
    struct drm_device *dev = crtc->dev;
    struct radeon_device *rdev = dev->dev_private;
    struct drm_encoder *encoder;
    SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION crtc_timing;

    ENTRY();

    /* TODO color tiling */
    memset(&crtc_timing, 0, sizeof(crtc_timing));

    /* TODO tv */
    list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {

    }

    crtc_timing.ucCRTC = radeon_crtc->crtc_id;
    crtc_timing.usH_Total = adjusted_mode->crtc_htotal;
    crtc_timing.usH_Disp = adjusted_mode->crtc_hdisplay;
    crtc_timing.usH_SyncStart = adjusted_mode->crtc_hsync_start;
    crtc_timing.usH_SyncWidth =
        adjusted_mode->crtc_hsync_end - adjusted_mode->crtc_hsync_start;

    crtc_timing.usV_Total = adjusted_mode->crtc_vtotal;
    crtc_timing.usV_Disp = adjusted_mode->crtc_vdisplay;
    crtc_timing.usV_SyncStart = adjusted_mode->crtc_vsync_start;
    crtc_timing.usV_SyncWidth =
        adjusted_mode->crtc_vsync_end - adjusted_mode->crtc_vsync_start;

    if (adjusted_mode->flags & DRM_MODE_FLAG_NVSYNC)
        crtc_timing.susModeMiscInfo.usAccess |= ATOM_VSYNC_POLARITY;

    if (adjusted_mode->flags & DRM_MODE_FLAG_NHSYNC)
        crtc_timing.susModeMiscInfo.usAccess |= ATOM_HSYNC_POLARITY;

    if (adjusted_mode->flags & DRM_MODE_FLAG_CSYNC)
        crtc_timing.susModeMiscInfo.usAccess |= ATOM_COMPOSITESYNC;

    if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
        crtc_timing.susModeMiscInfo.usAccess |= ATOM_INTERLACE;

    if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
        crtc_timing.susModeMiscInfo.usAccess |= ATOM_DOUBLE_CLOCK_MODE;

    atombios_crtc_set_pll(crtc, adjusted_mode);
    atombios_crtc_set_timing(crtc, &crtc_timing);

    if (ASIC_IS_AVIVO(rdev))
        atombios_crtc_set_base(crtc, x, y, old_fb);
    else {
        if (radeon_crtc->crtc_id == 0) {
            SET_CRTC_USING_DTD_TIMING_PARAMETERS crtc_dtd_timing;
            memset(&crtc_dtd_timing, 0, sizeof(crtc_dtd_timing));

            /* setup FP shadow regs on R4xx */
            crtc_dtd_timing.ucCRTC = radeon_crtc->crtc_id;
            crtc_dtd_timing.usH_Size = adjusted_mode->crtc_hdisplay;
            crtc_dtd_timing.usV_Size = adjusted_mode->crtc_vdisplay;
            crtc_dtd_timing.usH_Blanking_Time =
                adjusted_mode->crtc_hblank_end -
                adjusted_mode->crtc_hdisplay;
            crtc_dtd_timing.usV_Blanking_Time =
                adjusted_mode->crtc_vblank_end -
                adjusted_mode->crtc_vdisplay;
            crtc_dtd_timing.usH_SyncOffset =
                adjusted_mode->crtc_hsync_start -
                adjusted_mode->crtc_hdisplay;
            crtc_dtd_timing.usV_SyncOffset =
                adjusted_mode->crtc_vsync_start -
                adjusted_mode->crtc_vdisplay;
            crtc_dtd_timing.usH_SyncWidth =
                adjusted_mode->crtc_hsync_end -
                adjusted_mode->crtc_hsync_start;
            crtc_dtd_timing.usV_SyncWidth =
                adjusted_mode->crtc_vsync_end -
                adjusted_mode->crtc_vsync_start;
            /* crtc_dtd_timing.ucH_Border = adjusted_mode->crtc_hborder; */
            /* crtc_dtd_timing.ucV_Border = adjusted_mode->crtc_vborder; */

            if (adjusted_mode->flags & DRM_MODE_FLAG_NVSYNC)
                crtc_dtd_timing.susModeMiscInfo.usAccess |=
                    ATOM_VSYNC_POLARITY;

            if (adjusted_mode->flags & DRM_MODE_FLAG_NHSYNC)
                crtc_dtd_timing.susModeMiscInfo.usAccess |=
                    ATOM_HSYNC_POLARITY;

            if (adjusted_mode->flags & DRM_MODE_FLAG_CSYNC)
                crtc_dtd_timing.susModeMiscInfo.usAccess |=
                    ATOM_COMPOSITESYNC;

            if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
                crtc_dtd_timing.susModeMiscInfo.usAccess |=
                    ATOM_INTERLACE;

            if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
                crtc_dtd_timing.susModeMiscInfo.usAccess |=
                    ATOM_DOUBLE_CLOCK_MODE;

            atombios_set_crtc_dtd_timing(crtc, &crtc_dtd_timing);
        }
        radeon_crtc_set_base(crtc, x, y, old_fb);
        radeon_legacy_atom_set_surface(crtc);
    }
    LEAVE();

    return 0;
}
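
The R4xx branch above repacks the DRM crtc_* timings into the DTD form AtomBIOS expects: blanking time is blank-end minus active, the sync offset is measured from the end of the active region, and the sync width is sync-end minus sync-start. A standalone check of that arithmetic, using example timing numbers chosen for illustration rather than taken from a real mode line:

#include <stdio.h>

/* Example timings in the DRM crtc_* layout; a real driver gets these
 * from adjusted_mode. */
struct example_mode {
    unsigned hdisplay, hsync_start, hsync_end, hblank_end;
    unsigned vdisplay, vsync_start, vsync_end, vblank_end;
};

int main(void)
{
    struct example_mode m = {
        .hdisplay = 1024, .hsync_start = 1048,
        .hsync_end = 1184, .hblank_end = 1344,
        .vdisplay = 768, .vsync_start = 771,
        .vsync_end = 777, .vblank_end = 806,
    };

    /* Same derivations atombios_crtc_mode_set uses for the DTD block. */
    printf("H blank %u, H sync offset %u, H sync width %u\n",
           m.hblank_end - m.hdisplay,
           m.hsync_start - m.hdisplay,
           m.hsync_end - m.hsync_start);
    printf("V blank %u, V sync offset %u, V sync width %u\n",
           m.vblank_end - m.vdisplay,
           m.vsync_start - m.vdisplay,
           m.vsync_end - m.vsync_start);
    return 0;
}
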

static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
                     struct drm_display_mode *mode,
                     struct drm_display_mode *adjusted_mode)
{
    return true;
}

static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
    atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
    atombios_lock_crtc(crtc, 1);
}

static void atombios_crtc_commit(struct drm_crtc *crtc)
{
    atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
    atombios_lock_crtc(crtc, 0);
}

static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
    .dpms = atombios_crtc_dpms,
    .mode_fixup = atombios_crtc_mode_fixup,
    .mode_set = atombios_crtc_mode_set,
    .mode_set_base = atombios_crtc_set_base,
    .prepare = atombios_crtc_prepare,
    .commit = atombios_crtc_commit,
};

The only change between Rev 1128 and Rev 1129 is here: Rev 1128 ended this function with a dbgprintf("done %s\n",__FUNCTION__); debug trace and surrounding blank lines, which Rev 1129 removes. The rest of the file is identical in both revisions. The Rev 1129 version:

void radeon_atombios_init_crtc(struct drm_device *dev,
                   struct radeon_crtc *radeon_crtc)
{
    if (radeon_crtc->crtc_id == 1)
        radeon_crtc->crtc_offset =
            AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
    drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}
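
radeon_atombios_init_crtc sets crtc_offset to the distance between the D2 and D1 register blocks, which is what lets atombios_crtc_set_base program either controller by writing D1 register names plus the offset. A minimal standalone sketch of that pattern (the register addresses below are placeholders, not the real AVIVO offsets):

#include <stdint.h>
#include <stdio.h>

/* Placeholder register addresses, not the real AVIVO values. */
#define D1GRPH_ENABLE   0x6100u
#define D2GRPH_ENABLE   0x6900u

static void example_wreg32(uint32_t reg, uint32_t val)
{
    printf("WREG32(0x%04x, %u)\n", reg, val);    /* stand-in for an MMIO write */
}

static void example_enable_grph(uint32_t crtc_offset)
{
    /* The same register name serves both CRTCs once the offset is added. */
    example_wreg32(D1GRPH_ENABLE + crtc_offset, 1);
}

int main(void)
{
    uint32_t crtc0_offset = 0;
    uint32_t crtc1_offset = D2GRPH_ENABLE - D1GRPH_ENABLE;    /* block distance */

    example_enable_grph(crtc0_offset);    /* hits the D1 block */
    example_enable_grph(crtc1_offset);    /* hits the D2 block */
    return 0;
}
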

void radeon_init_disp_bw_avivo(struct drm_device *dev,
                   struct drm_display_mode *mode1,
                   uint32_t pixel_bytes1,
                   struct drm_display_mode *mode2,
                   uint32_t pixel_bytes2)
{
    struct radeon_device *rdev = dev->dev_private;
    fixed20_12 min_mem_eff;
    fixed20_12 peak_disp_bw, mem_bw, pix_clk, pix_clk2, temp_ff;
    fixed20_12 sclk_ff, mclk_ff;
    uint32_t dc_lb_memory_split, temp;

    min_mem_eff.full = rfixed_const_8(0);
    if (rdev->disp_priority == 2) {
        uint32_t mc_init_misc_lat_timer = 0;
        if (rdev->family == CHIP_RV515)
            mc_init_misc_lat_timer =
                RREG32_MC(RV515_MC_INIT_MISC_LAT_TIMER);
        else if (rdev->family == CHIP_RS690)
            mc_init_misc_lat_timer =
                RREG32_MC(RS690_MC_INIT_MISC_LAT_TIMER);

        mc_init_misc_lat_timer &=
            ~(R300_MC_DISP1R_INIT_LAT_MASK <<
              R300_MC_DISP1R_INIT_LAT_SHIFT);
        mc_init_misc_lat_timer &=
            ~(R300_MC_DISP0R_INIT_LAT_MASK <<
              R300_MC_DISP0R_INIT_LAT_SHIFT);

        if (mode2)
            mc_init_misc_lat_timer |=
                (1 << R300_MC_DISP1R_INIT_LAT_SHIFT);
        if (mode1)
            mc_init_misc_lat_timer |=
                (1 << R300_MC_DISP0R_INIT_LAT_SHIFT);

        if (rdev->family == CHIP_RV515)
            WREG32_MC(RV515_MC_INIT_MISC_LAT_TIMER,
                  mc_init_misc_lat_timer);
        else if (rdev->family == CHIP_RS690)
            WREG32_MC(RS690_MC_INIT_MISC_LAT_TIMER,
                  mc_init_misc_lat_timer);
    }

    /*
     * determine is there is enough bw for current mode
     */
    temp_ff.full = rfixed_const(100);
    mclk_ff.full = rfixed_const(rdev->clock.default_mclk);
    mclk_ff.full = rfixed_div(mclk_ff, temp_ff);
    sclk_ff.full = rfixed_const(rdev->clock.default_sclk);
    sclk_ff.full = rfixed_div(sclk_ff, temp_ff);

    temp = (rdev->mc.vram_width / 8) * (rdev->mc.vram_is_ddr ? 2 : 1);
    temp_ff.full = rfixed_const(temp);
    mem_bw.full = rfixed_mul(mclk_ff, temp_ff);
    mem_bw.full = rfixed_mul(mem_bw, min_mem_eff);

    pix_clk.full = 0;
    pix_clk2.full = 0;
    peak_disp_bw.full = 0;
    if (mode1) {
        temp_ff.full = rfixed_const(1000);
        pix_clk.full = rfixed_const(mode1->clock);    /* convert to fixed point */
        pix_clk.full = rfixed_div(pix_clk, temp_ff);
        temp_ff.full = rfixed_const(pixel_bytes1);
        peak_disp_bw.full += rfixed_mul(pix_clk, temp_ff);
    }
    if (mode2) {
        temp_ff.full = rfixed_const(1000);
        pix_clk2.full = rfixed_const(mode2->clock);    /* convert to fixed point */
        pix_clk2.full = rfixed_div(pix_clk2, temp_ff);
        temp_ff.full = rfixed_const(pixel_bytes2);
        peak_disp_bw.full += rfixed_mul(pix_clk2, temp_ff);
    }

    if (peak_disp_bw.full >= mem_bw.full) {
        DRM_ERROR
            ("You may not have enough display bandwidth for current mode\n"
             "If you have flickering problem, try to lower resolution, refresh rate, or color depth\n");
        printk("peak disp bw %d, mem_bw %d\n",
               rfixed_trunc(peak_disp_bw), rfixed_trunc(mem_bw));
    }

    /*
     * Line Buffer Setup
     * There is a single line buffer shared by both display controllers.
     * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between the display
     * controllers.  The paritioning can either be done manually or via one of four
     * preset allocations specified in bits 1:0:
     * 0 - line buffer is divided in half and shared between each display controller
     * 1 - D1 gets 3/4 of the line buffer, D2 gets 1/4
     * 2 - D1 gets the whole buffer
     * 3 - D1 gets 1/4 of the line buffer, D2 gets 3/4
     * Setting bit 2 of DC_LB_MEMORY_SPLIT controls switches to manual allocation mode.
     * In manual allocation mode, D1 always starts at 0, D1 end/2 is specified in bits
     * 14:4; D2 allocation follows D1.
     */

    /* is auto or manual better ? */
    dc_lb_memory_split =
        RREG32(AVIVO_DC_LB_MEMORY_SPLIT) & ~AVIVO_DC_LB_MEMORY_SPLIT_MASK;
    dc_lb_memory_split &= ~AVIVO_DC_LB_MEMORY_SPLIT_SHIFT_MODE;
#if 1
    /* auto */
    if (mode1 && mode2) {
        if (mode1->hdisplay > mode2->hdisplay) {
            if (mode1->hdisplay > 2560)
                dc_lb_memory_split |=
                    AVIVO_DC_LB_MEMORY_SPLIT_D1_3Q_D2_1Q;
            else
                dc_lb_memory_split |=
                    AVIVO_DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
        } else if (mode2->hdisplay > mode1->hdisplay) {
            if (mode2->hdisplay > 2560)
                dc_lb_memory_split |=
                    AVIVO_DC_LB_MEMORY_SPLIT_D1_1Q_D2_3Q;
            else
                dc_lb_memory_split |=
                    AVIVO_DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
        } else
            dc_lb_memory_split |=
                AVIVO_DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
    } else if (mode1) {
        dc_lb_memory_split |= AVIVO_DC_LB_MEMORY_SPLIT_D1_ONLY;
    } else if (mode2) {
        dc_lb_memory_split |= AVIVO_DC_LB_MEMORY_SPLIT_D1_1Q_D2_3Q;
    }
#else
    /* manual */
    dc_lb_memory_split |= AVIVO_DC_LB_MEMORY_SPLIT_SHIFT_MODE;
    dc_lb_memory_split &=
        ~(AVIVO_DC_LB_DISP1_END_ADR_MASK <<
          AVIVO_DC_LB_DISP1_END_ADR_SHIFT);
    if (mode1) {
        dc_lb_memory_split |=
            ((((mode1->hdisplay / 2) + 64) & AVIVO_DC_LB_DISP1_END_ADR_MASK)
             << AVIVO_DC_LB_DISP1_END_ADR_SHIFT);
    } else if (mode2) {
        dc_lb_memory_split |= (0 << AVIVO_DC_LB_DISP1_END_ADR_SHIFT);
    }
#endif
    WREG32(AVIVO_DC_LB_MEMORY_SPLIT, dc_lb_memory_split);
}
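
The bandwidth check above is plain arithmetic in 20.12 fixed point: available memory bandwidth is the memory clock (MHz) times the bus width in bytes, doubled for DDR and derated by a minimum-efficiency factor, while peak display bandwidth sums pixel clock (MHz) times bytes per pixel over the active heads, and a warning is printed when the latter reaches the former. A self-contained sketch of the same comparison using a small 20.12 helper (the clock, bus-width, and efficiency numbers are made up; the driver's rfixed_* helpers live in radeon_fixed.h and are modeled only loosely here):

#include <stdint.h>
#include <stdio.h>

/* Minimal 20.12 fixed-point helpers, loosely modeled on fixed20_12. */
typedef uint32_t fix20_12;
#define FIX_SHIFT 12
static fix20_12 fix_const(uint32_t v)           { return v << FIX_SHIFT; }
static fix20_12 fix_mul(fix20_12 a, fix20_12 b) { return (fix20_12)(((uint64_t)a * b) >> FIX_SHIFT); }
static fix20_12 fix_div(fix20_12 a, fix20_12 b) { return (fix20_12)(((uint64_t)a << FIX_SHIFT) / b); }
static uint32_t fix_trunc(fix20_12 a)           { return a >> FIX_SHIFT; }

int main(void)
{
    /* Hypothetical card: 400 MHz DDR memory on a 128-bit bus, 80% efficiency. */
    fix20_12 mclk_mhz  = fix_const(400);
    fix20_12 bus_bytes = fix_const((128 / 8) * 2);    /* DDR doubles the rate */
    fix20_12 eff       = fix_div(fix_const(8), fix_const(10));

    fix20_12 mem_bw = fix_mul(fix_mul(mclk_mhz, bus_bytes), eff);

    /* One hypothetical head: ~154 MHz pixel clock at 4 bytes per pixel. */
    fix20_12 peak_disp_bw = fix_mul(fix_const(154), fix_const(4));

    printf("mem_bw ~ %u MB/s, peak_disp_bw ~ %u MB/s -> %s\n",
           fix_trunc(mem_bw), fix_trunc(peak_disp_bw),
           peak_disp_bw >= mem_bw ? "not enough bandwidth" : "ok");
    return 0;
}
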