Rev 1430 | Rev 1963 | Go to most recent revision | Only display areas with differences | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 1430 | Rev 1630 | ||
---|---|---|---|
1 | /* |
1 | /* |
2 | * Copyright 2007-8 Advanced Micro Devices, Inc. |
2 | * Copyright 2007-8 Advanced Micro Devices, Inc. |
3 | * Copyright 2008 Red Hat Inc. |
3 | * Copyright 2008 Red Hat Inc. |
4 | * |
4 | * |
5 | * Permission is hereby granted, free of charge, to any person obtaining a |
5 | * Permission is hereby granted, free of charge, to any person obtaining a |
6 | * copy of this software and associated documentation files (the "Software"), |
6 | * copy of this software and associated documentation files (the "Software"), |
7 | * to deal in the Software without restriction, including without limitation |
7 | * to deal in the Software without restriction, including without limitation |
8 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
8 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
9 | * and/or sell copies of the Software, and to permit persons to whom the |
9 | * and/or sell copies of the Software, and to permit persons to whom the |
10 | * Software is furnished to do so, subject to the following conditions: |
10 | * Software is furnished to do so, subject to the following conditions: |
11 | * |
11 | * |
12 | * The above copyright notice and this permission notice shall be included in |
12 | * The above copyright notice and this permission notice shall be included in |
13 | * all copies or substantial portions of the Software. |
13 | * all copies or substantial portions of the Software. |
14 | * |
14 | * |
15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
18 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR |
18 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR |
19 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
19 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
20 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
20 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
21 | * OTHER DEALINGS IN THE SOFTWARE. |
21 | * OTHER DEALINGS IN THE SOFTWARE. |
22 | * |
22 | * |
23 | * Authors: Dave Airlie |
23 | * Authors: Dave Airlie |
24 | * Alex Deucher |
24 | * Alex Deucher |
25 | */ |
25 | */ |
26 | #include "drmP.h" |
26 | #include "drmP.h" |
27 | #include "radeon_drm.h" |
27 | #include "radeon_drm.h" |
28 | #include "radeon.h" |
28 | #include "radeon.h" |
29 | 29 | ||
30 | #include "atom.h" |
30 | #include "atom.h" |
31 | //#include |
31 | //#include |
32 | 32 | ||
33 | #include "drm_crtc_helper.h" |
33 | #include "drm_crtc_helper.h" |
34 | #include "drm_edid.h" |
34 | #include "drm_edid.h" |
35 | 35 | ||
36 | static int radeon_ddc_dump(struct drm_connector *connector); |
36 | static int radeon_ddc_dump(struct drm_connector *connector); |
37 | 37 | ||
/* Program the 256-entry gamma LUT for one CRTC on AVIVO display hardware. */
static void avivo_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG("%d\n", radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);

	/* Black level at 0 for all three channels. */
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	/* White level at full scale (0xffff) for all three channels. */
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	/* Select this CRTC's LUT for register access before streaming entries. */
	WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUT_RW_MODE, 0);
	WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);

	/* Stream all 256 entries, each packed as 10:10:10 R:G:B. */
	WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(AVIVO_DC_LUT_30_COLOR,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}

	WREG32(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id);
}
70 | 70 | ||
/* Program the 256-entry gamma LUT for one CRTC on Evergreen (DCE4) hardware. */
static void evergreen_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG("%d\n", radeon_crtc->crtc_id);
	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	/* Black level at 0 for all three channels. */
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	/* White level at full scale (0xffff) for all three channels. */
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	/* Note: on DCE4 RW_MODE selects the CRTC (unlike AVIVO's RW_SELECT). */
	WREG32(EVERGREEN_DC_LUT_RW_MODE, radeon_crtc->crtc_id);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK, 0x00000007);

	/* Stream all 256 entries, each packed as 10:10:10 R:G:B. */
	WREG32(EVERGREEN_DC_LUT_RW_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(EVERGREEN_DC_LUT_30_COLOR,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}
100 | 100 | ||
/* Program the 256-entry palette for one CRTC on pre-AVIVO hardware. */
static void legacy_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;
	uint32_t dac2_cntl;

	/* Route palette register access to the CRTC being programmed:
	 * bit clear -> CRTC1, bit set -> CRTC2. */
	dac2_cntl = RREG32(RADEON_DAC_CNTL2);
	if (radeon_crtc->crtc_id == 0)
		dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
	else
		dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
	WREG32(RADEON_DAC_CNTL2, dac2_cntl);

	/* Stream all 256 entries, each packed as 10:10:10 R:G:B. */
	WREG8(RADEON_PALETTE_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(RADEON_PALETTE_30_DATA,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}
124 | 124 | ||
125 | void radeon_crtc_load_lut(struct drm_crtc *crtc) |
125 | void radeon_crtc_load_lut(struct drm_crtc *crtc) |
126 | { |
126 | { |
127 | struct drm_device *dev = crtc->dev; |
127 | struct drm_device *dev = crtc->dev; |
128 | struct radeon_device *rdev = dev->dev_private; |
128 | struct radeon_device *rdev = dev->dev_private; |
129 | 129 | ||
130 | if (!crtc->enabled) |
130 | if (!crtc->enabled) |
131 | return; |
131 | return; |
132 | 132 | ||
133 | if (ASIC_IS_DCE4(rdev)) |
133 | if (ASIC_IS_DCE4(rdev)) |
134 | evergreen_crtc_load_lut(crtc); |
134 | evergreen_crtc_load_lut(crtc); |
135 | else if (ASIC_IS_AVIVO(rdev)) |
135 | else if (ASIC_IS_AVIVO(rdev)) |
136 | avivo_crtc_load_lut(crtc); |
136 | avivo_crtc_load_lut(crtc); |
137 | else |
137 | else |
138 | legacy_crtc_load_lut(crtc); |
138 | legacy_crtc_load_lut(crtc); |
139 | } |
139 | } |
140 | 140 | ||
141 | /** Sets the color ramps on behalf of fbcon */ |
141 | /** Sets the color ramps on behalf of fbcon */ |
142 | void radeon_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green, |
142 | void radeon_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green, |
143 | u16 blue, int regno) |
143 | u16 blue, int regno) |
144 | { |
144 | { |
145 | struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); |
145 | struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); |
146 | 146 | ||
147 | radeon_crtc->lut_r[regno] = red >> 6; |
147 | radeon_crtc->lut_r[regno] = red >> 6; |
148 | radeon_crtc->lut_g[regno] = green >> 6; |
148 | radeon_crtc->lut_g[regno] = green >> 6; |
149 | radeon_crtc->lut_b[regno] = blue >> 6; |
149 | radeon_crtc->lut_b[regno] = blue >> 6; |
150 | } |
150 | } |
151 | 151 | ||
152 | /** Gets the color ramps on behalf of fbcon */ |
152 | /** Gets the color ramps on behalf of fbcon */ |
153 | void radeon_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green, |
153 | void radeon_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green, |
154 | u16 *blue, int regno) |
154 | u16 *blue, int regno) |
155 | { |
155 | { |
156 | struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); |
156 | struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); |
157 | 157 | ||
158 | *red = radeon_crtc->lut_r[regno] << 6; |
158 | *red = radeon_crtc->lut_r[regno] << 6; |
159 | *green = radeon_crtc->lut_g[regno] << 6; |
159 | *green = radeon_crtc->lut_g[regno] << 6; |
160 | *blue = radeon_crtc->lut_b[regno] << 6; |
160 | *blue = radeon_crtc->lut_b[regno] << 6; |
161 | } |
161 | } |
162 | 162 | ||
163 | static void radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green, |
163 | static void radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green, |
164 | u16 *blue, uint32_t size) |
164 | u16 *blue, uint32_t size) |
165 | { |
165 | { |
166 | struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); |
166 | struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); |
167 | int i; |
167 | int i; |
168 | 168 | ||
169 | if (size != 256) { |
169 | if (size != 256) { |
170 | return; |
170 | return; |
171 | } |
171 | } |
172 | 172 | ||
173 | /* userspace palettes are always correct as is */ |
173 | /* userspace palettes are always correct as is */ |
174 | for (i = 0; i < 256; i++) { |
174 | for (i = 0; i < 256; i++) { |
175 | radeon_crtc->lut_r[i] = red[i] >> 6; |
175 | radeon_crtc->lut_r[i] = red[i] >> 6; |
176 | radeon_crtc->lut_g[i] = green[i] >> 6; |
176 | radeon_crtc->lut_g[i] = green[i] >> 6; |
177 | radeon_crtc->lut_b[i] = blue[i] >> 6; |
177 | radeon_crtc->lut_b[i] = blue[i] >> 6; |
178 | } |
178 | } |
179 | radeon_crtc_load_lut(crtc); |
179 | radeon_crtc_load_lut(crtc); |
180 | } |
180 | } |
181 | 181 | ||
/* drm_crtc_funcs.destroy: tear down the DRM core state, then free ours. */
static void radeon_crtc_destroy(struct drm_crtc *crtc)
{
	struct radeon_crtc *rcrtc = to_radeon_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(rcrtc);
}
189 | 189 | ||
/* CRTC callbacks registered with the DRM core. Cursor handling is not
 * wired up here (cursor_set/cursor_move are NULL). */
static const struct drm_crtc_funcs radeon_crtc_funcs = {
	.cursor_set = NULL,
	.cursor_move = NULL,
	.gamma_set = radeon_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = radeon_crtc_destroy,
};
197 | 197 | ||
/*
 * Allocate and register CRTC @index with the DRM core.
 *
 * The allocation reserves extra room after the struct for a connector
 * pointer array (see the #if 0 mode_set wiring below). On allocation
 * failure the CRTC is silently skipped.
 */
static void radeon_crtc_init(struct drm_device *dev, int index)
{
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc;
	int i;

	radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (radeon_crtc == NULL)
		return;

	drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
	radeon_crtc->crtc_id = index;
	rdev->mode_info.crtcs[index] = radeon_crtc;

#if 0
	radeon_crtc->mode_set.crtc = &radeon_crtc->base;
	radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
	radeon_crtc->mode_set.num_connectors = 0;
#endif

	/* Seed the LUT with a linear identity ramp (8-bit index -> 10-bit). */
	for (i = 0; i < 256; i++) {
		radeon_crtc->lut_r[i] = i << 2;
		radeon_crtc->lut_g[i] = i << 2;
		radeon_crtc->lut_b[i] = i << 2;
	}

	if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
		radeon_atombios_init_crtc(dev, radeon_crtc);
	else
		radeon_legacy_init_crtc(dev, radeon_crtc);
}
231 | 231 | ||
/* Human-readable encoder names, indexed by radeon_encoder->encoder_id
 * (used only for the boot-time display dump below). */
static const char *encoder_names[34] = {
	"NONE",
	"INTERNAL_LVDS",
	"INTERNAL_TMDS1",
	"INTERNAL_TMDS2",
	"INTERNAL_DAC1",
	"INTERNAL_DAC2",
	"INTERNAL_SDVOA",
	"INTERNAL_SDVOB",
	"SI170B",
	"CH7303",
	"CH7301",
	"INTERNAL_DVO1",
	"EXTERNAL_SDVOA",
	"EXTERNAL_SDVOB",
	"TITFP513",
	"INTERNAL_LVTM1",
	"VT1623",
	"HDMI_SI1930",
	"HDMI_INTERNAL",
	"INTERNAL_KLDSCP_TMDS1",
	"INTERNAL_KLDSCP_DVO1",
	"INTERNAL_KLDSCP_DAC1",
	"INTERNAL_KLDSCP_DAC2",
	"SI178",
	"MVPU_FPGA",
	"INTERNAL_DDI",
	"VT1625",
	"HDMI_SI1932",
	"DP_AN9801",
	"DP_DP501",
	"INTERNAL_UNIPHY",
	"INTERNAL_KLDSCP_LVTMA",
	"INTERNAL_UNIPHY1",
	"INTERNAL_UNIPHY2",
};
268 | 268 | ||
/* Human-readable connector names, indexed by connector->connector_type
 * (the DRM_MODE_CONNECTOR_* values). */
static const char *connector_names[15] = {
	"Unknown",
	"VGA",
	"DVI-I",
	"DVI-D",
	"DVI-A",
	"Composite",
	"S-video",
	"LVDS",
	"Component",
	"DIN",
	"DisplayPort",
	"HDMI-A",
	"HDMI-B",
	"TV",
	"eDP",
};
286 | 286 | ||
/* Human-readable hot-plug-detect pin names, indexed by
 * radeon_connector->hpd.hpd (RADEON_HPD_NONE maps to "NONE"). */
static const char *hpd_names[7] = {
	"NONE",
	"HPD1",
	"HPD2",
	"HPD3",
	"HPD4",
	"HPD5",
	"HPD6",
};
296 | 296 | ||
/*
 * Dump every connector's type, HPD pin, DDC register set, and the encoders
 * that can drive it to the kernel log. Purely informational.
 */
static void radeon_print_display_setup(struct drm_device *dev)
{
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	struct drm_encoder *encoder;
	struct radeon_encoder *radeon_encoder;
	uint32_t devices;
	int i = 0;

	DRM_INFO("Radeon Display Connectors\n");
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		radeon_connector = to_radeon_connector(connector);
		DRM_INFO("Connector %d:\n", i);
		DRM_INFO("  %s\n", connector_names[connector->connector_type]);
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			DRM_INFO("  %s\n", hpd_names[radeon_connector->hpd.hpd]);
		if (radeon_connector->ddc_bus) {
			DRM_INFO("  DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
				 radeon_connector->ddc_bus->rec.mask_clk_reg,
				 radeon_connector->ddc_bus->rec.mask_data_reg,
				 radeon_connector->ddc_bus->rec.a_clk_reg,
				 radeon_connector->ddc_bus->rec.a_data_reg,
				 radeon_connector->ddc_bus->rec.en_clk_reg,
				 radeon_connector->ddc_bus->rec.en_data_reg,
				 radeon_connector->ddc_bus->rec.y_clk_reg,
				 radeon_connector->ddc_bus->rec.y_data_reg);
		} else {
			/* A display connector without a DDC bus almost
			 * certainly indicates a broken BIOS table. */
			if (connector->connector_type == DRM_MODE_CONNECTOR_VGA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVII ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVID ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVIA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)
				DRM_INFO("  DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
		}
		DRM_INFO("  Encoders:\n");
		/* List each encoder that shares a device bit with this
		 * connector, once per supported device. */
		list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
			radeon_encoder = to_radeon_encoder(encoder);
			devices = radeon_encoder->devices & radeon_connector->devices;
			if (devices) {
				if (devices & ATOM_DEVICE_CRT1_SUPPORT)
					DRM_INFO("    CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_CRT2_SUPPORT)
					DRM_INFO("    CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_LCD1_SUPPORT)
					DRM_INFO("    LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP1_SUPPORT)
					DRM_INFO("    DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP2_SUPPORT)
					DRM_INFO("    DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP3_SUPPORT)
					DRM_INFO("    DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP4_SUPPORT)
					DRM_INFO("    DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP5_SUPPORT)
					DRM_INFO("    DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_TV1_SUPPORT)
					DRM_INFO("    TV1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_CV_SUPPORT)
					DRM_INFO("    CV: %s\n", encoder_names[radeon_encoder->encoder_id]);
			}
		}
		i++;
	}
}
362 | 362 | ||
/*
 * Discover the board's encoder/connector topology from the BIOS (or a
 * hard-coded table when no BIOS is available), then set up encoder clone
 * masks, log the result, and probe DDC on every connector.
 *
 * Returns true if connector info was found, false otherwise.
 */
static bool radeon_setup_enc_conn(struct drm_device *dev)
{
	struct radeon_device *rdev = dev->dev_private;
	struct drm_connector *drm_connector;
	bool ret = false;

	if (rdev->bios) {
		if (rdev->is_atom_bios) {
			/* R600+ ATOM BIOSes carry a full object table;
			 * older ATOM parts only expose the supported-devices
			 * table. */
			if (rdev->family >= CHIP_R600)
				ret = radeon_get_atom_connector_info_from_object_table(dev);
			else
				ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
		} else {
			ret = radeon_get_legacy_connector_info_from_bios(dev);
			if (ret == false)
				/* BIOS parse failed; fall back to the table. */
				ret = radeon_get_legacy_connector_info_from_table(dev);
		}
	} else {
		/* No BIOS at all: only pre-AVIVO parts have a fallback. */
		if (!ASIC_IS_AVIVO(rdev))
			ret = radeon_get_legacy_connector_info_from_table(dev);
	}
	if (ret) {
		radeon_setup_encoder_clones(dev);
		radeon_print_display_setup(dev);
		list_for_each_entry(drm_connector, &dev->mode_config.connector_list, head)
			radeon_ddc_dump(drm_connector);
	}

	return ret;
}
393 | 393 | ||
/*
 * Fetch an EDID for @radeon_connector and turn it into DRM display modes.
 *
 * Probe order: DP/eDP aux-style i2c bus first (when the sink really is a
 * DP device), then the regular DDC bus, then a BIOS-provided hardcoded
 * EDID (some servers ship one for KVMs).
 *
 * Returns the number of modes added, 0 if no EDID was found, or -1 when
 * the connector has no DDC bus at all.
 */
int radeon_ddc_get_modes(struct radeon_connector *radeon_connector)
{
	struct drm_device *dev = radeon_connector->base.dev;
	struct radeon_device *rdev = dev->dev_private;
	int ret = 0;

	if ((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_DisplayPort) ||
	    (radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP)) {
		struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
		if ((dig->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT ||
		     dig->dp_sink_type == CONNECTOR_OBJECT_ID_eDP) && dig->dp_i2c_bus)
			radeon_connector->edid = drm_get_edid(&radeon_connector->base, &dig->dp_i2c_bus->adapter);
	}
	if (!radeon_connector->ddc_bus)
		return -1;
	if (!radeon_connector->edid) {
		radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter);
	}
	/* some servers provide a hardcoded edid in rom for KVMs */
	if (!radeon_connector->edid)
		radeon_connector->edid = radeon_combios_get_hardcoded_edid(rdev);
	if (radeon_connector->edid) {
		drm_mode_connector_update_edid_property(&radeon_connector->base, radeon_connector->edid);
		ret = drm_add_edid_modes(&radeon_connector->base, radeon_connector->edid);
		return ret;
	}
	/* No EDID anywhere: clear any stale EDID property. */
	drm_mode_connector_update_edid_property(&radeon_connector->base, NULL);
	return 0;
}
423 | 423 | ||
424 | static int radeon_ddc_dump(struct drm_connector *connector) |
424 | static int radeon_ddc_dump(struct drm_connector *connector) |
425 | { |
425 | { |
426 | struct edid *edid; |
426 | struct edid *edid; |
427 | struct radeon_connector *radeon_connector = to_radeon_connector(connector); |
427 | struct radeon_connector *radeon_connector = to_radeon_connector(connector); |
428 | int ret = 0; |
428 | int ret = 0; |
429 | 429 | ||
430 | if (!radeon_connector->ddc_bus) |
430 | if (!radeon_connector->ddc_bus) |
431 | return -1; |
431 | return -1; |
432 | edid = drm_get_edid(connector, &radeon_connector->ddc_bus->adapter); |
432 | edid = drm_get_edid(connector, &radeon_connector->ddc_bus->adapter); |
433 | if (edid) { |
433 | if (edid) { |
434 | kfree(edid); |
434 | kfree(edid); |
435 | } |
435 | } |
436 | return ret; |
436 | return ret; |
437 | } |
437 | } |
438 | 438 | ||
439 | static inline uint32_t radeon_div(uint64_t n, uint32_t d) |
439 | static inline uint32_t radeon_div(uint64_t n, uint32_t d) |
440 | { |
440 | { |
441 | uint64_t mod; |
441 | uint64_t mod; |
442 | 442 | ||
443 | n += d / 2; |
443 | n += d / 2; |
444 | 444 | ||
445 | mod = do_div(n, d); |
445 | mod = do_div(n, d); |
446 | return n; |
446 | return n; |
447 | } |
447 | } |
448 | 448 | ||
449 | static void radeon_compute_pll_legacy(struct radeon_pll *pll, |
449 | static void radeon_compute_pll_legacy(struct radeon_pll *pll, |
450 | uint64_t freq, |
450 | uint64_t freq, |
451 | uint32_t *dot_clock_p, |
451 | uint32_t *dot_clock_p, |
452 | uint32_t *fb_div_p, |
452 | uint32_t *fb_div_p, |
453 | uint32_t *frac_fb_div_p, |
453 | uint32_t *frac_fb_div_p, |
454 | uint32_t *ref_div_p, |
454 | uint32_t *ref_div_p, |
455 | uint32_t *post_div_p) |
455 | uint32_t *post_div_p) |
456 | { |
456 | { |
457 | uint32_t min_ref_div = pll->min_ref_div; |
457 | uint32_t min_ref_div = pll->min_ref_div; |
458 | uint32_t max_ref_div = pll->max_ref_div; |
458 | uint32_t max_ref_div = pll->max_ref_div; |
459 | uint32_t min_post_div = pll->min_post_div; |
459 | uint32_t min_post_div = pll->min_post_div; |
460 | uint32_t max_post_div = pll->max_post_div; |
460 | uint32_t max_post_div = pll->max_post_div; |
461 | uint32_t min_fractional_feed_div = 0; |
461 | uint32_t min_fractional_feed_div = 0; |
462 | uint32_t max_fractional_feed_div = 0; |
462 | uint32_t max_fractional_feed_div = 0; |
463 | uint32_t best_vco = pll->best_vco; |
463 | uint32_t best_vco = pll->best_vco; |
464 | uint32_t best_post_div = 1; |
464 | uint32_t best_post_div = 1; |
465 | uint32_t best_ref_div = 1; |
465 | uint32_t best_ref_div = 1; |
466 | uint32_t best_feedback_div = 1; |
466 | uint32_t best_feedback_div = 1; |
467 | uint32_t best_frac_feedback_div = 0; |
467 | uint32_t best_frac_feedback_div = 0; |
468 | uint32_t best_freq = -1; |
468 | uint32_t best_freq = -1; |
469 | uint32_t best_error = 0xffffffff; |
469 | uint32_t best_error = 0xffffffff; |
470 | uint32_t best_vco_diff = 1; |
470 | uint32_t best_vco_diff = 1; |
471 | uint32_t post_div; |
471 | uint32_t post_div; |
472 | 472 | ||
473 | DRM_DEBUG("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div); |
473 | DRM_DEBUG("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div); |
474 | freq = freq * 1000; |
474 | freq = freq * 1000; |
475 | 475 | ||
476 | if (pll->flags & RADEON_PLL_USE_REF_DIV) |
476 | if (pll->flags & RADEON_PLL_USE_REF_DIV) |
477 | min_ref_div = max_ref_div = pll->reference_div; |
477 | min_ref_div = max_ref_div = pll->reference_div; |
478 | else { |
478 | else { |
479 | while (min_ref_div < max_ref_div-1) { |
479 | while (min_ref_div < max_ref_div-1) { |
480 | uint32_t mid = (min_ref_div + max_ref_div) / 2; |
480 | uint32_t mid = (min_ref_div + max_ref_div) / 2; |
481 | uint32_t pll_in = pll->reference_freq / mid; |
481 | uint32_t pll_in = pll->reference_freq / mid; |
482 | if (pll_in < pll->pll_in_min) |
482 | if (pll_in < pll->pll_in_min) |
483 | max_ref_div = mid; |
483 | max_ref_div = mid; |
484 | else if (pll_in > pll->pll_in_max) |
484 | else if (pll_in > pll->pll_in_max) |
485 | min_ref_div = mid; |
485 | min_ref_div = mid; |
486 | else |
486 | else |
487 | break; |
487 | break; |
488 | } |
488 | } |
489 | } |
489 | } |
490 | 490 | ||
491 | if (pll->flags & RADEON_PLL_USE_POST_DIV) |
491 | if (pll->flags & RADEON_PLL_USE_POST_DIV) |
492 | min_post_div = max_post_div = pll->post_div; |
492 | min_post_div = max_post_div = pll->post_div; |
493 | 493 | ||
494 | if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) { |
494 | if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) { |
495 | min_fractional_feed_div = pll->min_frac_feedback_div; |
495 | min_fractional_feed_div = pll->min_frac_feedback_div; |
496 | max_fractional_feed_div = pll->max_frac_feedback_div; |
496 | max_fractional_feed_div = pll->max_frac_feedback_div; |
497 | } |
497 | } |
498 | 498 | ||
499 | for (post_div = min_post_div; post_div <= max_post_div; ++post_div) { |
499 | for (post_div = min_post_div; post_div <= max_post_div; ++post_div) { |
500 | uint32_t ref_div; |
500 | uint32_t ref_div; |
501 | 501 | ||
502 | if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1)) |
502 | if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1)) |
503 | continue; |
503 | continue; |
504 | 504 | ||
505 | /* legacy radeons only have a few post_divs */ |
505 | /* legacy radeons only have a few post_divs */ |
506 | if (pll->flags & RADEON_PLL_LEGACY) { |
506 | if (pll->flags & RADEON_PLL_LEGACY) { |
507 | if ((post_div == 5) || |
507 | if ((post_div == 5) || |
508 | (post_div == 7) || |
508 | (post_div == 7) || |
509 | (post_div == 9) || |
509 | (post_div == 9) || |
510 | (post_div == 10) || |
510 | (post_div == 10) || |
511 | (post_div == 11) || |
511 | (post_div == 11) || |
512 | (post_div == 13) || |
512 | (post_div == 13) || |
513 | (post_div == 14) || |
513 | (post_div == 14) || |
514 | (post_div == 15)) |
514 | (post_div == 15)) |
515 | continue; |
515 | continue; |
516 | } |
516 | } |
517 | 517 | ||
518 | for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) { |
518 | for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) { |
519 | uint32_t feedback_div, current_freq = 0, error, vco_diff; |
519 | uint32_t feedback_div, current_freq = 0, error, vco_diff; |
520 | uint32_t pll_in = pll->reference_freq / ref_div; |
520 | uint32_t pll_in = pll->reference_freq / ref_div; |
521 | uint32_t min_feed_div = pll->min_feedback_div; |
521 | uint32_t min_feed_div = pll->min_feedback_div; |
522 | uint32_t max_feed_div = pll->max_feedback_div + 1; |
522 | uint32_t max_feed_div = pll->max_feedback_div + 1; |
523 | 523 | ||
524 | if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max) |
524 | if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max) |
525 | continue; |
525 | continue; |
526 | 526 | ||
527 | while (min_feed_div < max_feed_div) { |
527 | while (min_feed_div < max_feed_div) { |
528 | uint32_t vco; |
528 | uint32_t vco; |
529 | uint32_t min_frac_feed_div = min_fractional_feed_div; |
529 | uint32_t min_frac_feed_div = min_fractional_feed_div; |
530 | uint32_t max_frac_feed_div = max_fractional_feed_div + 1; |
530 | uint32_t max_frac_feed_div = max_fractional_feed_div + 1; |
531 | uint32_t frac_feedback_div; |
531 | uint32_t frac_feedback_div; |
532 | uint64_t tmp; |
532 | uint64_t tmp; |
533 | 533 | ||
534 | feedback_div = (min_feed_div + max_feed_div) / 2; |
534 | feedback_div = (min_feed_div + max_feed_div) / 2; |
535 | 535 | ||
536 | tmp = (uint64_t)pll->reference_freq * feedback_div; |
536 | tmp = (uint64_t)pll->reference_freq * feedback_div; |
537 | vco = radeon_div(tmp, ref_div); |
537 | vco = radeon_div(tmp, ref_div); |
538 | 538 | ||
539 | if (vco < pll->pll_out_min) { |
539 | if (vco < pll->pll_out_min) { |
540 | min_feed_div = feedback_div + 1; |
540 | min_feed_div = feedback_div + 1; |
541 | continue; |
541 | continue; |
542 | } else if (vco > pll->pll_out_max) { |
542 | } else if (vco > pll->pll_out_max) { |
543 | max_feed_div = feedback_div; |
543 | max_feed_div = feedback_div; |
544 | continue; |
544 | continue; |
545 | } |
545 | } |
546 | 546 | ||
547 | while (min_frac_feed_div < max_frac_feed_div) { |
547 | while (min_frac_feed_div < max_frac_feed_div) { |
548 | frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2; |
548 | frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2; |
549 | tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div; |
549 | tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div; |
550 | tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div; |
550 | tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div; |
551 | current_freq = radeon_div(tmp, ref_div * post_div); |
551 | current_freq = radeon_div(tmp, ref_div * post_div); |
552 | 552 | ||
553 | if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) { |
553 | if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) { |
554 | error = freq - current_freq; |
554 | error = freq - current_freq; |
555 | error = error < 0 ? 0xffffffff : error; |
555 | error = error < 0 ? 0xffffffff : error; |
556 | } else |
556 | } else |
557 | error = abs(current_freq - freq); |
557 | error = abs(current_freq - freq); |
558 | vco_diff = abs(vco - best_vco); |
558 | vco_diff = abs(vco - best_vco); |
559 | 559 | ||
560 | if ((best_vco == 0 && error < best_error) || |
560 | if ((best_vco == 0 && error < best_error) || |
561 | (best_vco != 0 && |
561 | (best_vco != 0 && |
562 | (error < best_error - 100 || |
562 | (error < best_error - 100 || |
563 | (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) { |
563 | (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) { |
564 | best_post_div = post_div; |
564 | best_post_div = post_div; |
565 | best_ref_div = ref_div; |
565 | best_ref_div = ref_div; |
566 | best_feedback_div = feedback_div; |
566 | best_feedback_div = feedback_div; |
567 | best_frac_feedback_div = frac_feedback_div; |
567 | best_frac_feedback_div = frac_feedback_div; |
568 | best_freq = current_freq; |
568 | best_freq = current_freq; |
569 | best_error = error; |
569 | best_error = error; |
570 | best_vco_diff = vco_diff; |
570 | best_vco_diff = vco_diff; |
571 | } else if (current_freq == freq) { |
571 | } else if (current_freq == freq) { |
572 | if (best_freq == -1) { |
572 | if (best_freq == -1) { |
573 | best_post_div = post_div; |
573 | best_post_div = post_div; |
574 | best_ref_div = ref_div; |
574 | best_ref_div = ref_div; |
575 | best_feedback_div = feedback_div; |
575 | best_feedback_div = feedback_div; |
576 | best_frac_feedback_div = frac_feedback_div; |
576 | best_frac_feedback_div = frac_feedback_div; |
577 | best_freq = current_freq; |
577 | best_freq = current_freq; |
578 | best_error = error; |
578 | best_error = error; |
579 | best_vco_diff = vco_diff; |
579 | best_vco_diff = vco_diff; |
580 | } else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) || |
580 | } else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) || |
581 | ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) || |
581 | ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) || |
582 | ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) || |
582 | ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) || |
583 | ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) || |
583 | ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) || |
584 | ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) || |
584 | ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) || |
585 | ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) { |
585 | ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) { |
586 | best_post_div = post_div; |
586 | best_post_div = post_div; |
587 | best_ref_div = ref_div; |
587 | best_ref_div = ref_div; |
588 | best_feedback_div = feedback_div; |
588 | best_feedback_div = feedback_div; |
589 | best_frac_feedback_div = frac_feedback_div; |
589 | best_frac_feedback_div = frac_feedback_div; |
590 | best_freq = current_freq; |
590 | best_freq = current_freq; |
591 | best_error = error; |
591 | best_error = error; |
592 | best_vco_diff = vco_diff; |
592 | best_vco_diff = vco_diff; |
593 | } |
593 | } |
594 | } |
594 | } |
595 | if (current_freq < freq) |
595 | if (current_freq < freq) |
596 | min_frac_feed_div = frac_feedback_div + 1; |
596 | min_frac_feed_div = frac_feedback_div + 1; |
597 | else |
597 | else |
598 | max_frac_feed_div = frac_feedback_div; |
598 | max_frac_feed_div = frac_feedback_div; |
599 | } |
599 | } |
600 | if (current_freq < freq) |
600 | if (current_freq < freq) |
601 | min_feed_div = feedback_div + 1; |
601 | min_feed_div = feedback_div + 1; |
602 | else |
602 | else |
603 | max_feed_div = feedback_div; |
603 | max_feed_div = feedback_div; |
604 | } |
604 | } |
605 | } |
605 | } |
606 | } |
606 | } |
607 | 607 | ||
608 | *dot_clock_p = best_freq / 10000; |
608 | *dot_clock_p = best_freq / 10000; |
609 | *fb_div_p = best_feedback_div; |
609 | *fb_div_p = best_feedback_div; |
610 | *frac_fb_div_p = best_frac_feedback_div; |
610 | *frac_fb_div_p = best_frac_feedback_div; |
611 | *ref_div_p = best_ref_div; |
611 | *ref_div_p = best_ref_div; |
612 | *post_div_p = best_post_div; |
612 | *post_div_p = best_post_div; |
613 | } |
613 | } |
614 | 614 | ||
/*
 * Compute the feedback divider (integer part in *fb_div and, when
 * RADEON_PLL_USE_FRAC_FB_DIV is set, tenths in *fb_div_frac) required
 * to produce clock 'freq' for the given post and reference dividers,
 * using 20.12 fixed-point (fixed20_12 / rfixed_*) arithmetic.
 *
 * Returns true when the resulting integer divider lies within the
 * PLL's [min_feedback_div, max_feedback_div] range, false otherwise.
 * NOTE(review): 'freq' units are whatever the caller passes (it is
 * freq/10 in radeon_compute_pll_new) -- confirm before reusing.
 */
static bool
calc_fb_div(struct radeon_pll *pll,
	    uint32_t freq,
	    uint32_t post_div,
	    uint32_t ref_div,
	    uint32_t *fb_div,
	    uint32_t *fb_div_frac)
{
	fixed20_12 feedback_divider, a, b;
	u32 vco_freq;

	/* The VCO must run at freq * post_div. */
	vco_freq = freq * post_div;
	/* feedback_divider = vco_freq * ref_div / pll->reference_freq; */
	a.full = rfixed_const(pll->reference_freq);
	feedback_divider.full = rfixed_const(vco_freq);
	feedback_divider.full = rfixed_div(feedback_divider, a);
	a.full = rfixed_const(ref_div);
	feedback_divider.full = rfixed_mul(feedback_divider, a);

	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		/* Round to the nearest tenth:
		 * feedback_divider = floor((feedback_divider * 10.0) + 0.5) * 0.1; */
		a.full = rfixed_const(10);
		feedback_divider.full = rfixed_mul(feedback_divider, a);
		feedback_divider.full += rfixed_const_half(0);
		feedback_divider.full = rfixed_floor(feedback_divider);
		feedback_divider.full = rfixed_div(feedback_divider, a);

		/* *fb_div = floor(feedback_divider); */
		a.full = rfixed_floor(feedback_divider);
		*fb_div = rfixed_trunc(a);
		/* *fb_div_frac = fmod(feedback_divider, 1.0) * 10.0; */
		a.full = rfixed_const(10);
		b.full = rfixed_mul(feedback_divider, a);

		feedback_divider.full = rfixed_floor(feedback_divider);
		feedback_divider.full = rfixed_mul(feedback_divider, a);
		feedback_divider.full = b.full - feedback_divider.full;
		*fb_div_frac = rfixed_trunc(feedback_divider);
	} else {
		/* No fractional divider: round to nearest integer.
		 * *fb_div = floor(feedback_divider + 0.5); */
		feedback_divider.full += rfixed_const_half(0);
		feedback_divider.full = rfixed_floor(feedback_divider);

		*fb_div = rfixed_trunc(feedback_divider);
		*fb_div_frac = 0;
	}

	/* Reject dividers outside the PLL's supported range. */
	if (((*fb_div) < pll->min_feedback_div) || ((*fb_div) > pll->max_feedback_div))
		return false;
	else
		return true;
}
667 | 667 | ||
/*
 * Search for a reference divider (and matching feedback divider) that
 * hits the requested frequency for a fixed post divider.
 *
 * Iterates reference dividers from min_ref_div upward and accepts the
 * first combination whose VCO frequency is within the PLL output range
 * and whose output is at or above the target with a relative error of
 * at most 1/400 (0.25%).
 *
 * On success fills *fb_div, *fb_div_frac and *ref_div and returns
 * true; returns false when no acceptable combination exists.  Note the
 * out-parameters are also written during failed attempts, so their
 * values are only meaningful when true is returned.
 */
static bool
calc_fb_ref_div(struct radeon_pll *pll,
		uint32_t freq,
		uint32_t post_div,
		uint32_t *fb_div,
		uint32_t *fb_div_frac,
		uint32_t *ref_div)
{
	fixed20_12 ffreq, max_error, error, pll_out, a;
	u32 vco;

	ffreq.full = rfixed_const(freq);
	/* max_error = ffreq * 0.0025; */
	a.full = rfixed_const(400);
	max_error.full = rfixed_div(ffreq, a);

	for ((*ref_div) = pll->min_ref_div; (*ref_div) < pll->max_ref_div; ++(*ref_div)) {
		if (calc_fb_div(pll, freq, post_div, (*ref_div), fb_div, fb_div_frac)) {
			/* vco = reference_freq * (fb_div + fb_div_frac/10)
			 *     / ref_div, computed in tenths to stay integer. */
			vco = pll->reference_freq * (((*fb_div) * 10) + (*fb_div_frac));
			vco = vco / ((*ref_div) * 10);

			if ((vco < pll->pll_out_min) || (vco > pll->pll_out_max))
				continue;

			/* pll_out = vco / post_div; */
			a.full = rfixed_const(post_div);
			pll_out.full = rfixed_const(vco);
			pll_out.full = rfixed_div(pll_out, a);

			/* Accept only outputs at or above the target that
			 * are within the allowed error. */
			if (pll_out.full >= ffreq.full) {
				error.full = pll_out.full - ffreq.full;
				if (error.full <= max_error.full)
					return true;
			}
		}
	}
	return false;
}
706 | 706 | ||
707 | static void radeon_compute_pll_new(struct radeon_pll *pll, |
707 | static void radeon_compute_pll_new(struct radeon_pll *pll, |
708 | uint64_t freq, |
708 | uint64_t freq, |
709 | uint32_t *dot_clock_p, |
709 | uint32_t *dot_clock_p, |
710 | uint32_t *fb_div_p, |
710 | uint32_t *fb_div_p, |
711 | uint32_t *frac_fb_div_p, |
711 | uint32_t *frac_fb_div_p, |
712 | uint32_t *ref_div_p, |
712 | uint32_t *ref_div_p, |
713 | uint32_t *post_div_p) |
713 | uint32_t *post_div_p) |
714 | { |
714 | { |
715 | u32 fb_div = 0, fb_div_frac = 0, post_div = 0, ref_div = 0; |
715 | u32 fb_div = 0, fb_div_frac = 0, post_div = 0, ref_div = 0; |
716 | u32 best_freq = 0, vco_frequency; |
716 | u32 best_freq = 0, vco_frequency; |
717 | 717 | ||
718 | /* freq = freq / 10; */ |
718 | /* freq = freq / 10; */ |
719 | do_div(freq, 10); |
719 | do_div(freq, 10); |
720 | 720 | ||
721 | if (pll->flags & RADEON_PLL_USE_POST_DIV) { |
721 | if (pll->flags & RADEON_PLL_USE_POST_DIV) { |
722 | post_div = pll->post_div; |
722 | post_div = pll->post_div; |
723 | if ((post_div < pll->min_post_div) || (post_div > pll->max_post_div)) |
723 | if ((post_div < pll->min_post_div) || (post_div > pll->max_post_div)) |
724 | goto done; |
724 | goto done; |
725 | 725 | ||
726 | vco_frequency = freq * post_div; |
726 | vco_frequency = freq * post_div; |
727 | if ((vco_frequency < pll->pll_out_min) || (vco_frequency > pll->pll_out_max)) |
727 | if ((vco_frequency < pll->pll_out_min) || (vco_frequency > pll->pll_out_max)) |
728 | goto done; |
728 | goto done; |
729 | 729 | ||
730 | if (pll->flags & RADEON_PLL_USE_REF_DIV) { |
730 | if (pll->flags & RADEON_PLL_USE_REF_DIV) { |
731 | ref_div = pll->reference_div; |
731 | ref_div = pll->reference_div; |
732 | if ((ref_div < pll->min_ref_div) || (ref_div > pll->max_ref_div)) |
732 | if ((ref_div < pll->min_ref_div) || (ref_div > pll->max_ref_div)) |
733 | goto done; |
733 | goto done; |
734 | if (!calc_fb_div(pll, freq, post_div, ref_div, &fb_div, &fb_div_frac)) |
734 | if (!calc_fb_div(pll, freq, post_div, ref_div, &fb_div, &fb_div_frac)) |
735 | goto done; |
735 | goto done; |
736 | } |
736 | } |
737 | } else { |
737 | } else { |
738 | for (post_div = pll->max_post_div; post_div >= pll->min_post_div; --post_div) { |
738 | for (post_div = pll->max_post_div; post_div >= pll->min_post_div; --post_div) { |
739 | if (pll->flags & RADEON_PLL_LEGACY) { |
739 | if (pll->flags & RADEON_PLL_LEGACY) { |
740 | if ((post_div == 5) || |
740 | if ((post_div == 5) || |
741 | (post_div == 7) || |
741 | (post_div == 7) || |
742 | (post_div == 9) || |
742 | (post_div == 9) || |
743 | (post_div == 10) || |
743 | (post_div == 10) || |
744 | (post_div == 11)) |
744 | (post_div == 11)) |
745 | continue; |
745 | continue; |
746 | } |
746 | } |
747 | 747 | ||
748 | if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1)) |
748 | if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1)) |
749 | continue; |
749 | continue; |
750 | 750 | ||
751 | vco_frequency = freq * post_div; |
751 | vco_frequency = freq * post_div; |
752 | if ((vco_frequency < pll->pll_out_min) || (vco_frequency > pll->pll_out_max)) |
752 | if ((vco_frequency < pll->pll_out_min) || (vco_frequency > pll->pll_out_max)) |
753 | continue; |
753 | continue; |
754 | if (pll->flags & RADEON_PLL_USE_REF_DIV) { |
754 | if (pll->flags & RADEON_PLL_USE_REF_DIV) { |
755 | ref_div = pll->reference_div; |
755 | ref_div = pll->reference_div; |
756 | if ((ref_div < pll->min_ref_div) || (ref_div > pll->max_ref_div)) |
756 | if ((ref_div < pll->min_ref_div) || (ref_div > pll->max_ref_div)) |
757 | goto done; |
757 | goto done; |
758 | if (calc_fb_div(pll, freq, post_div, ref_div, &fb_div, &fb_div_frac)) |
758 | if (calc_fb_div(pll, freq, post_div, ref_div, &fb_div, &fb_div_frac)) |
759 | break; |
759 | break; |
760 | } else { |
760 | } else { |
761 | if (calc_fb_ref_div(pll, freq, post_div, &fb_div, &fb_div_frac, &ref_div)) |
761 | if (calc_fb_ref_div(pll, freq, post_div, &fb_div, &fb_div_frac, &ref_div)) |
762 | break; |
762 | break; |
763 | } |
763 | } |
764 | } |
764 | } |
765 | } |
765 | } |
766 | 766 | ||
767 | best_freq = pll->reference_freq * 10 * fb_div; |
767 | best_freq = pll->reference_freq * 10 * fb_div; |
768 | best_freq += pll->reference_freq * fb_div_frac; |
768 | best_freq += pll->reference_freq * fb_div_frac; |
769 | best_freq = best_freq / (ref_div * post_div); |
769 | best_freq = best_freq / (ref_div * post_div); |
770 | 770 | ||
771 | done: |
771 | done: |
772 | if (best_freq == 0) |
772 | if (best_freq == 0) |
773 | DRM_ERROR("Couldn't find valid PLL dividers\n"); |
773 | DRM_ERROR("Couldn't find valid PLL dividers\n"); |
774 | 774 | ||
775 | *dot_clock_p = best_freq / 10; |
775 | *dot_clock_p = best_freq / 10; |
776 | *fb_div_p = fb_div; |
776 | *fb_div_p = fb_div; |
777 | *frac_fb_div_p = fb_div_frac; |
777 | *frac_fb_div_p = fb_div_frac; |
778 | *ref_div_p = ref_div; |
778 | *ref_div_p = ref_div; |
779 | *post_div_p = post_div; |
779 | *post_div_p = post_div; |
780 | 780 | ||
781 | DRM_DEBUG("%u %d.%d, %d, %d\n", *dot_clock_p, *fb_div_p, *frac_fb_div_p, *ref_div_p, *post_div_p); |
781 | DRM_DEBUG("%u %d.%d, %d, %d\n", *dot_clock_p, *fb_div_p, *frac_fb_div_p, *ref_div_p, *post_div_p); |
782 | } |
782 | } |
783 | 783 | ||
784 | void radeon_compute_pll(struct radeon_pll *pll, |
784 | void radeon_compute_pll(struct radeon_pll *pll, |
785 | uint64_t freq, |
785 | uint64_t freq, |
786 | uint32_t *dot_clock_p, |
786 | uint32_t *dot_clock_p, |
787 | uint32_t *fb_div_p, |
787 | uint32_t *fb_div_p, |
788 | uint32_t *frac_fb_div_p, |
788 | uint32_t *frac_fb_div_p, |
789 | uint32_t *ref_div_p, |
789 | uint32_t *ref_div_p, |
790 | uint32_t *post_div_p) |
790 | uint32_t *post_div_p) |
791 | { |
791 | { |
792 | switch (pll->algo) { |
792 | switch (pll->algo) { |
793 | case PLL_ALGO_NEW: |
793 | case PLL_ALGO_NEW: |
794 | radeon_compute_pll_new(pll, freq, dot_clock_p, fb_div_p, |
794 | radeon_compute_pll_new(pll, freq, dot_clock_p, fb_div_p, |
795 | frac_fb_div_p, ref_div_p, post_div_p); |
795 | frac_fb_div_p, ref_div_p, post_div_p); |
796 | break; |
796 | break; |
797 | case PLL_ALGO_LEGACY: |
797 | case PLL_ALGO_LEGACY: |
798 | default: |
798 | default: |
799 | radeon_compute_pll_legacy(pll, freq, dot_clock_p, fb_div_p, |
799 | radeon_compute_pll_legacy(pll, freq, dot_clock_p, fb_div_p, |
800 | frac_fb_div_p, ref_div_p, post_div_p); |
800 | frac_fb_div_p, ref_div_p, post_div_p); |
801 | break; |
801 | break; |
802 | } |
802 | } |
803 | } |
803 | } |
804 | 804 | ||
805 | static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb) |
805 | static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb) |
806 | { |
806 | { |
807 | struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb); |
807 | struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb); |
808 | struct drm_device *dev = fb->dev; |
808 | struct drm_device *dev = fb->dev; |
809 | 809 | ||
810 | if (fb->fbdev) |
810 | if (fb->fbdev) |
811 | radeonfb_remove(dev, fb); |
811 | radeonfb_remove(dev, fb); |
812 | - | ||
813 | // if (radeon_fb->obj) { |
- | |
814 | // radeon_gem_object_unpin(radeon_fb->obj); |
- | |
815 | // mutex_lock(&dev->struct_mutex); |
- | |
816 | // drm_gem_object_unreference(radeon_fb->obj); |
- | |
817 | // mutex_unlock(&dev->struct_mutex); |
812 | |
818 | // } |
813 | |
819 | drm_framebuffer_cleanup(fb); |
814 | drm_framebuffer_cleanup(fb); |
820 | kfree(radeon_fb); |
815 | kfree(radeon_fb); |
821 | } |
816 | } |
822 | 817 | ||
/*
 * drm_framebuffer_funcs.create_handle callback.
 *
 * GEM handle creation is disabled in this port, so this is a stub.
 * BUG FIX: it previously returned NULL (a pointer constant) from an
 * int-returning function and declared an unused local; return 0 with
 * the correct type instead (same runtime value).
 * NOTE(review): *handle is left unwritten -- callers must not rely on
 * it until the disabled path below is restored.
 */
static int radeon_user_framebuffer_create_handle(struct drm_framebuffer *fb,
						 struct drm_file *file_priv,
						 unsigned int *handle)
{
	/* would be:
	 *   struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
	 *   return drm_gem_handle_create(file_priv, radeon_fb->obj, handle);
	 */
	return 0;
}
832 | 827 | ||
/* DRM framebuffer vtable shared by all radeon framebuffers. */
static const struct drm_framebuffer_funcs radeon_fb_funcs = {
	.destroy = radeon_user_framebuffer_destroy,
	.create_handle = radeon_user_framebuffer_create_handle,
};
837 | 832 | ||
838 | struct drm_framebuffer * |
833 | struct drm_framebuffer * |
839 | radeon_framebuffer_create(struct drm_device *dev, |
834 | radeon_framebuffer_create(struct drm_device *dev, |
840 | struct drm_mode_fb_cmd *mode_cmd, |
835 | struct drm_mode_fb_cmd *mode_cmd, |
841 | struct drm_gem_object *obj) |
836 | struct drm_gem_object *obj) |
842 | { |
837 | { |
843 | struct radeon_framebuffer *radeon_fb; |
838 | struct radeon_framebuffer *radeon_fb; |
844 | 839 | ||
845 | radeon_fb = kzalloc(sizeof(*radeon_fb), GFP_KERNEL); |
840 | radeon_fb = kzalloc(sizeof(*radeon_fb), GFP_KERNEL); |
846 | if (radeon_fb == NULL) { |
841 | if (radeon_fb == NULL) { |
847 | return NULL; |
842 | return NULL; |
848 | } |
843 | } |
849 | drm_framebuffer_init(dev, &radeon_fb->base, &radeon_fb_funcs); |
844 | drm_framebuffer_init(dev, &radeon_fb->base, &radeon_fb_funcs); |
850 | drm_helper_mode_fill_fb_struct(&radeon_fb->base, mode_cmd); |
845 | drm_helper_mode_fill_fb_struct(&radeon_fb->base, mode_cmd); |
851 | radeon_fb->obj = obj; |
846 | radeon_fb->obj = obj; |
852 | return &radeon_fb->base; |
847 | return &radeon_fb->base; |
853 | } |
848 | } |
854 | 849 | ||
855 | static struct drm_framebuffer * |
850 | static struct drm_framebuffer * |
856 | radeon_user_framebuffer_create(struct drm_device *dev, |
851 | radeon_user_framebuffer_create(struct drm_device *dev, |
857 | struct drm_file *file_priv, |
852 | struct drm_file *file_priv, |
858 | struct drm_mode_fb_cmd *mode_cmd) |
853 | struct drm_mode_fb_cmd *mode_cmd) |
859 | { |
854 | { |
860 | struct drm_gem_object *obj; |
855 | struct drm_gem_object *obj; |
861 | 856 | ||
862 | return NULL; |
857 | return NULL; |
863 | 858 | ||
864 | // obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handle); |
859 | // obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handle); |
865 | // |
860 | // |
866 | // return radeon_framebuffer_create(dev, mode_cmd, obj); |
861 | // return radeon_framebuffer_create(dev, mode_cmd, obj); |
867 | } |
862 | } |
868 | 863 | ||
/* Mode-config callbacks.  fb_create is disabled in this port (see
 * radeon_user_framebuffer_create); only the fbdev probe hook is wired. */
static const struct drm_mode_config_funcs radeon_mode_funcs = {
//	.fb_create = radeon_user_framebuffer_create,
	.fb_changed = radeonfb_probe,
};
873 | 868 | ||
/* (value, name) pair used to declare the choices of a DRM enum
 * property in the tables below. */
struct drm_prop_enum_list {
	int type;	/* property enum value */
	char *name;	/* user-visible choice name */
};
878 | 873 | ||
/* Choices for the TMDS PLL source property: driver-computed vs
 * BIOS-provided dividers. */
static struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
{ { 0, "driver" },
  { 1, "bios" },
};
883 | 878 | ||
/* Choices for the TV standard property, mapping TV_STD_* values to
 * their user-visible names. */
static struct drm_prop_enum_list radeon_tv_std_enum_list[] =
{ { TV_STD_NTSC, "ntsc" },
  { TV_STD_PAL, "pal" },
  { TV_STD_PAL_M, "pal-m" },
  { TV_STD_PAL_60, "pal-60" },
  { TV_STD_NTSC_J, "ntsc-j" },
  { TV_STD_SCART_PAL, "scart-pal" },
  { TV_STD_PAL_CN, "pal-cn" },
  { TV_STD_SECAM, "secam" },
};
894 | 889 | ||
895 | static int radeon_modeset_create_props(struct radeon_device *rdev) |
890 | static int radeon_modeset_create_props(struct radeon_device *rdev) |
896 | { |
891 | { |
897 | int i, sz; |
892 | int i, sz; |
898 | 893 | ||
899 | if (rdev->is_atom_bios) { |
894 | if (rdev->is_atom_bios) { |
900 | rdev->mode_info.coherent_mode_property = |
895 | rdev->mode_info.coherent_mode_property = |
901 | drm_property_create(rdev->ddev, |
896 | drm_property_create(rdev->ddev, |
902 | DRM_MODE_PROP_RANGE, |
897 | DRM_MODE_PROP_RANGE, |
903 | "coherent", 2); |
898 | "coherent", 2); |
904 | if (!rdev->mode_info.coherent_mode_property) |
899 | if (!rdev->mode_info.coherent_mode_property) |
905 | return -ENOMEM; |
900 | return -ENOMEM; |
906 | 901 | ||
907 | rdev->mode_info.coherent_mode_property->values[0] = 0; |
902 | rdev->mode_info.coherent_mode_property->values[0] = 0; |
908 | rdev->mode_info.coherent_mode_property->values[1] = 1; |
903 | rdev->mode_info.coherent_mode_property->values[1] = 1; |
909 | } |
904 | } |
910 | 905 | ||
911 | if (!ASIC_IS_AVIVO(rdev)) { |
906 | if (!ASIC_IS_AVIVO(rdev)) { |
912 | sz = ARRAY_SIZE(radeon_tmds_pll_enum_list); |
907 | sz = ARRAY_SIZE(radeon_tmds_pll_enum_list); |
913 | rdev->mode_info.tmds_pll_property = |
908 | rdev->mode_info.tmds_pll_property = |
914 | drm_property_create(rdev->ddev, |
909 | drm_property_create(rdev->ddev, |
915 | DRM_MODE_PROP_ENUM, |
910 | DRM_MODE_PROP_ENUM, |
916 | "tmds_pll", sz); |
911 | "tmds_pll", sz); |
917 | for (i = 0; i < sz; i++) { |
912 | for (i = 0; i < sz; i++) { |
918 | drm_property_add_enum(rdev->mode_info.tmds_pll_property, |
913 | drm_property_add_enum(rdev->mode_info.tmds_pll_property, |
919 | i, |
914 | i, |
920 | radeon_tmds_pll_enum_list[i].type, |
915 | radeon_tmds_pll_enum_list[i].type, |
921 | radeon_tmds_pll_enum_list[i].name); |
916 | radeon_tmds_pll_enum_list[i].name); |
922 | } |
917 | } |
923 | } |
918 | } |
924 | 919 | ||
925 | rdev->mode_info.load_detect_property = |
920 | rdev->mode_info.load_detect_property = |
926 | drm_property_create(rdev->ddev, |
921 | drm_property_create(rdev->ddev, |
927 | DRM_MODE_PROP_RANGE, |
922 | DRM_MODE_PROP_RANGE, |
928 | "load detection", 2); |
923 | "load detection", 2); |
929 | if (!rdev->mode_info.load_detect_property) |
924 | if (!rdev->mode_info.load_detect_property) |
930 | return -ENOMEM; |
925 | return -ENOMEM; |
931 | rdev->mode_info.load_detect_property->values[0] = 0; |
926 | rdev->mode_info.load_detect_property->values[0] = 0; |
932 | rdev->mode_info.load_detect_property->values[1] = 1; |
927 | rdev->mode_info.load_detect_property->values[1] = 1; |
933 | 928 | ||
934 | drm_mode_create_scaling_mode_property(rdev->ddev); |
929 | drm_mode_create_scaling_mode_property(rdev->ddev); |
935 | 930 | ||
936 | sz = ARRAY_SIZE(radeon_tv_std_enum_list); |
931 | sz = ARRAY_SIZE(radeon_tv_std_enum_list); |
937 | rdev->mode_info.tv_std_property = |
932 | rdev->mode_info.tv_std_property = |
938 | drm_property_create(rdev->ddev, |
933 | drm_property_create(rdev->ddev, |
939 | DRM_MODE_PROP_ENUM, |
934 | DRM_MODE_PROP_ENUM, |
940 | "tv standard", sz); |
935 | "tv standard", sz); |
941 | for (i = 0; i < sz; i++) { |
936 | for (i = 0; i < sz; i++) { |
942 | drm_property_add_enum(rdev->mode_info.tv_std_property, |
937 | drm_property_add_enum(rdev->mode_info.tv_std_property, |
943 | i, |
938 | i, |
944 | radeon_tv_std_enum_list[i].type, |
939 | radeon_tv_std_enum_list[i].type, |
945 | radeon_tv_std_enum_list[i].name); |
940 | radeon_tv_std_enum_list[i].name); |
946 | } |
941 | } |
947 | 942 | ||
948 | return 0; |
943 | return 0; |
949 | } |
944 | } |
950 | 945 | ||
/*
 * Initialize the KMS core for this device: mode-config limits, driver
 * properties, CRTCs, encoder/connector setup and hotplug detection,
 * finishing with the initial framebuffer configuration.
 * Returns 0 on success, or a negative error code from property creation.
 */
int radeon_modeset_init(struct radeon_device *rdev)
{
	int i;
	int ret;

	drm_mode_config_init(rdev->ddev);
	rdev->mode_info.mode_config_initialized = true;

	rdev->ddev->mode_config.funcs = (void *)&radeon_mode_funcs;

	/* AVIVO display blocks support larger maximum surface sizes */
	if (ASIC_IS_AVIVO(rdev)) {
		rdev->ddev->mode_config.max_width = 8192;
		rdev->ddev->mode_config.max_height = 8192;
	} else {
		rdev->ddev->mode_config.max_width = 4096;
		rdev->ddev->mode_config.max_height = 4096;
	}

	rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;

	ret = radeon_modeset_create_props(rdev);
	if (ret) {
		return ret;
	}

	/* check combios for a valid hardcoded EDID - Sun servers */
	if (!rdev->is_atom_bios) {
		/* check for hardcoded EDID in BIOS */
		radeon_combios_check_hardcoded_edid(rdev);
	}

	/* CRTC count depends on the display block generation */
	if (rdev->flags & RADEON_SINGLE_CRTC)
		rdev->num_crtc = 1;
	else {
		if (ASIC_IS_DCE4(rdev))
			rdev->num_crtc = 6;
		else
			rdev->num_crtc = 2;
	}

	/* allocate crtcs */
	for (i = 0; i < rdev->num_crtc; i++) {
		radeon_crtc_init(rdev->ddev, i);
	}

	/* okay we should have all the bios connectors */
	ret = radeon_setup_enc_conn(rdev->ddev);
	if (!ret) {
		/* NOTE(review): radeon_setup_enc_conn() appears to return a
		 * boolean-style success value, so the failure path falls
		 * through here and returns 0 as well - confirm the intended
		 * error semantics before relying on this return code. */
		return ret;
	}
	/* initialize hpd */
	radeon_hpd_init(rdev);
	drm_helper_initial_config(rdev->ddev);
	return 0;
}
1006 | 1001 | ||
1007 | void radeon_modeset_fini(struct radeon_device *rdev) |
1002 | void radeon_modeset_fini(struct radeon_device *rdev) |
1008 | { |
1003 | { |
1009 | kfree(rdev->mode_info.bios_hardcoded_edid); |
1004 | kfree(rdev->mode_info.bios_hardcoded_edid); |
1010 | 1005 | ||
1011 | if (rdev->mode_info.mode_config_initialized) { |
1006 | if (rdev->mode_info.mode_config_initialized) { |
1012 | radeon_hpd_fini(rdev); |
1007 | radeon_hpd_fini(rdev); |
1013 | drm_mode_config_cleanup(rdev->ddev); |
1008 | drm_mode_config_cleanup(rdev->ddev); |
1014 | rdev->mode_info.mode_config_initialized = false; |
1009 | rdev->mode_info.mode_config_initialized = false; |
1015 | } |
1010 | } |
1016 | } |
1011 | } |
1017 | 1012 | ||
/*
 * CRTC mode-fixup helper: derive the CRTC's RMX (scaler) state from the
 * encoders currently bound to it.
 *
 * The first encoder found on the CRTC decides the scaling mode and
 * supplies the native panel mode; every additional encoder must agree
 * on the scaling type, otherwise the fixup fails.  When scaling is
 * active, the vertical/horizontal scale ratios (vsc/hsc) are computed
 * in 20.12 fixed point from the requested and native modes.
 *
 * Returns true on success, false if encoders disagree on scaling.
 */
bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct drm_encoder *encoder;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_encoder *radeon_encoder;
	bool first = true;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		radeon_encoder = to_radeon_encoder(encoder);
		/* only consider encoders driving this CRTC */
		if (encoder->crtc != crtc)
			continue;
		if (first) {
			/* set scaling: scale only when the requested mode is
			 * smaller than the panel's native mode */
			if (radeon_encoder->rmx_type == RMX_OFF)
				radeon_crtc->rmx_type = RMX_OFF;
			else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
				 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
				radeon_crtc->rmx_type = radeon_encoder->rmx_type;
			else
				radeon_crtc->rmx_type = RMX_OFF;
			/* copy native mode */
			memcpy(&radeon_crtc->native_mode,
			       &radeon_encoder->native_mode,
			       sizeof(struct drm_display_mode));
			first = false;
		} else {
			if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
				/* WARNING: Right now this can't happen but
				 * in the future we need to check that scaling
				 * are consistent accross different encoder
				 * (ie all encoder can work with the same
				 * scaling).
				 */
				DRM_ERROR("Scaling not consistent accross encoder.\n");
				return false;
			}
		}
	}
	if (radeon_crtc->rmx_type != RMX_OFF) {
		fixed20_12 a, b;
		/* NOTE(review): vsc divides vdisplay by the native *hdisplay*
		 * (and hsc the converse) - the axes look crossed; verify
		 * against the scaler programming code before changing. */
		a.full = rfixed_const(crtc->mode.vdisplay);
		b.full = rfixed_const(radeon_crtc->native_mode.hdisplay);
		radeon_crtc->vsc.full = rfixed_div(a, b);
		a.full = rfixed_const(crtc->mode.hdisplay);
		b.full = rfixed_const(radeon_crtc->native_mode.vdisplay);
		radeon_crtc->hsc.full = rfixed_div(a, b);
	} else {
		/* no scaling: identity ratios */
		radeon_crtc->vsc.full = rfixed_const(1);
		radeon_crtc->hsc.full = rfixed_const(1);
	}
	return true;
}