Rev 5271 | Rev 6938 | Go to most recent revision | Only display areas with differences | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 5271 | Rev 6104 | ||
---|---|---|---|
1 | /* |
1 | /* |
2 | * Copyright © 2007 David Airlie |
2 | * Copyright © 2007 David Airlie |
3 | * |
3 | * |
4 | * Permission is hereby granted, free of charge, to any person obtaining a |
4 | * Permission is hereby granted, free of charge, to any person obtaining a |
5 | * copy of this software and associated documentation files (the "Software"), |
5 | * copy of this software and associated documentation files (the "Software"), |
6 | * to deal in the Software without restriction, including without limitation |
6 | * to deal in the Software without restriction, including without limitation |
7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
8 | * and/or sell copies of the Software, and to permit persons to whom the |
8 | * and/or sell copies of the Software, and to permit persons to whom the |
9 | * Software is furnished to do so, subject to the following conditions: |
9 | * Software is furnished to do so, subject to the following conditions: |
10 | * |
10 | * |
11 | * The above copyright notice and this permission notice (including the next |
11 | * The above copyright notice and this permission notice (including the next |
12 | * paragraph) shall be included in all copies or substantial portions of the |
12 | * paragraph) shall be included in all copies or substantial portions of the |
13 | * Software. |
13 | * Software. |
14 | * |
14 | * |
15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER |
20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER |
21 | * DEALINGS IN THE SOFTWARE. |
21 | * DEALINGS IN THE SOFTWARE. |
22 | * |
22 | * |
23 | * Authors: |
23 | * Authors: |
24 | * David Airlie |
24 | * David Airlie |
25 | */ |
25 | */ |
26 | #include |
26 | #include |
27 | #include |
27 | #include |
28 | #include |
28 | #include |
29 | 29 | ||
30 | #include |
30 | #include |
31 | #include |
31 | #include |
32 | #include |
32 | #include |
33 | #include |
33 | #include |
34 | #include "radeon.h" |
34 | #include "radeon.h" |
35 | 35 | ||
36 | #include |
36 | #include |
37 | 37 | ||
/* Scanout framebuffer published for the rest of this port; both are set by
 * radeonfb_create() once the fbdev framebuffer has been built. */
struct drm_framebuffer *main_fb;
struct drm_gem_object *main_fb_obj;
40 | 40 | ||
41 | /* object hierarchy - |
41 | /* object hierarchy - |
42 | this contains a helper + a radeon fb |
42 | this contains a helper + a radeon fb |
43 | the helper contains a pointer to radeon framebuffer baseclass. |
43 | the helper contains a pointer to radeon framebuffer baseclass. |
44 | */ |
44 | */ |
45 | struct radeon_fbdev { |
45 | struct radeon_fbdev { |
46 | struct drm_fb_helper helper; |
46 | struct drm_fb_helper helper; |
47 | struct radeon_framebuffer rfb; |
47 | struct radeon_framebuffer rfb; |
48 | struct list_head fbdev_list; |
48 | struct list_head fbdev_list; |
49 | struct radeon_device *rdev; |
49 | struct radeon_device *rdev; |
50 | }; |
50 | }; |
51 | 51 | ||
52 | static struct fb_ops radeonfb_ops = { |
52 | static struct fb_ops radeonfb_ops = { |
53 | .owner = THIS_MODULE, |
53 | .owner = THIS_MODULE, |
54 | .fb_check_var = drm_fb_helper_check_var, |
54 | .fb_check_var = drm_fb_helper_check_var, |
55 | .fb_set_par = drm_fb_helper_set_par, |
55 | .fb_set_par = drm_fb_helper_set_par, |
56 | // .fb_fillrect = cfb_fillrect, |
56 | // .fb_fillrect = cfb_fillrect, |
57 | // .fb_copyarea = cfb_copyarea, |
57 | // .fb_copyarea = cfb_copyarea, |
58 | // .fb_imageblit = cfb_imageblit, |
58 | // .fb_imageblit = cfb_imageblit, |
59 | // .fb_pan_display = drm_fb_helper_pan_display, |
59 | // .fb_pan_display = drm_fb_helper_pan_display, |
60 | .fb_blank = drm_fb_helper_blank, |
60 | .fb_blank = drm_fb_helper_blank, |
61 | .fb_setcmap = drm_fb_helper_setcmap, |
61 | .fb_setcmap = drm_fb_helper_setcmap, |
62 | }; |
62 | }; |
63 | 63 | ||
64 | 64 | ||
65 | int radeon_align_pitch(struct radeon_device *rdev, int width, int bpp, bool tiled) |
65 | int radeon_align_pitch(struct radeon_device *rdev, int width, int bpp, bool tiled) |
66 | { |
66 | { |
67 | int aligned = width; |
67 | int aligned = width; |
68 | int align_large = (ASIC_IS_AVIVO(rdev)) || tiled; |
68 | int align_large = (ASIC_IS_AVIVO(rdev)) || tiled; |
69 | int pitch_mask = 0; |
69 | int pitch_mask = 0; |
70 | 70 | ||
71 | switch (bpp / 8) { |
71 | switch (bpp / 8) { |
72 | case 1: |
72 | case 1: |
73 | pitch_mask = align_large ? 255 : 127; |
73 | pitch_mask = align_large ? 255 : 127; |
74 | break; |
74 | break; |
75 | case 2: |
75 | case 2: |
76 | pitch_mask = align_large ? 127 : 31; |
76 | pitch_mask = align_large ? 127 : 31; |
77 | break; |
77 | break; |
78 | case 3: |
78 | case 3: |
79 | case 4: |
79 | case 4: |
80 | pitch_mask = align_large ? 63 : 15; |
80 | pitch_mask = align_large ? 63 : 15; |
81 | break; |
81 | break; |
82 | } |
82 | } |
83 | 83 | ||
84 | aligned += pitch_mask; |
84 | aligned += pitch_mask; |
85 | aligned &= ~pitch_mask; |
85 | aligned &= ~pitch_mask; |
86 | return aligned; |
86 | return aligned; |
87 | } |
87 | } |
88 | 88 | ||
89 | static void radeonfb_destroy_pinned_object(struct drm_gem_object *gobj) |
89 | static void radeonfb_destroy_pinned_object(struct drm_gem_object *gobj) |
90 | { |
90 | { |
91 | struct radeon_bo *rbo = gem_to_radeon_bo(gobj); |
91 | struct radeon_bo *rbo = gem_to_radeon_bo(gobj); |
92 | int ret; |
92 | int ret; |
93 | 93 | ||
94 | ret = radeon_bo_reserve(rbo, false); |
94 | ret = radeon_bo_reserve(rbo, false); |
95 | if (likely(ret == 0)) { |
95 | if (likely(ret == 0)) { |
96 | radeon_bo_kunmap(rbo); |
96 | radeon_bo_kunmap(rbo); |
97 | radeon_bo_unpin(rbo); |
97 | radeon_bo_unpin(rbo); |
98 | radeon_bo_unreserve(rbo); |
98 | radeon_bo_unreserve(rbo); |
99 | } |
99 | } |
100 | drm_gem_object_unreference_unlocked(gobj); |
100 | drm_gem_object_unreference_unlocked(gobj); |
101 | } |
101 | } |
102 | 102 | ||
103 | static int radeonfb_create_pinned_object(struct radeon_fbdev *rfbdev, |
103 | static int radeonfb_create_pinned_object(struct radeon_fbdev *rfbdev, |
104 | struct drm_mode_fb_cmd2 *mode_cmd, |
104 | struct drm_mode_fb_cmd2 *mode_cmd, |
105 | struct drm_gem_object **gobj_p) |
105 | struct drm_gem_object **gobj_p) |
106 | { |
106 | { |
107 | struct radeon_device *rdev = rfbdev->rdev; |
107 | struct radeon_device *rdev = rfbdev->rdev; |
108 | struct drm_gem_object *gobj = NULL; |
108 | struct drm_gem_object *gobj = NULL; |
109 | struct radeon_bo *rbo = NULL; |
109 | struct radeon_bo *rbo = NULL; |
110 | bool fb_tiled = false; /* useful for testing */ |
110 | bool fb_tiled = false; /* useful for testing */ |
111 | u32 tiling_flags = 0; |
111 | u32 tiling_flags = 0; |
112 | int ret; |
112 | int ret; |
113 | int aligned_size, size; |
113 | int aligned_size, size; |
114 | int height = mode_cmd->height; |
114 | int height = mode_cmd->height; |
115 | u32 bpp, depth; |
115 | u32 bpp, depth; |
116 | 116 | ||
117 | drm_fb_get_bpp_depth(mode_cmd->pixel_format, &depth, &bpp); |
117 | drm_fb_get_bpp_depth(mode_cmd->pixel_format, &depth, &bpp); |
118 | 118 | ||
119 | /* need to align pitch with crtc limits */ |
119 | /* need to align pitch with crtc limits */ |
120 | mode_cmd->pitches[0] = radeon_align_pitch(rdev, mode_cmd->width, bpp, |
120 | mode_cmd->pitches[0] = radeon_align_pitch(rdev, mode_cmd->width, bpp, |
121 | fb_tiled) * ((bpp + 1) / 8); |
121 | fb_tiled) * ((bpp + 1) / 8); |
122 | 122 | ||
123 | if (rdev->family >= CHIP_R600) |
123 | if (rdev->family >= CHIP_R600) |
124 | height = ALIGN(mode_cmd->height, 8); |
124 | height = ALIGN(mode_cmd->height, 8); |
125 | size = mode_cmd->pitches[0] * height; |
125 | size = mode_cmd->pitches[0] * height; |
126 | aligned_size = ALIGN(size, PAGE_SIZE); |
126 | aligned_size = ALIGN(size, PAGE_SIZE); |
127 | 127 | ||
128 | rbo = rdev->stollen_vga_memory; |
128 | rbo = rdev->stollen_vga_memory; |
129 | gobj = &rbo->gem_base; |
129 | gobj = &rbo->gem_base; |
130 | mutex_lock(&rdev->gem.mutex); |
130 | mutex_lock(&rdev->gem.mutex); |
131 | list_add_tail(&rbo->list, &rdev->gem.objects); |
131 | list_add_tail(&rbo->list, &rdev->gem.objects); |
132 | mutex_unlock(&rdev->gem.mutex); |
132 | mutex_unlock(&rdev->gem.mutex); |
133 | 133 | ||
134 | rbo = gem_to_radeon_bo(gobj); |
134 | rbo = gem_to_radeon_bo(gobj); |
135 | 135 | ||
136 | if (fb_tiled) |
136 | if (fb_tiled) |
137 | tiling_flags = RADEON_TILING_MACRO; |
137 | tiling_flags = RADEON_TILING_MACRO; |
138 | 138 | ||
139 | #ifdef __BIG_ENDIAN |
139 | #ifdef __BIG_ENDIAN |
140 | switch (bpp) { |
140 | switch (bpp) { |
141 | case 32: |
141 | case 32: |
142 | tiling_flags |= RADEON_TILING_SWAP_32BIT; |
142 | tiling_flags |= RADEON_TILING_SWAP_32BIT; |
143 | break; |
143 | break; |
144 | case 16: |
144 | case 16: |
145 | tiling_flags |= RADEON_TILING_SWAP_16BIT; |
145 | tiling_flags |= RADEON_TILING_SWAP_16BIT; |
146 | default: |
146 | default: |
147 | break; |
147 | break; |
148 | } |
148 | } |
149 | #endif |
149 | #endif |
150 | 150 | ||
151 | // if (tiling_flags) { |
151 | // if (tiling_flags) { |
152 | // ret = radeon_bo_set_tiling_flags(rbo, |
152 | // ret = radeon_bo_set_tiling_flags(rbo, |
153 | // tiling_flags | RADEON_TILING_SURFACE, |
153 | // tiling_flags | RADEON_TILING_SURFACE, |
154 | // mode_cmd->pitches[0]); |
154 | // mode_cmd->pitches[0]); |
155 | // if (ret) |
155 | // if (ret) |
156 | // dev_err(rdev->dev, "FB failed to set tiling flags\n"); |
156 | // dev_err(rdev->dev, "FB failed to set tiling flags\n"); |
157 | // } |
157 | // } |
158 | 158 | ||
159 | 159 | ||
160 | ret = radeon_bo_reserve(rbo, false); |
160 | ret = radeon_bo_reserve(rbo, false); |
161 | if (unlikely(ret != 0)) |
161 | if (unlikely(ret != 0)) |
162 | goto out_unref; |
162 | goto out_unref; |
163 | /* Only 27 bit offset for legacy CRTC */ |
163 | /* Only 27 bit offset for legacy CRTC */ |
164 | ret = radeon_bo_pin_restricted(rbo, RADEON_GEM_DOMAIN_VRAM, |
164 | ret = radeon_bo_pin_restricted(rbo, RADEON_GEM_DOMAIN_VRAM, |
165 | ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, |
165 | ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, |
166 | NULL); |
166 | NULL); |
167 | if (ret) { |
167 | if (ret) { |
168 | radeon_bo_unreserve(rbo); |
168 | radeon_bo_unreserve(rbo); |
169 | goto out_unref; |
169 | goto out_unref; |
170 | } |
170 | } |
171 | radeon_bo_unreserve(rbo); |
171 | radeon_bo_unreserve(rbo); |
172 | if (ret) { |
172 | if (ret) { |
173 | goto out_unref; |
173 | goto out_unref; |
174 | } |
174 | } |
175 | 175 | ||
176 | *gobj_p = gobj; |
176 | *gobj_p = gobj; |
177 | return 0; |
177 | return 0; |
178 | out_unref: |
178 | out_unref: |
179 | radeonfb_destroy_pinned_object(gobj); |
179 | radeonfb_destroy_pinned_object(gobj); |
180 | *gobj_p = NULL; |
180 | *gobj_p = NULL; |
181 | return ret; |
181 | return ret; |
182 | } |
182 | } |
183 | 183 | ||
184 | static int radeonfb_create(struct drm_fb_helper *helper, |
184 | static int radeonfb_create(struct drm_fb_helper *helper, |
185 | struct drm_fb_helper_surface_size *sizes) |
185 | struct drm_fb_helper_surface_size *sizes) |
186 | { |
186 | { |
187 | struct radeon_fbdev *rfbdev = |
187 | struct radeon_fbdev *rfbdev = |
188 | container_of(helper, struct radeon_fbdev, helper); |
188 | container_of(helper, struct radeon_fbdev, helper); |
189 | struct radeon_device *rdev = rfbdev->rdev; |
189 | struct radeon_device *rdev = rfbdev->rdev; |
190 | struct fb_info *info; |
190 | struct fb_info *info; |
191 | struct drm_framebuffer *fb = NULL; |
191 | struct drm_framebuffer *fb = NULL; |
192 | struct drm_mode_fb_cmd2 mode_cmd; |
192 | struct drm_mode_fb_cmd2 mode_cmd; |
193 | struct drm_gem_object *gobj = NULL; |
193 | struct drm_gem_object *gobj = NULL; |
194 | struct radeon_bo *rbo = NULL; |
194 | struct radeon_bo *rbo = NULL; |
195 | struct device *device = &rdev->pdev->dev; |
- | |
196 | int ret; |
195 | int ret; |
197 | unsigned long tmp; |
196 | unsigned long tmp; |
198 | 197 | ||
199 | mode_cmd.width = sizes->surface_width; |
198 | mode_cmd.width = sizes->surface_width; |
200 | mode_cmd.height = sizes->surface_height; |
199 | mode_cmd.height = sizes->surface_height; |
201 | 200 | ||
202 | /* avivo can't scanout real 24bpp */ |
201 | /* avivo can't scanout real 24bpp */ |
203 | if ((sizes->surface_bpp == 24) && ASIC_IS_AVIVO(rdev)) |
202 | if ((sizes->surface_bpp == 24) && ASIC_IS_AVIVO(rdev)) |
204 | sizes->surface_bpp = 32; |
203 | sizes->surface_bpp = 32; |
205 | 204 | ||
206 | mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp, |
205 | mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp, |
207 | sizes->surface_depth); |
206 | sizes->surface_depth); |
208 | 207 | ||
209 | ret = radeonfb_create_pinned_object(rfbdev, &mode_cmd, &gobj); |
208 | ret = radeonfb_create_pinned_object(rfbdev, &mode_cmd, &gobj); |
210 | if (ret) { |
209 | if (ret) { |
211 | DRM_ERROR("failed to create fbcon object %d\n", ret); |
210 | DRM_ERROR("failed to create fbcon object %d\n", ret); |
212 | return ret; |
211 | return ret; |
213 | } |
212 | } |
214 | 213 | ||
215 | rbo = gem_to_radeon_bo(gobj); |
214 | rbo = gem_to_radeon_bo(gobj); |
216 | 215 | ||
217 | /* okay we have an object now allocate the framebuffer */ |
216 | /* okay we have an object now allocate the framebuffer */ |
218 | info = framebuffer_alloc(0, device); |
217 | info = drm_fb_helper_alloc_fbi(helper); |
219 | if (info == NULL) { |
218 | if (IS_ERR(info)) { |
220 | ret = -ENOMEM; |
219 | ret = PTR_ERR(info); |
221 | goto out_unref; |
220 | goto out_unref; |
222 | } |
221 | } |
223 | 222 | ||
224 | info->par = rfbdev; |
223 | info->par = rfbdev; |
- | 224 | info->skip_vt_switch = true; |
|
225 | 225 | ||
226 | ret = radeon_framebuffer_init(rdev->ddev, &rfbdev->rfb, &mode_cmd, gobj); |
226 | ret = radeon_framebuffer_init(rdev->ddev, &rfbdev->rfb, &mode_cmd, gobj); |
227 | if (ret) { |
227 | if (ret) { |
228 | DRM_ERROR("failed to initialize framebuffer %d\n", ret); |
228 | DRM_ERROR("failed to initialize framebuffer %d\n", ret); |
229 | goto out_unref; |
229 | goto out_destroy_fbi; |
230 | } |
230 | } |
231 | 231 | ||
232 | fb = &rfbdev->rfb.base; |
232 | fb = &rfbdev->rfb.base; |
233 | 233 | ||
234 | /* setup helper */ |
234 | /* setup helper */ |
235 | rfbdev->helper.fb = fb; |
235 | rfbdev->helper.fb = fb; |
236 | rfbdev->helper.fbdev = info; |
- | |
237 | 236 | ||
238 | // memset_io(rbo->kptr, 0x0, radeon_bo_size(rbo)); |
237 | // memset_io(rbo->kptr, 0x0, radeon_bo_size(rbo)); |
239 | 238 | ||
240 | strcpy(info->fix.id, "radeondrmfb"); |
239 | strcpy(info->fix.id, "radeondrmfb"); |
241 | 240 | ||
242 | drm_fb_helper_fill_fix(info, fb->pitches[0], fb->depth); |
241 | drm_fb_helper_fill_fix(info, fb->pitches[0], fb->depth); |
243 | 242 | ||
244 | info->flags = FBINFO_DEFAULT | FBINFO_CAN_FORCE_OUTPUT; |
243 | info->flags = FBINFO_DEFAULT | FBINFO_CAN_FORCE_OUTPUT; |
245 | info->fbops = &radeonfb_ops; |
244 | info->fbops = &radeonfb_ops; |
246 | 245 | ||
247 | tmp = radeon_bo_gpu_offset(rbo) - rdev->mc.vram_start; |
246 | tmp = radeon_bo_gpu_offset(rbo) - rdev->mc.vram_start; |
248 | info->fix.smem_start = rdev->mc.aper_base + tmp; |
247 | info->fix.smem_start = rdev->mc.aper_base + tmp; |
249 | info->fix.smem_len = radeon_bo_size(rbo); |
248 | info->fix.smem_len = radeon_bo_size(rbo); |
250 | info->screen_base = rbo->kptr; |
249 | info->screen_base = rbo->kptr; |
251 | info->screen_size = radeon_bo_size(rbo); |
250 | info->screen_size = radeon_bo_size(rbo); |
252 | 251 | ||
253 | drm_fb_helper_fill_var(info, &rfbdev->helper, sizes->fb_width, sizes->fb_height); |
252 | drm_fb_helper_fill_var(info, &rfbdev->helper, sizes->fb_width, sizes->fb_height); |
254 | 253 | ||
255 | /* setup aperture base/size for vesafb takeover */ |
254 | /* setup aperture base/size for vesafb takeover */ |
256 | info->apertures = alloc_apertures(1); |
- | |
257 | if (!info->apertures) { |
- | |
258 | ret = -ENOMEM; |
- | |
259 | goto out_unref; |
- | |
260 | } |
- | |
261 | info->apertures->ranges[0].base = rdev->ddev->mode_config.fb_base; |
255 | info->apertures->ranges[0].base = rdev->ddev->mode_config.fb_base; |
262 | info->apertures->ranges[0].size = rdev->mc.aper_size; |
256 | info->apertures->ranges[0].size = rdev->mc.aper_size; |
263 | 257 | ||
264 | /* Use default scratch pixmap (info->pixmap.flags = FB_PIXMAP_SYSTEM) */ |
258 | /* Use default scratch pixmap (info->pixmap.flags = FB_PIXMAP_SYSTEM) */ |
265 | 259 | ||
266 | 260 | ||
267 | DRM_INFO("fb mappable at 0x%lX\n", info->fix.smem_start); |
261 | DRM_INFO("fb mappable at 0x%lX\n", info->fix.smem_start); |
268 | DRM_INFO("vram apper at 0x%lX\n", (unsigned long)rdev->mc.aper_base); |
262 | DRM_INFO("vram apper at 0x%lX\n", (unsigned long)rdev->mc.aper_base); |
269 | DRM_INFO("size %lu\n", (unsigned long)radeon_bo_size(rbo)); |
263 | DRM_INFO("size %lu\n", (unsigned long)radeon_bo_size(rbo)); |
270 | DRM_INFO("fb depth is %d\n", fb->depth); |
264 | DRM_INFO("fb depth is %d\n", fb->depth); |
271 | DRM_INFO(" pitch is %d\n", fb->pitches[0]); |
265 | DRM_INFO(" pitch is %d\n", fb->pitches[0]); |
272 | 266 | ||
273 | main_fb = fb; |
267 | main_fb = fb; |
274 | main_fb_obj = gobj; |
268 | main_fb_obj = gobj; |
275 | 269 | ||
276 | return 0; |
270 | return 0; |
- | 271 | ||
- | 272 | out_destroy_fbi: |
|
277 | 273 | // drm_fb_helper_release_fbi(helper); |
|
278 | out_unref: |
274 | out_unref: |
279 | if (rbo) { |
275 | if (rbo) { |
280 | 276 | ||
281 | } |
277 | } |
282 | if (fb && ret) { |
278 | if (fb && ret) { |
283 | kfree(fb); |
279 | kfree(fb); |
284 | } |
280 | } |
285 | return ret; |
281 | return ret; |
286 | } |
282 | } |
287 | 283 | ||
288 | 284 | ||
289 | static int radeon_fbdev_destroy(struct drm_device *dev, struct radeon_fbdev *rfbdev) |
285 | static int radeon_fbdev_destroy(struct drm_device *dev, struct radeon_fbdev *rfbdev) |
290 | { |
286 | { |
291 | struct fb_info *info; |
- | |
292 | struct radeon_framebuffer *rfb = &rfbdev->rfb; |
287 | struct radeon_framebuffer *rfb = &rfbdev->rfb; |
293 | - | ||
294 | if (rfbdev->helper.fbdev) { |
- | |
295 | info = rfbdev->helper.fbdev; |
- | |
296 | 288 | ||
297 | // unregister_framebuffer(info); |
289 | // drm_fb_helper_unregister_fbi(&rfbdev->helper); |
298 | // framebuffer_release(info); |
- | |
299 | } |
290 | // drm_fb_helper_release_fbi(&rfbdev->helper); |
300 | 291 | ||
301 | if (rfb->obj) { |
292 | if (rfb->obj) { |
302 | rfb->obj = NULL; |
293 | rfb->obj = NULL; |
303 | } |
294 | } |
304 | // drm_fb_helper_fini(&rfbdev->helper); |
295 | // drm_fb_helper_fini(&rfbdev->helper); |
305 | drm_framebuffer_cleanup(&rfb->base); |
296 | drm_framebuffer_cleanup(&rfb->base); |
306 | 297 | ||
307 | return 0; |
298 | return 0; |
308 | } |
299 | } |
309 | 300 | ||
310 | static const struct drm_fb_helper_funcs radeon_fb_helper_funcs = { |
301 | static const struct drm_fb_helper_funcs radeon_fb_helper_funcs = { |
311 | .gamma_set = radeon_crtc_fb_gamma_set, |
302 | .gamma_set = radeon_crtc_fb_gamma_set, |
312 | .gamma_get = radeon_crtc_fb_gamma_get, |
303 | .gamma_get = radeon_crtc_fb_gamma_get, |
313 | .fb_probe = radeonfb_create, |
304 | .fb_probe = radeonfb_create, |
314 | }; |
305 | }; |
315 | 306 | ||
316 | int radeon_fbdev_init(struct radeon_device *rdev) |
307 | int radeon_fbdev_init(struct radeon_device *rdev) |
317 | { |
308 | { |
318 | struct radeon_fbdev *rfbdev; |
309 | struct radeon_fbdev *rfbdev; |
319 | int bpp_sel = 32; |
310 | int bpp_sel = 32; |
320 | int ret; |
311 | int ret; |
321 | ENTER(); |
- | |
322 | 312 | ||
323 | /* select 8 bpp console on RN50 or 16MB cards */ |
313 | /* select 8 bpp console on RN50 or 16MB cards */ |
324 | if (ASIC_IS_RN50(rdev) || rdev->mc.real_vram_size <= (32*1024*1024)) |
314 | if (ASIC_IS_RN50(rdev) || rdev->mc.real_vram_size <= (32*1024*1024)) |
325 | bpp_sel = 8; |
315 | bpp_sel = 8; |
326 | 316 | ||
327 | rfbdev = kzalloc(sizeof(struct radeon_fbdev), GFP_KERNEL); |
317 | rfbdev = kzalloc(sizeof(struct radeon_fbdev), GFP_KERNEL); |
328 | if (!rfbdev) |
318 | if (!rfbdev) |
329 | return -ENOMEM; |
319 | return -ENOMEM; |
330 | 320 | ||
331 | rfbdev->rdev = rdev; |
321 | rfbdev->rdev = rdev; |
332 | rdev->mode_info.rfbdev = rfbdev; |
322 | rdev->mode_info.rfbdev = rfbdev; |
333 | 323 | ||
334 | drm_fb_helper_prepare(rdev->ddev, &rfbdev->helper, |
324 | drm_fb_helper_prepare(rdev->ddev, &rfbdev->helper, |
335 | &radeon_fb_helper_funcs); |
325 | &radeon_fb_helper_funcs); |
336 | 326 | ||
337 | ret = drm_fb_helper_init(rdev->ddev, &rfbdev->helper, |
327 | ret = drm_fb_helper_init(rdev->ddev, &rfbdev->helper, |
338 | rdev->num_crtc, |
328 | rdev->num_crtc, |
339 | RADEONFB_CONN_LIMIT); |
329 | RADEONFB_CONN_LIMIT); |
340 | if (ret) { |
330 | if (ret) |
341 | kfree(rfbdev); |
- | |
342 | return ret; |
331 | goto free; |
343 | } |
- | |
344 | 332 | ||
- | 333 | ret = drm_fb_helper_single_add_all_connectors(&rfbdev->helper); |
|
- | 334 | if (ret) |
|
345 | drm_fb_helper_single_add_all_connectors(&rfbdev->helper); |
335 | goto fini; |
346 | 336 | ||
347 | /* disable all the possible outputs/crtcs before entering KMS mode */ |
337 | /* disable all the possible outputs/crtcs before entering KMS mode */ |
348 | drm_helper_disable_unused_functions(rdev->ddev); |
338 | drm_helper_disable_unused_functions(rdev->ddev); |
349 | 339 | ||
350 | drm_fb_helper_initial_config(&rfbdev->helper, bpp_sel); |
340 | ret = drm_fb_helper_initial_config(&rfbdev->helper, bpp_sel); |
- | 341 | if (ret) |
|
351 | LEAVE(); |
342 | goto fini; |
- | 343 | ||
- | 344 | return 0; |
|
- | 345 | ||
- | 346 | fini: |
|
- | 347 | // drm_fb_helper_fini(&rfbdev->helper); |
|
- | 348 | free: |
|
352 | 349 | kfree(rfbdev); |
|
353 | return 0; |
350 | return ret; |
354 | } |
351 | } |
355 | 352 | ||
356 | void radeon_fbdev_fini(struct radeon_device *rdev) |
353 | void radeon_fbdev_fini(struct radeon_device *rdev) |
357 | { |
354 | { |
358 | if (!rdev->mode_info.rfbdev) |
355 | if (!rdev->mode_info.rfbdev) |
359 | return; |
356 | return; |
360 | 357 | ||
361 | radeon_fbdev_destroy(rdev->ddev, rdev->mode_info.rfbdev); |
358 | radeon_fbdev_destroy(rdev->ddev, rdev->mode_info.rfbdev); |
362 | kfree(rdev->mode_info.rfbdev); |
359 | kfree(rdev->mode_info.rfbdev); |
363 | rdev->mode_info.rfbdev = NULL; |
360 | rdev->mode_info.rfbdev = NULL; |
364 | } |
361 | } |
365 | - | ||
366 | - | ||
367 | int radeon_fbdev_total_size(struct radeon_device *rdev) |
- | |
368 | { |
- | |
369 | struct radeon_bo *robj; |
- | |
370 | int size = 0; |
- | |
371 | - | ||
372 | robj = gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj); |
- | |
373 | size += radeon_bo_size(robj); |
- | |
374 | return size; |
- | |
375 | } |
- | |
376 | 362 | ||
377 | bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj) |
363 | bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj) |
378 | { |
364 | { |
379 | if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj)) |
365 | if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj)) |
380 | return true; |
366 | return true; |
381 | return false; |
367 | return false; |
382 | }=>><> |
368 | } |
- | 369 | ||
- | 370 | void radeon_fb_add_connector(struct radeon_device *rdev, struct drm_connector *connector) |
|
- | 371 | { |
|
- | 372 | drm_fb_helper_add_one_connector(&rdev->mode_info.rfbdev->helper, connector); |
|
- | 373 | } |
|
- | 374 | ||
- | 375 | void radeon_fb_remove_connector(struct radeon_device *rdev, struct drm_connector *connector) |
|
- | 376 | { |
|
- | 377 | drm_fb_helper_remove_one_connector(&rdev->mode_info.rfbdev->helper, connector); |
|
- | 378 | }=>><> |