Rev 3263 | Go to most recent revision | Only display areas with differences | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 3263 | Rev 3266 | ||
---|---|---|---|
1 | //#include "../bitmap.h" |
1 | //#include "../bitmap.h" |
2 | 2 | ||
3 | #include |
3 | #include |
4 | #include |
4 | #include |
5 | 5 | ||
6 | #include "sna.h" |
6 | #include "sna.h" |
7 | 7 | ||
8 | #include |
8 | #include |
9 | 9 | ||
10 | static struct sna_fb sna_fb; |
10 | static struct sna_fb sna_fb; |
11 | 11 | ||
/* Request block passed to the kernel driver layer via KolibriOS
 * system function 68.17.  Packed so the in-memory layout matches the
 * kernel's expectation byte for byte. */
typedef struct __attribute__((packed))
{
    unsigned handle;   /* driver/service handle obtained from the kernel */
    unsigned io_code;  /* request code, e.g. SRV_GET_PCI_INFO */
    void *input;       /* request payload, or NULL */
    int inp_size;      /* payload size in bytes */
    void *output;      /* reply buffer, or NULL */
    int out_size;      /* reply buffer size in bytes */
}ioctl_t;
21 | 21 | ||
22 | 22 | ||
23 | static int call_service(ioctl_t *io) |
23 | static int call_service(ioctl_t *io) |
24 | { |
24 | { |
25 | int retval; |
25 | int retval; |
26 | 26 | ||
27 | asm volatile("int $0x40" |
27 | asm volatile("int $0x40" |
28 | :"=a"(retval) |
28 | :"=a"(retval) |
29 | :"a"(68),"b"(17),"c"(io) |
29 | :"a"(68),"b"(17),"c"(io) |
30 | :"memory","cc"); |
30 | :"memory","cc"); |
31 | 31 | ||
32 | return retval; |
32 | return retval; |
33 | }; |
33 | }; |
- | 34 | ||
/* KolibriOS system function 9: copy the information block of the
 * current thread (slot -1 = self) into `info`.
 * NOTE(review): the kernel writes up to 1024 bytes here -- callers
 * must supply a buffer at least that large; confirm against the
 * sysfn 9 specification.
 * Fix: declare the "memory" clobber -- the asm writes through `info`,
 * but the original listed no outputs or clobbers, so the compiler was
 * free to assume the buffer unchanged and reorder around the call. */
static inline void get_proc_info(char *info)
{
    __asm__ __volatile__(
        "int $0x40"
        :
        : "a"(9), "b"(info), "c"(-1)
        : "memory");
}
34 | 42 | ||
35 | const struct intel_device_info * |
43 | const struct intel_device_info * |
36 | intel_detect_chipset(struct pci_device *pci); |
44 | intel_detect_chipset(struct pci_device *pci); |
37 | 45 | ||
38 | //struct kgem_bo *create_bo(bitmap_t *bitmap); |
46 | //struct kgem_bo *create_bo(bitmap_t *bitmap); |
39 | 47 | ||
40 | static bool sna_solid_cache_init(struct sna *sna); |
48 | static bool sna_solid_cache_init(struct sna *sna); |
41 | 49 | ||
42 | struct sna *sna_device; |
50 | struct sna *sna_device; |
43 | 51 | ||
/* Render-state reset hook for the "no render" backend.
 * There is no backend state to reset, so this is a deliberate no-op. */
static void no_render_reset(struct sna *sna)
{
    (void)sna;  /* unused; kept to satisfy the hook signature */
}
48 | 56 | ||
49 | void no_render_init(struct sna *sna) |
57 | void no_render_init(struct sna *sna) |
50 | { |
58 | { |
51 | struct sna_render *render = &sna->render; |
59 | struct sna_render *render = &sna->render; |
52 | 60 | ||
53 | memset (render,0, sizeof (*render)); |
61 | memset (render,0, sizeof (*render)); |
54 | 62 | ||
55 | render->prefer_gpu = PREFER_GPU_BLT; |
63 | render->prefer_gpu = PREFER_GPU_BLT; |
56 | 64 | ||
57 | render->vertices = render->vertex_data; |
65 | render->vertices = render->vertex_data; |
58 | render->vertex_size = ARRAY_SIZE(render->vertex_data); |
66 | render->vertex_size = ARRAY_SIZE(render->vertex_data); |
59 | 67 | ||
60 | // render->composite = no_render_composite; |
68 | // render->composite = no_render_composite; |
61 | 69 | ||
62 | // render->copy_boxes = no_render_copy_boxes; |
70 | // render->copy_boxes = no_render_copy_boxes; |
63 | // render->copy = no_render_copy; |
71 | // render->copy = no_render_copy; |
64 | 72 | ||
65 | // render->fill_boxes = no_render_fill_boxes; |
73 | // render->fill_boxes = no_render_fill_boxes; |
66 | // render->fill = no_render_fill; |
74 | // render->fill = no_render_fill; |
67 | // render->fill_one = no_render_fill_one; |
75 | // render->fill_one = no_render_fill_one; |
68 | // render->clear = no_render_clear; |
76 | // render->clear = no_render_clear; |
69 | 77 | ||
70 | render->reset = no_render_reset; |
78 | render->reset = no_render_reset; |
71 | // render->flush = no_render_flush; |
79 | // render->flush = no_render_flush; |
72 | // render->fini = no_render_fini; |
80 | // render->fini = no_render_fini; |
73 | 81 | ||
74 | // sna->kgem.context_switch = no_render_context_switch; |
82 | // sna->kgem.context_switch = no_render_context_switch; |
75 | // sna->kgem.retire = no_render_retire; |
83 | // sna->kgem.retire = no_render_retire; |
76 | 84 | ||
77 | if (sna->kgem.gen >= 60) |
85 | if (sna->kgem.gen >= 60) |
78 | sna->kgem.ring = KGEM_RENDER; |
86 | sna->kgem.ring = KGEM_RENDER; |
79 | 87 | ||
80 | sna_vertex_init(sna); |
88 | sna_vertex_init(sna); |
81 | } |
89 | } |
82 | 90 | ||
83 | void sna_vertex_init(struct sna *sna) |
91 | void sna_vertex_init(struct sna *sna) |
84 | { |
92 | { |
85 | // pthread_mutex_init(&sna->render.lock, NULL); |
93 | // pthread_mutex_init(&sna->render.lock, NULL); |
86 | // pthread_cond_init(&sna->render.wait, NULL); |
94 | // pthread_cond_init(&sna->render.wait, NULL); |
87 | sna->render.active = 0; |
95 | sna->render.active = 0; |
88 | } |
96 | } |
89 | 97 | ||
90 | bool sna_accel_init(struct sna *sna) |
98 | bool sna_accel_init(struct sna *sna) |
91 | { |
99 | { |
92 | const char *backend; |
100 | const char *backend; |
93 | 101 | ||
94 | // list_init(&sna->deferred_free); |
102 | // list_init(&sna->deferred_free); |
95 | // list_init(&sna->dirty_pixmaps); |
103 | // list_init(&sna->dirty_pixmaps); |
96 | // list_init(&sna->active_pixmaps); |
104 | // list_init(&sna->active_pixmaps); |
97 | // list_init(&sna->inactive_clock[0]); |
105 | // list_init(&sna->inactive_clock[0]); |
98 | // list_init(&sna->inactive_clock[1]); |
106 | // list_init(&sna->inactive_clock[1]); |
99 | 107 | ||
100 | // sna_accel_install_timers(sna); |
108 | // sna_accel_install_timers(sna); |
101 | 109 | ||
102 | 110 | ||
103 | backend = "no"; |
111 | backend = "no"; |
104 | no_render_init(sna); |
112 | no_render_init(sna); |
105 | 113 | ||
106 | if (sna->info->gen >= 0100) { |
114 | if (sna->info->gen >= 0100) { |
107 | /* } else if (sna->info->gen >= 070) { |
115 | /* } else if (sna->info->gen >= 070) { |
108 | if (gen7_render_init(sna)) |
116 | if (gen7_render_init(sna)) |
109 | backend = "IvyBridge"; */ |
117 | backend = "IvyBridge"; */ |
110 | } else if (sna->info->gen >= 060) { |
118 | } else if (sna->info->gen >= 060) { |
111 | if (gen6_render_init(sna)) |
119 | if (gen6_render_init(sna)) |
112 | backend = "SandyBridge"; |
120 | backend = "SandyBridge"; |
113 | /* } else if (sna->info->gen >= 050) { |
121 | /* } else if (sna->info->gen >= 050) { |
114 | if (gen5_render_init(sna)) |
122 | if (gen5_render_init(sna)) |
115 | backend = "Ironlake"; |
123 | backend = "Ironlake"; |
116 | } else if (sna->info->gen >= 040) { |
124 | } else if (sna->info->gen >= 040) { |
117 | if (gen4_render_init(sna)) |
125 | if (gen4_render_init(sna)) |
118 | backend = "Broadwater/Crestline"; |
126 | backend = "Broadwater/Crestline"; |
119 | } else if (sna->info->gen >= 030) { |
127 | } else if (sna->info->gen >= 030) { |
120 | if (gen3_render_init(sna)) |
128 | if (gen3_render_init(sna)) |
121 | backend = "gen3"; |
129 | backend = "gen3"; |
122 | } else if (sna->info->gen >= 020) { |
130 | } else if (sna->info->gen >= 020) { |
123 | if (gen2_render_init(sna)) |
131 | if (gen2_render_init(sna)) |
124 | backend = "gen2"; */ |
132 | backend = "gen2"; */ |
125 | } |
133 | } |
126 | 134 | ||
127 | DBG(("%s(backend=%s, prefer_gpu=%x)\n", |
135 | DBG(("%s(backend=%s, prefer_gpu=%x)\n", |
128 | __FUNCTION__, backend, sna->render.prefer_gpu)); |
136 | __FUNCTION__, backend, sna->render.prefer_gpu)); |
129 | 137 | ||
130 | kgem_reset(&sna->kgem); |
138 | kgem_reset(&sna->kgem); |
131 | 139 | ||
132 | // if (!sna_solid_cache_init(sna)) |
140 | // if (!sna_solid_cache_init(sna)) |
133 | // return false; |
141 | // return false; |
134 | 142 | ||
135 | sna_device = sna; |
143 | sna_device = sna; |
136 | 144 | ||
137 | 145 | ||
138 | return kgem_init_fb(&sna->kgem, &sna_fb); |
146 | return kgem_init_fb(&sna->kgem, &sna_fb); |
139 | } |
147 | } |
140 | 148 | ||
141 | int sna_init(uint32_t service) |
149 | int sna_init(uint32_t service) |
142 | { |
150 | { |
143 | ioctl_t io; |
151 | ioctl_t io; |
144 | 152 | ||
145 | static struct pci_device device; |
153 | static struct pci_device device; |
146 | struct sna *sna; |
154 | struct sna *sna; |
147 | 155 | ||
148 | DBG(("%s\n", __FUNCTION__)); |
156 | DBG(("%s\n", __FUNCTION__)); |
149 | 157 | ||
150 | sna = malloc(sizeof(struct sna)); |
158 | sna = malloc(sizeof(struct sna)); |
151 | if (sna == NULL) |
159 | if (sna == NULL) |
152 | return false; |
160 | return false; |
153 | 161 | ||
154 | io.handle = service; |
162 | io.handle = service; |
155 | io.io_code = SRV_GET_PCI_INFO; |
163 | io.io_code = SRV_GET_PCI_INFO; |
156 | io.input = &device; |
164 | io.input = &device; |
157 | io.inp_size = sizeof(device); |
165 | io.inp_size = sizeof(device); |
158 | io.output = NULL; |
166 | io.output = NULL; |
159 | io.out_size = 0; |
167 | io.out_size = 0; |
160 | 168 | ||
161 | if (call_service(&io)!=0) |
169 | if (call_service(&io)!=0) |
162 | return false; |
170 | return false; |
163 | 171 | ||
164 | sna->PciInfo = &device; |
172 | sna->PciInfo = &device; |
165 | 173 | ||
166 | sna->info = intel_detect_chipset(sna->PciInfo); |
174 | sna->info = intel_detect_chipset(sna->PciInfo); |
167 | 175 | ||
168 | kgem_init(&sna->kgem, service, sna->PciInfo, sna->info->gen); |
176 | kgem_init(&sna->kgem, service, sna->PciInfo, sna->info->gen); |
169 | /* |
177 | /* |
170 | if (!xf86ReturnOptValBool(sna->Options, |
178 | if (!xf86ReturnOptValBool(sna->Options, |
171 | OPTION_RELAXED_FENCING, |
179 | OPTION_RELAXED_FENCING, |
172 | sna->kgem.has_relaxed_fencing)) { |
180 | sna->kgem.has_relaxed_fencing)) { |
173 | xf86DrvMsg(scrn->scrnIndex, |
181 | xf86DrvMsg(scrn->scrnIndex, |
174 | sna->kgem.has_relaxed_fencing ? X_CONFIG : X_PROBED, |
182 | sna->kgem.has_relaxed_fencing ? X_CONFIG : X_PROBED, |
175 | "Disabling use of relaxed fencing\n"); |
183 | "Disabling use of relaxed fencing\n"); |
176 | sna->kgem.has_relaxed_fencing = 0; |
184 | sna->kgem.has_relaxed_fencing = 0; |
177 | } |
185 | } |
178 | if (!xf86ReturnOptValBool(sna->Options, |
186 | if (!xf86ReturnOptValBool(sna->Options, |
179 | OPTION_VMAP, |
187 | OPTION_VMAP, |
180 | sna->kgem.has_vmap)) { |
188 | sna->kgem.has_vmap)) { |
181 | xf86DrvMsg(scrn->scrnIndex, |
189 | xf86DrvMsg(scrn->scrnIndex, |
182 | sna->kgem.has_vmap ? X_CONFIG : X_PROBED, |
190 | sna->kgem.has_vmap ? X_CONFIG : X_PROBED, |
183 | "Disabling use of vmap\n"); |
191 | "Disabling use of vmap\n"); |
184 | sna->kgem.has_vmap = 0; |
192 | sna->kgem.has_vmap = 0; |
185 | } |
193 | } |
186 | */ |
194 | */ |
187 | 195 | ||
188 | /* Disable tiling by default */ |
196 | /* Disable tiling by default */ |
189 | sna->tiling = SNA_TILING_DISABLE; |
197 | sna->tiling = SNA_TILING_DISABLE; |
190 | 198 | ||
191 | /* Default fail-safe value of 75 Hz */ |
199 | /* Default fail-safe value of 75 Hz */ |
192 | // sna->vblank_interval = 1000 * 1000 * 1000 / 75; |
200 | // sna->vblank_interval = 1000 * 1000 * 1000 / 75; |
193 | 201 | ||
194 | sna->flags = 0; |
202 | sna->flags = 0; |
195 | 203 | ||
196 | return sna_accel_init(sna); |
204 | return sna_accel_init(sna); |
197 | } |
205 | } |
198 | 206 | ||
199 | #if 0 |
207 | #if 0 |
200 | 208 | ||
/* (compiled out via the enclosing #if 0)
 * Allocate the solid-colour cache: one linear bo large enough for the
 * whole colour table, with slot 0 pre-seeded as opaque white.
 * Returns TRUE on success, FALSE if the backing bo cannot be created. */
static bool sna_solid_cache_init(struct sna *sna)
{
    struct sna_solid_cache *cache = &sna->render.solid_cache;

    DBG(("%s\n", __FUNCTION__));

    cache->cache_bo =
        kgem_create_linear(&sna->kgem, sizeof(cache->color));
    if (!cache->cache_bo)
        return FALSE;

    /*
     * Initialise [0] with white since it is very common and filling the
     * zeroth slot simplifies some of the checks.
     */
    cache->color[0] = 0xffffffff;
    cache->bo[0] = kgem_create_proxy(cache->cache_bo, 0, sizeof(uint32_t));
    cache->bo[0]->pitch = 4;
    cache->dirty = 1;   /* slot 0 not yet uploaded to the GPU */
    cache->size = 1;
    cache->last = 0;

    return TRUE;
}
225 | 233 | ||
/* (compiled out via the enclosing #if 0)
 * Upload the populated portion of the solid-colour table to its GPU
 * buffer and clear the dirty flag.  `last` is reset so the next lookup
 * starts from slot 0.  Must only be called while the cache is dirty
 * and non-empty (asserted). */
void
sna_render_flush_solid(struct sna *sna)
{
    struct sna_solid_cache *cache = &sna->render.solid_cache;

    DBG(("sna_render_flush_solid(size=%d)\n", cache->size));
    assert(cache->dirty);
    assert(cache->size);

    kgem_bo_write(&sna->kgem, cache->cache_bo,
                  cache->color, cache->size*sizeof(uint32_t));
    cache->dirty = 0;
    cache->last = 0;
}
240 | 248 | ||
/* (compiled out via the enclosing #if 0)
 * Recycle the solid-colour cache: drop every per-colour proxy bo and
 * the backing bo, then recreate the backing store with slot 0 (white)
 * re-proxied.  Skipped unless `force` is set or the backing bo has
 * migrated to the GPU domain.  When forced, the logical size is also
 * reset to 1 so all non-white slots are reusable. */
static void
sna_render_finish_solid(struct sna *sna, bool force)
{
    struct sna_solid_cache *cache = &sna->render.solid_cache;
    int i;

    DBG(("sna_render_finish_solid(force=%d, domain=%d, busy=%d, dirty=%d)\n",
         force, cache->cache_bo->domain, cache->cache_bo->rq != NULL, cache->dirty));

    if (!force && cache->cache_bo->domain != DOMAIN_GPU)
        return;

    /* Flush pending colours first so nothing is lost on recycle. */
    if (cache->dirty)
        sna_render_flush_solid(sna);

    for (i = 0; i < cache->size; i++) {
        if (cache->bo[i] == NULL)
            continue;

        kgem_bo_destroy(&sna->kgem, cache->bo[i]);
        cache->bo[i] = NULL;
    }
    kgem_bo_destroy(&sna->kgem, cache->cache_bo);

    DBG(("sna_render_finish_solid reset\n"));

    /* Recreate backing storage and re-seed slot 0 (white). */
    cache->cache_bo = kgem_create_linear(&sna->kgem, sizeof(cache->color));
    cache->bo[0] = kgem_create_proxy(cache->cache_bo, 0, sizeof(uint32_t));
    cache->bo[0]->pitch = 4;
    if (force)
        cache->size = 1;
}
273 | 281 | ||
274 | 282 | ||
/* (compiled out via the enclosing #if 0)
 * Return a referenced 1x1 bo holding `color`, backed by a slot of the
 * solid-colour cache.  Fast paths: white is always slot 0, and `last`
 * remembers the most recently returned slot.  On a miss the cache is
 * recycled (fully reset if the colour table is exhausted) and a new
 * dirty slot is appended. */
struct kgem_bo *
sna_render_get_solid(struct sna *sna, uint32_t color)
{
    struct sna_solid_cache *cache = &sna->render.solid_cache;
    int i;

    DBG(("%s: %08x\n", __FUNCTION__, color));

//	if ((color & 0xffffff) == 0) /* alpha only */
//		return kgem_bo_reference(sna->render.alpha_cache.bo[color>>24]);

    if (color == 0xffffffff) {
        DBG(("%s(white)\n", __FUNCTION__));
        return kgem_bo_reference(cache->bo[0]);
    }

    if (cache->color[cache->last] == color) {
        DBG(("sna_render_get_solid(%d) = %x (last)\n",
             cache->last, color));
        return kgem_bo_reference(cache->bo[cache->last]);
    }

    /* Linear scan; slot 0 is reserved for white, so start at 1. */
    for (i = 1; i < cache->size; i++) {
        if (cache->color[i] == color) {
            if (cache->bo[i] == NULL) {
                /* Colour known but its proxy was dropped by a recycle. */
                DBG(("sna_render_get_solid(%d) = %x (recreate)\n",
                     i, color));
                goto create;
            } else {
                DBG(("sna_render_get_solid(%d) = %x (old)\n",
                     i, color));
                goto done;
            }
        }
    }

    /* Miss: i == cache->size here.  Recycle the cache, forcing a full
     * reset only when the colour table is completely exhausted. */
    sna_render_finish_solid(sna, i == ARRAY_SIZE(cache->color));

    i = cache->size++;
    cache->color[i] = color;
    cache->dirty = 1;   /* new colour needs uploading before use */
    DBG(("sna_render_get_solid(%d) = %x (new)\n", i, color));

create:
    cache->bo[i] = kgem_create_proxy(cache->cache_bo,
                     i*sizeof(uint32_t), sizeof(uint32_t));
    cache->bo[i]->pitch = 4;

done:
    cache->last = i;
    return kgem_bo_reference(cache->bo[i]);
}
327 | 335 | ||
328 | #endif |
336 | #endif |
329 | 337 | ||
330 | 338 | ||
331 | int sna_blit_copy(bitmap_t *src_bitmap, int dst_x, int dst_y, |
339 | int sna_blit_copy(bitmap_t *src_bitmap, int dst_x, int dst_y, |
332 | int w, int h, int src_x, int src_y) |
340 | int w, int h, int src_x, int src_y) |
333 | 341 | ||
334 | { |
342 | { |
335 | struct sna_copy_op copy; |
343 | struct sna_copy_op copy; |
336 | struct _Pixmap src, dst; |
344 | struct _Pixmap src, dst; |
337 | struct kgem_bo *src_bo; |
345 | struct kgem_bo *src_bo; |
- | 346 | ||
- | 347 | char proc_info[1024]; |
|
- | 348 | int winx, winy; |
|
- | 349 | ||
- | 350 | get_proc_info(proc_info); |
|
- | 351 | ||
- | 352 | winx = *(uint32_t*)(proc_info+34); |
|
- | 353 | winy = *(uint32_t*)(proc_info+38); |
|
338 | 354 | ||
339 | memset(&src, 0, sizeof(src)); |
355 | memset(&src, 0, sizeof(src)); |
340 | memset(&dst, 0, sizeof(dst)); |
356 | memset(&dst, 0, sizeof(dst)); |
341 | 357 | ||
342 | src.drawable.bitsPerPixel = 32; |
358 | src.drawable.bitsPerPixel = 32; |
343 | src.drawable.width = src_bitmap->width; |
359 | src.drawable.width = src_bitmap->width; |
344 | src.drawable.height = src_bitmap->height; |
360 | src.drawable.height = src_bitmap->height; |
345 | 361 | ||
346 | dst.drawable.bitsPerPixel = 32; |
362 | dst.drawable.bitsPerPixel = 32; |
347 | dst.drawable.width = sna_fb.width; |
363 | dst.drawable.width = sna_fb.width; |
348 | dst.drawable.height = sna_fb.height; |
364 | dst.drawable.height = sna_fb.height; |
349 | 365 | ||
350 | memset(©, 0, sizeof(copy)); |
366 | memset(©, 0, sizeof(copy)); |
351 | 367 | ||
352 | src_bo = (struct kgem_bo*)src_bitmap->handle; |
368 | src_bo = (struct kgem_bo*)src_bitmap->handle; |
353 | 369 | ||
354 | if( sna_device->render.copy(sna_device, GXcopy, |
370 | if( sna_device->render.copy(sna_device, GXcopy, |
355 | &src, src_bo, |
371 | &src, src_bo, |
356 | &dst, sna_fb.fb_bo, ©) ) |
372 | &dst, sna_fb.fb_bo, ©) ) |
357 | { |
373 | { |
358 | copy.blt(sna_device, ©, src_x, src_y, w, h, dst_x, dst_y); |
374 | copy.blt(sna_device, ©, src_x, src_y, w, h, winx+dst_x, winy+dst_y); |
359 | copy.done(sna_device, ©); |
375 | copy.done(sna_device, ©); |
360 | } |
376 | } |
361 | 377 | ||
362 | kgem_submit(&sna_device->kgem); |
378 | kgem_submit(&sna_device->kgem); |
363 | 379 | ||
364 | // __asm__ __volatile__("int3"); |
380 | // __asm__ __volatile__("int3"); |
365 | 381 | ||
366 | }; |
382 | }; |
367 | 383 | ||
368 | int sna_create_bitmap(bitmap_t *bitmap) |
384 | int sna_create_bitmap(bitmap_t *bitmap) |
369 | { |
385 | { |
370 | struct kgem_bo *bo; |
386 | struct kgem_bo *bo; |
371 | 387 | ||
372 | bo = kgem_create_2d(&sna_device->kgem, bitmap->width, bitmap->height, |
388 | bo = kgem_create_2d(&sna_device->kgem, bitmap->width, bitmap->height, |
373 | 32,I915_TILING_NONE, CREATE_CPU_MAP); |
389 | 32,I915_TILING_NONE, CREATE_CPU_MAP); |
374 | 390 | ||
375 | if(bo == NULL) |
391 | if(bo == NULL) |
376 | goto err_1; |
392 | goto err_1; |
377 | 393 | ||
378 | void *map = kgem_bo_map(&sna_device->kgem, bo); |
394 | void *map = kgem_bo_map(&sna_device->kgem, bo); |
379 | if(map == NULL) |
395 | if(map == NULL) |
380 | goto err_2; |
396 | goto err_2; |
381 | 397 | ||
382 | bitmap->handle = (uint32_t)bo; |
398 | bitmap->handle = (uint32_t)bo; |
383 | bitmap->pitch = bo->pitch; |
399 | bitmap->pitch = bo->pitch; |
384 | bitmap->data = map; |
400 | bitmap->data = map; |
385 | 401 | ||
386 | return 0; |
402 | return 0; |
387 | 403 | ||
388 | err_2: |
404 | err_2: |
389 | kgem_bo_destroy(&sna_device->kgem, bo); |
405 | kgem_bo_destroy(&sna_device->kgem, bo); |
390 | 406 | ||
391 | err_1: |
407 | err_1: |
392 | return -1; |
408 | return -1; |
- | 409 | ||
393 | }; |
410 | }; |
- | 411 | ||
- | 412 | void sna_lock_bitmap(bitmap_t *bitmap) |
|
- | 413 | { |
|
- | 414 | struct kgem_bo *bo; |
|
- | 415 | ||
- | 416 | bo = (struct kgem_bo *)bitmap->handle; |
|
- | 417 | ||
- | 418 | kgem_bo_sync__cpu(&sna_device->kgem, bo); |
|
- | 419 | ||
- | 420 | }; |
|
- | 421 | ||
- | 422 | ||
- | 423 | ||
394 | /* |
424 | /* |
395 | 425 | ||
396 | int sna_blit_tex(bitmap_t *dst_bitmap, int dst_x, int dst_y, |
426 | int sna_blit_tex(bitmap_t *dst_bitmap, int dst_x, int dst_y, |
397 | int w, int h, bitmap_t *src_bitmap, int src_x, int src_y, |
427 | int w, int h, bitmap_t *src_bitmap, int src_x, int src_y, |
398 | bitmap_t *mask_bitmap) |
428 | bitmap_t *mask_bitmap) |
399 | 429 | ||
400 | { |
430 | { |
401 | struct sna_composite_op cop; |
431 | struct sna_composite_op cop; |
402 | batchbuffer_t execbuffer; |
432 | batchbuffer_t execbuffer; |
403 | BoxRec box; |
433 | BoxRec box; |
404 | 434 | ||
405 | struct kgem_bo src_bo, mask_bo, dst_bo; |
435 | struct kgem_bo src_bo, mask_bo, dst_bo; |
406 | 436 | ||
407 | memset(&cop, 0, sizeof(cop)); |
437 | memset(&cop, 0, sizeof(cop)); |
408 | memset(&execbuffer, 0, sizeof(execbuffer)); |
438 | memset(&execbuffer, 0, sizeof(execbuffer)); |
409 | memset(&src_bo, 0, sizeof(src_bo)); |
439 | memset(&src_bo, 0, sizeof(src_bo)); |
410 | memset(&dst_bo, 0, sizeof(dst_bo)); |
440 | memset(&dst_bo, 0, sizeof(dst_bo)); |
411 | memset(&mask_bo, 0, sizeof(mask_bo)); |
441 | memset(&mask_bo, 0, sizeof(mask_bo)); |
412 | 442 | ||
413 | src_bo.gaddr = src_bitmap->gaddr; |
443 | src_bo.gaddr = src_bitmap->gaddr; |
414 | src_bo.pitch = src_bitmap->pitch; |
444 | src_bo.pitch = src_bitmap->pitch; |
415 | src_bo.tiling = 0; |
445 | src_bo.tiling = 0; |
416 | 446 | ||
417 | dst_bo.gaddr = dst_bitmap->gaddr; |
447 | dst_bo.gaddr = dst_bitmap->gaddr; |
418 | dst_bo.pitch = dst_bitmap->pitch; |
448 | dst_bo.pitch = dst_bitmap->pitch; |
419 | dst_bo.tiling = 0; |
449 | dst_bo.tiling = 0; |
420 | 450 | ||
421 | mask_bo.gaddr = mask_bitmap->gaddr; |
451 | mask_bo.gaddr = mask_bitmap->gaddr; |
422 | mask_bo.pitch = mask_bitmap->pitch; |
452 | mask_bo.pitch = mask_bitmap->pitch; |
423 | mask_bo.tiling = 0; |
453 | mask_bo.tiling = 0; |
424 | 454 | ||
425 | box.x1 = dst_x; |
455 | box.x1 = dst_x; |
426 | box.y1 = dst_y; |
456 | box.y1 = dst_y; |
427 | box.x2 = dst_x+w; |
457 | box.x2 = dst_x+w; |
428 | box.y2 = dst_y+h; |
458 | box.y2 = dst_y+h; |
429 | 459 | ||
430 | sna_device->render.composite(sna_device, 0, |
460 | sna_device->render.composite(sna_device, 0, |
431 | src_bitmap, &src_bo, |
461 | src_bitmap, &src_bo, |
432 | mask_bitmap, &mask_bo, |
462 | mask_bitmap, &mask_bo, |
433 | dst_bitmap, &dst_bo, |
463 | dst_bitmap, &dst_bo, |
434 | src_x, src_y, |
464 | src_x, src_y, |
435 | src_x, src_y, |
465 | src_x, src_y, |
436 | dst_x, dst_y, |
466 | dst_x, dst_y, |
437 | w, h, &cop); |
467 | w, h, &cop); |
438 | 468 | ||
439 | cop.box(sna_device, &cop, &box); |
469 | cop.box(sna_device, &cop, &box); |
440 | cop.done(sna_device, &cop); |
470 | cop.done(sna_device, &cop); |
441 | 471 | ||
442 | INIT_LIST_HEAD(&execbuffer.objects); |
472 | INIT_LIST_HEAD(&execbuffer.objects); |
443 | list_add_tail(&src_bitmap->obj->exec_list, &execbuffer.objects); |
473 | list_add_tail(&src_bitmap->obj->exec_list, &execbuffer.objects); |
444 | list_add_tail(&mask_bitmap->obj->exec_list, &execbuffer.objects); |
474 | list_add_tail(&mask_bitmap->obj->exec_list, &execbuffer.objects); |
445 | 475 | ||
446 | _kgem_submit(&sna_device->kgem, &execbuffer); |
476 | _kgem_submit(&sna_device->kgem, &execbuffer); |
447 | 477 | ||
448 | }; |
478 | }; |
449 | 479 | ||
450 | */ |
480 | */ |
451 | 481 | ||
/* Per-generation chipset descriptors.  `gen` is encoded in octal as
 * 0<major><minor>: 030 = gen3 (i915), 045 = gen4.5 (G4x),
 * 075 = gen7.5 (Haswell).  -1 marks an unrecognised device. */
static const struct intel_device_info intel_generic_info = {
    .gen = -1,
};

static const struct intel_device_info intel_i915_info = {
    .gen = 030,
};
static const struct intel_device_info intel_i945_info = {
    .gen = 031,
};

static const struct intel_device_info intel_g33_info = {
    .gen = 033,
};

static const struct intel_device_info intel_i965_info = {
    .gen = 040,
};

static const struct intel_device_info intel_g4x_info = {
    .gen = 045,
};

static const struct intel_device_info intel_ironlake_info = {
    .gen = 050,
};

static const struct intel_device_info intel_sandybridge_info = {
    .gen = 060,
};

static const struct intel_device_info intel_ivybridge_info = {
    .gen = 070,
};

static const struct intel_device_info intel_valleyview_info = {
    .gen = 071,
};

static const struct intel_device_info intel_haswell_info = {
    .gen = 075,
};
494 | 524 | ||
/* Build a pci_id_match entry: any Intel (vendor 0x8086) device with PCI
 * id `d` whose base class is display (0x03), carrying device-info table
 * `i` as match data. */
#define INTEL_DEVICE_MATCH(d,i) \
    { 0x8086, (d), PCI_MATCH_ANY, PCI_MATCH_ANY, 0x3 << 16, 0xff << 16, (intptr_t)(i) }
497 | 527 | ||
498 | 528 | ||
499 | static const struct pci_id_match intel_device_match[] = { |
529 | static const struct pci_id_match intel_device_match[] = { |
500 | 530 | ||
501 | 531 | ||
502 | INTEL_DEVICE_MATCH (PCI_CHIP_I915_G, &intel_i915_info ), |
532 | INTEL_DEVICE_MATCH (PCI_CHIP_I915_G, &intel_i915_info ), |
503 | INTEL_DEVICE_MATCH (PCI_CHIP_E7221_G, &intel_i915_info ), |
533 | INTEL_DEVICE_MATCH (PCI_CHIP_E7221_G, &intel_i915_info ), |
504 | INTEL_DEVICE_MATCH (PCI_CHIP_I915_GM, &intel_i915_info ), |
534 | INTEL_DEVICE_MATCH (PCI_CHIP_I915_GM, &intel_i915_info ), |
505 | INTEL_DEVICE_MATCH (PCI_CHIP_I945_G, &intel_i945_info ), |
535 | INTEL_DEVICE_MATCH (PCI_CHIP_I945_G, &intel_i945_info ), |
506 | INTEL_DEVICE_MATCH (PCI_CHIP_I945_GM, &intel_i945_info ), |
536 | INTEL_DEVICE_MATCH (PCI_CHIP_I945_GM, &intel_i945_info ), |
507 | INTEL_DEVICE_MATCH (PCI_CHIP_I945_GME, &intel_i945_info ), |
537 | INTEL_DEVICE_MATCH (PCI_CHIP_I945_GME, &intel_i945_info ), |
508 | 538 | ||
509 | INTEL_DEVICE_MATCH (PCI_CHIP_PINEVIEW_M, &intel_g33_info ), |
539 | INTEL_DEVICE_MATCH (PCI_CHIP_PINEVIEW_M, &intel_g33_info ), |
510 | INTEL_DEVICE_MATCH (PCI_CHIP_PINEVIEW_G, &intel_g33_info ), |
540 | INTEL_DEVICE_MATCH (PCI_CHIP_PINEVIEW_G, &intel_g33_info ), |
511 | INTEL_DEVICE_MATCH (PCI_CHIP_G33_G, &intel_g33_info ), |
541 | INTEL_DEVICE_MATCH (PCI_CHIP_G33_G, &intel_g33_info ), |
512 | INTEL_DEVICE_MATCH (PCI_CHIP_Q33_G, &intel_g33_info ), |
542 | INTEL_DEVICE_MATCH (PCI_CHIP_Q33_G, &intel_g33_info ), |
513 | /* Another marketing win: Q35 is another g33 device not a gen4 part |
543 | /* Another marketing win: Q35 is another g33 device not a gen4 part |
514 | * like its G35 brethren. |
544 | * like its G35 brethren. |
515 | */ |
545 | */ |
516 | INTEL_DEVICE_MATCH (PCI_CHIP_Q35_G, &intel_g33_info ), |
546 | INTEL_DEVICE_MATCH (PCI_CHIP_Q35_G, &intel_g33_info ), |
517 | 547 | ||
518 | INTEL_DEVICE_MATCH (PCI_CHIP_I965_G, &intel_i965_info ), |
548 | INTEL_DEVICE_MATCH (PCI_CHIP_I965_G, &intel_i965_info ), |
519 | INTEL_DEVICE_MATCH (PCI_CHIP_G35_G, &intel_i965_info ), |
549 | INTEL_DEVICE_MATCH (PCI_CHIP_G35_G, &intel_i965_info ), |
520 | INTEL_DEVICE_MATCH (PCI_CHIP_I965_Q, &intel_i965_info ), |
550 | INTEL_DEVICE_MATCH (PCI_CHIP_I965_Q, &intel_i965_info ), |
521 | INTEL_DEVICE_MATCH (PCI_CHIP_I946_GZ, &intel_i965_info ), |
551 | INTEL_DEVICE_MATCH (PCI_CHIP_I946_GZ, &intel_i965_info ), |
522 | INTEL_DEVICE_MATCH (PCI_CHIP_I965_GM, &intel_i965_info ), |
552 | INTEL_DEVICE_MATCH (PCI_CHIP_I965_GM, &intel_i965_info ), |
523 | INTEL_DEVICE_MATCH (PCI_CHIP_I965_GME, &intel_i965_info ), |
553 | INTEL_DEVICE_MATCH (PCI_CHIP_I965_GME, &intel_i965_info ), |
524 | 554 | ||
525 | INTEL_DEVICE_MATCH (PCI_CHIP_GM45_GM, &intel_g4x_info ), |
555 | INTEL_DEVICE_MATCH (PCI_CHIP_GM45_GM, &intel_g4x_info ), |
526 | INTEL_DEVICE_MATCH (PCI_CHIP_G45_E_G, &intel_g4x_info ), |
556 | INTEL_DEVICE_MATCH (PCI_CHIP_G45_E_G, &intel_g4x_info ), |
527 | INTEL_DEVICE_MATCH (PCI_CHIP_G45_G, &intel_g4x_info ), |
557 | INTEL_DEVICE_MATCH (PCI_CHIP_G45_G, &intel_g4x_info ), |
528 | INTEL_DEVICE_MATCH (PCI_CHIP_Q45_G, &intel_g4x_info ), |
558 | INTEL_DEVICE_MATCH (PCI_CHIP_Q45_G, &intel_g4x_info ), |
529 | INTEL_DEVICE_MATCH (PCI_CHIP_G41_G, &intel_g4x_info ), |
559 | INTEL_DEVICE_MATCH (PCI_CHIP_G41_G, &intel_g4x_info ), |
530 | INTEL_DEVICE_MATCH (PCI_CHIP_B43_G, &intel_g4x_info ), |
560 | INTEL_DEVICE_MATCH (PCI_CHIP_B43_G, &intel_g4x_info ), |
531 | INTEL_DEVICE_MATCH (PCI_CHIP_B43_G1, &intel_g4x_info ), |
561 | INTEL_DEVICE_MATCH (PCI_CHIP_B43_G1, &intel_g4x_info ), |
532 | 562 | ||
533 | INTEL_DEVICE_MATCH (PCI_CHIP_IRONLAKE_D_G, &intel_ironlake_info ), |
563 | INTEL_DEVICE_MATCH (PCI_CHIP_IRONLAKE_D_G, &intel_ironlake_info ), |
534 | INTEL_DEVICE_MATCH (PCI_CHIP_IRONLAKE_M_G, &intel_ironlake_info ), |
564 | INTEL_DEVICE_MATCH (PCI_CHIP_IRONLAKE_M_G, &intel_ironlake_info ), |
535 | 565 | ||
536 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_GT1, &intel_sandybridge_info ), |
566 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_GT1, &intel_sandybridge_info ), |
537 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_GT2, &intel_sandybridge_info ), |
567 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_GT2, &intel_sandybridge_info ), |
538 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_GT2_PLUS, &intel_sandybridge_info ), |
568 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_GT2_PLUS, &intel_sandybridge_info ), |
539 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_M_GT1, &intel_sandybridge_info ), |
569 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_M_GT1, &intel_sandybridge_info ), |
540 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_M_GT2, &intel_sandybridge_info ), |
570 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_M_GT2, &intel_sandybridge_info ), |
541 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_M_GT2_PLUS, &intel_sandybridge_info ), |
571 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_M_GT2_PLUS, &intel_sandybridge_info ), |
542 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_S_GT, &intel_sandybridge_info ), |
572 | INTEL_DEVICE_MATCH (PCI_CHIP_SANDYBRIDGE_S_GT, &intel_sandybridge_info ), |
543 | 573 | ||
544 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_M_GT1, &intel_ivybridge_info ), |
574 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_M_GT1, &intel_ivybridge_info ), |
545 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_M_GT2, &intel_ivybridge_info ), |
575 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_M_GT2, &intel_ivybridge_info ), |
546 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_D_GT1, &intel_ivybridge_info ), |
576 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_D_GT1, &intel_ivybridge_info ), |
547 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_D_GT2, &intel_ivybridge_info ), |
577 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_D_GT2, &intel_ivybridge_info ), |
548 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_S_GT1, &intel_ivybridge_info ), |
578 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_S_GT1, &intel_ivybridge_info ), |
549 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_S_GT2, &intel_ivybridge_info ), |
579 | INTEL_DEVICE_MATCH (PCI_CHIP_IVYBRIDGE_S_GT2, &intel_ivybridge_info ), |
550 | 580 | ||
551 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_D_GT1, &intel_haswell_info ), |
581 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_D_GT1, &intel_haswell_info ), |
552 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_D_GT2, &intel_haswell_info ), |
582 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_D_GT2, &intel_haswell_info ), |
553 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_D_GT2_PLUS, &intel_haswell_info ), |
583 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_D_GT2_PLUS, &intel_haswell_info ), |
554 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_M_GT1, &intel_haswell_info ), |
584 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_M_GT1, &intel_haswell_info ), |
555 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_M_GT2, &intel_haswell_info ), |
585 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_M_GT2, &intel_haswell_info ), |
556 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_M_GT2_PLUS, &intel_haswell_info ), |
586 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_M_GT2_PLUS, &intel_haswell_info ), |
557 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_S_GT1, &intel_haswell_info ), |
587 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_S_GT1, &intel_haswell_info ), |
558 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_S_GT2, &intel_haswell_info ), |
588 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_S_GT2, &intel_haswell_info ), |
559 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_S_GT2_PLUS, &intel_haswell_info ), |
589 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_S_GT2_PLUS, &intel_haswell_info ), |
560 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_D_GT1, &intel_haswell_info ), |
590 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_D_GT1, &intel_haswell_info ), |
561 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_D_GT2, &intel_haswell_info ), |
591 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_D_GT2, &intel_haswell_info ), |
562 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_D_GT2_PLUS, &intel_haswell_info ), |
592 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_D_GT2_PLUS, &intel_haswell_info ), |
563 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_M_GT1, &intel_haswell_info ), |
593 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_M_GT1, &intel_haswell_info ), |
564 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_M_GT2, &intel_haswell_info ), |
594 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_M_GT2, &intel_haswell_info ), |
565 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_M_GT2_PLUS, &intel_haswell_info ), |
595 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_M_GT2_PLUS, &intel_haswell_info ), |
566 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_S_GT1, &intel_haswell_info ), |
596 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_S_GT1, &intel_haswell_info ), |
567 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_S_GT2, &intel_haswell_info ), |
597 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_S_GT2, &intel_haswell_info ), |
568 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_S_GT2_PLUS, &intel_haswell_info ), |
598 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_SDV_S_GT2_PLUS, &intel_haswell_info ), |
569 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_D_GT1, &intel_haswell_info ), |
599 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_D_GT1, &intel_haswell_info ), |
570 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_D_GT2, &intel_haswell_info ), |
600 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_D_GT2, &intel_haswell_info ), |
571 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_D_GT2_PLUS, &intel_haswell_info ), |
601 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_D_GT2_PLUS, &intel_haswell_info ), |
572 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_M_GT1, &intel_haswell_info ), |
602 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_M_GT1, &intel_haswell_info ), |
573 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_M_GT2, &intel_haswell_info ), |
603 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_M_GT2, &intel_haswell_info ), |
574 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_M_GT2_PLUS, &intel_haswell_info ), |
604 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_M_GT2_PLUS, &intel_haswell_info ), |
575 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_S_GT1, &intel_haswell_info ), |
605 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_S_GT1, &intel_haswell_info ), |
576 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_S_GT2, &intel_haswell_info ), |
606 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_S_GT2, &intel_haswell_info ), |
577 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_S_GT2_PLUS, &intel_haswell_info ), |
607 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_ULT_S_GT2_PLUS, &intel_haswell_info ), |
578 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_D_GT1, &intel_haswell_info ), |
608 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_D_GT1, &intel_haswell_info ), |
579 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_D_GT2, &intel_haswell_info ), |
609 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_D_GT2, &intel_haswell_info ), |
580 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_D_GT2_PLUS, &intel_haswell_info ), |
610 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_D_GT2_PLUS, &intel_haswell_info ), |
581 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_M_GT1, &intel_haswell_info ), |
611 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_M_GT1, &intel_haswell_info ), |
582 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_M_GT2, &intel_haswell_info ), |
612 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_M_GT2, &intel_haswell_info ), |
583 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_M_GT2_PLUS, &intel_haswell_info ), |
613 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_M_GT2_PLUS, &intel_haswell_info ), |
584 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_S_GT1, &intel_haswell_info ), |
614 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_S_GT1, &intel_haswell_info ), |
585 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_S_GT2, &intel_haswell_info ), |
615 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_S_GT2, &intel_haswell_info ), |
586 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_S_GT2_PLUS, &intel_haswell_info ), |
616 | INTEL_DEVICE_MATCH (PCI_CHIP_HASWELL_CRW_S_GT2_PLUS, &intel_haswell_info ), |
587 | 617 | ||
588 | INTEL_DEVICE_MATCH (PCI_CHIP_VALLEYVIEW_PO, &intel_valleyview_info ), |
618 | INTEL_DEVICE_MATCH (PCI_CHIP_VALLEYVIEW_PO, &intel_valleyview_info ), |
589 | INTEL_DEVICE_MATCH (PCI_CHIP_VALLEYVIEW_1, &intel_valleyview_info ), |
619 | INTEL_DEVICE_MATCH (PCI_CHIP_VALLEYVIEW_1, &intel_valleyview_info ), |
590 | INTEL_DEVICE_MATCH (PCI_CHIP_VALLEYVIEW_2, &intel_valleyview_info ), |
620 | INTEL_DEVICE_MATCH (PCI_CHIP_VALLEYVIEW_2, &intel_valleyview_info ), |
591 | INTEL_DEVICE_MATCH (PCI_CHIP_VALLEYVIEW_3, &intel_valleyview_info ), |
621 | INTEL_DEVICE_MATCH (PCI_CHIP_VALLEYVIEW_3, &intel_valleyview_info ), |
592 | 622 | ||
593 | INTEL_DEVICE_MATCH (PCI_MATCH_ANY, &intel_generic_info ), |
623 | INTEL_DEVICE_MATCH (PCI_MATCH_ANY, &intel_generic_info ), |
594 | 624 | ||
595 | { 0, 0, 0 }, |
625 | { 0, 0, 0 }, |
596 | }; |
626 | }; |
597 | 627 | ||
598 | const struct pci_id_match *PciDevMatch(uint16_t dev,const struct pci_id_match *list) |
628 | const struct pci_id_match *PciDevMatch(uint16_t dev,const struct pci_id_match *list) |
599 | { |
629 | { |
600 | while(list->device_id) |
630 | while(list->device_id) |
601 | { |
631 | { |
602 | if(dev==list->device_id) |
632 | if(dev==list->device_id) |
603 | return list; |
633 | return list; |
604 | list++; |
634 | list++; |
605 | } |
635 | } |
606 | return NULL; |
636 | return NULL; |
607 | } |
637 | } |
608 | 638 | ||
609 | const struct intel_device_info * |
639 | const struct intel_device_info * |
610 | intel_detect_chipset(struct pci_device *pci) |
640 | intel_detect_chipset(struct pci_device *pci) |
611 | { |
641 | { |
612 | const struct pci_id_match *ent = NULL; |
642 | const struct pci_id_match *ent = NULL; |
613 | const char *name = NULL; |
643 | const char *name = NULL; |
614 | int i; |
644 | int i; |
615 | 645 | ||
616 | ent = PciDevMatch(pci->device_id, intel_device_match); |
646 | ent = PciDevMatch(pci->device_id, intel_device_match); |
617 | 647 | ||
618 | if(ent != NULL) |
648 | if(ent != NULL) |
619 | return (const struct intel_device_info*)ent->match_data; |
649 | return (const struct intel_device_info*)ent->match_data; |
620 | else |
650 | else |
621 | return &intel_generic_info; |
651 | return &intel_generic_info; |
622 | 652 | ||
623 | #if 0 |
653 | #if 0 |
624 | for (i = 0; intel_chipsets[i].name != NULL; i++) { |
654 | for (i = 0; intel_chipsets[i].name != NULL; i++) { |
625 | if (DEVICE_ID(pci) == intel_chipsets[i].token) { |
655 | if (DEVICE_ID(pci) == intel_chipsets[i].token) { |
626 | name = intel_chipsets[i].name; |
656 | name = intel_chipsets[i].name; |
627 | break; |
657 | break; |
628 | } |
658 | } |
629 | } |
659 | } |
630 | if (name == NULL) { |
660 | if (name == NULL) { |
631 | xf86DrvMsg(scrn->scrnIndex, X_WARNING, "unknown chipset\n"); |
661 | xf86DrvMsg(scrn->scrnIndex, X_WARNING, "unknown chipset\n"); |
632 | name = "unknown"; |
662 | name = "unknown"; |
633 | } else { |
663 | } else { |
634 | xf86DrvMsg(scrn->scrnIndex, from, |
664 | xf86DrvMsg(scrn->scrnIndex, from, |
635 | "Integrated Graphics Chipset: Intel(R) %s\n", |
665 | "Integrated Graphics Chipset: Intel(R) %s\n", |
636 | name); |
666 | name); |
637 | } |
667 | } |
638 | 668 | ||
639 | scrn->chipset = name; |
669 | scrn->chipset = name; |
640 | #endif |
670 | #endif |
641 | 671 | ||
642 | } |
672 | } |
643 | 673 | ||
644 | 674 | ||
645 | int drmIoctl(int fd, unsigned long request, void *arg) |
675 | int drmIoctl(int fd, unsigned long request, void *arg) |
646 | { |
676 | { |
647 | ioctl_t io; |
677 | ioctl_t io; |
648 | 678 | ||
649 | io.handle = fd; |
679 | io.handle = fd; |
650 | io.io_code = request; |
680 | io.io_code = request; |
651 | io.input = arg; |
681 | io.input = arg; |
652 | io.inp_size = 64; |
682 | io.inp_size = 64; |
653 | io.output = NULL; |
683 | io.output = NULL; |
654 | io.out_size = 0; |
684 | io.out_size = 0; |
655 | 685 | ||
656 | return call_service(&io); |
686 | return call_service(&io); |
657 | }><>><>>> |
687 | }><>><>>> |