Rev 1413 | Rev 1963 | Go to most recent revision | Details | Compare with Previous | Last modification | View Log | RSS feed
Rev | Author | Line No. | Line |
---|---|---|---|
1117 | serge | 1 | /* |
2 | * Copyright 2008 Advanced Micro Devices, Inc. |
||
3 | * Copyright 2008 Red Hat Inc. |
||
4 | * Copyright 2009 Jerome Glisse. |
||
5 | * |
||
6 | * Permission is hereby granted, free of charge, to any person obtaining a |
||
7 | * copy of this software and associated documentation files (the "Software"), |
||
8 | * to deal in the Software without restriction, including without limitation |
||
9 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
||
10 | * and/or sell copies of the Software, and to permit persons to whom the |
||
11 | * Software is furnished to do so, subject to the following conditions: |
||
12 | * |
||
13 | * The above copyright notice and this permission notice shall be included in |
||
14 | * all copies or substantial portions of the Software. |
||
15 | * |
||
16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
||
17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
||
18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
||
19 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR |
||
20 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
||
21 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
||
22 | * OTHER DEALINGS IN THE SOFTWARE. |
||
23 | * |
||
24 | * Authors: Dave Airlie |
||
25 | * Alex Deucher |
||
26 | * Jerome Glisse |
||
27 | */ |
||
28 | #ifndef __RADEON_ASIC_H__ |
||
29 | #define __RADEON_ASIC_H__ |
||
30 | |||
31 | /* |
||
32 | * common functions |
||
33 | */ |
||
1268 | serge | 34 | uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev); |
1117 | serge | 35 | void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); |
1403 | serge | 36 | uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev); |
1117 | serge | 37 | void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable); |
38 | |||
1268 | serge | 39 | uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev); |
1117 | serge | 40 | void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); |
1268 | serge | 41 | uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev); |
1117 | serge | 42 | void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock); |
43 | void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable); |
||
44 | |||
45 | /* |
||
1430 | serge | 46 | * r100,rv100,rs100,rv200,rs200 |
1117 | serge | 47 | */ |
1221 | serge | 48 | extern int r100_init(struct radeon_device *rdev); |
49 | extern void r100_fini(struct radeon_device *rdev); |
||
50 | extern int r100_suspend(struct radeon_device *rdev); |
||
51 | extern int r100_resume(struct radeon_device *rdev); |
||
1117 | serge | 52 | uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg); |
53 | void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
||
1179 | serge | 54 | void r100_vga_set_state(struct radeon_device *rdev, bool state); |
1117 | serge | 55 | int r100_gpu_reset(struct radeon_device *rdev); |
1179 | serge | 56 | u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc); |
1117 | serge | 57 | void r100_pci_gart_tlb_flush(struct radeon_device *rdev); |
58 | int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
||
1179 | serge | 59 | void r100_cp_commit(struct radeon_device *rdev); |
1117 | serge | 60 | void r100_ring_start(struct radeon_device *rdev); |
61 | int r100_irq_set(struct radeon_device *rdev); |
||
62 | int r100_irq_process(struct radeon_device *rdev); |
||
1128 | serge | 63 | void r100_fence_ring_emit(struct radeon_device *rdev, |
64 | struct radeon_fence *fence); |
||
65 | int r100_cs_parse(struct radeon_cs_parser *p); |
||
1117 | serge | 66 | void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
67 | uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg); |
||
1128 | serge | 68 | int r100_copy_blit(struct radeon_device *rdev, |
69 | uint64_t src_offset, |
||
70 | uint64_t dst_offset, |
||
71 | unsigned num_pages, |
||
72 | struct radeon_fence *fence); |
||
1179 | serge | 73 | int r100_set_surface_reg(struct radeon_device *rdev, int reg, |
74 | uint32_t tiling_flags, uint32_t pitch, |
||
75 | uint32_t offset, uint32_t obj_size); |
||
76 | int r100_clear_surface_reg(struct radeon_device *rdev, int reg); |
||
77 | void r100_bandwidth_update(struct radeon_device *rdev); |
||
78 | void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib); |
||
79 | int r100_ring_test(struct radeon_device *rdev); |
||
1321 | serge | 80 | void r100_hpd_init(struct radeon_device *rdev); |
81 | void r100_hpd_fini(struct radeon_device *rdev); |
||
82 | bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd); |
||
83 | void r100_hpd_set_polarity(struct radeon_device *rdev, |
||
84 | enum radeon_hpd_id hpd); |
||
1117 | serge | 85 | |
86 | static struct radeon_asic r100_asic = { |
||
87 | .init = &r100_init, |
||
1221 | serge | 88 | // .fini = &r100_fini, |
89 | // .suspend = &r100_suspend, |
||
90 | // .resume = &r100_resume, |
||
91 | // .vga_set_state = &r100_vga_set_state, |
||
1117 | serge | 92 | .gpu_reset = &r100_gpu_reset, |
93 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
||
94 | .gart_set_page = &r100_pci_gart_set_page, |
||
1221 | serge | 95 | .cp_commit = &r100_cp_commit, |
1412 | serge | 96 | .ring_start = &r100_ring_start, |
97 | .ring_test = &r100_ring_test, |
||
1221 | serge | 98 | // .ring_ib_execute = &r100_ring_ib_execute, |
99 | // .irq_set = &r100_irq_set, |
||
100 | // .irq_process = &r100_irq_process, |
||
101 | // .get_vblank_counter = &r100_get_vblank_counter, |
||
1413 | serge | 102 | .fence_ring_emit = &r100_fence_ring_emit, |
1221 | serge | 103 | // .cs_parse = &r100_cs_parse, |
104 | // .copy_blit = &r100_copy_blit, |
||
105 | // .copy_dma = NULL, |
||
106 | // .copy = &r100_copy_blit, |
||
1268 | serge | 107 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
1221 | serge | 108 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
1403 | serge | 109 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
1221 | serge | 110 | .set_memory_clock = NULL, |
1430 | serge | 111 | .get_pcie_lanes = NULL, |
1221 | serge | 112 | .set_pcie_lanes = NULL, |
113 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
||
1179 | serge | 114 | .set_surface_reg = r100_set_surface_reg, |
115 | .clear_surface_reg = r100_clear_surface_reg, |
||
116 | .bandwidth_update = &r100_bandwidth_update, |
||
1321 | serge | 117 | .hpd_init = &r100_hpd_init, |
118 | .hpd_fini = &r100_hpd_fini, |
||
119 | .hpd_sense = &r100_hpd_sense, |
||
120 | .hpd_set_polarity = &r100_hpd_set_polarity, |
||
1404 | serge | 121 | .ioctl_wait_idle = NULL, |
1117 | serge | 122 | }; |
123 | |||
1430 | serge | 124 | /* |
125 | * r200,rv250,rs300,rv280 |
||
126 | */ |
||
127 | extern int r200_copy_dma(struct radeon_device *rdev, |
||
128 | uint64_t src_offset, |
||
129 | uint64_t dst_offset, |
||
130 | unsigned num_pages, |
||
131 | struct radeon_fence *fence); |
||
132 | static struct radeon_asic r200_asic = { |
||
133 | .init = &r100_init, |
||
134 | // .fini = &r100_fini, |
||
135 | // .suspend = &r100_suspend, |
||
136 | // .resume = &r100_resume, |
||
137 | // .vga_set_state = &r100_vga_set_state, |
||
138 | .gpu_reset = &r100_gpu_reset, |
||
139 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
||
140 | .gart_set_page = &r100_pci_gart_set_page, |
||
141 | .cp_commit = &r100_cp_commit, |
||
142 | .ring_start = &r100_ring_start, |
||
143 | .ring_test = &r100_ring_test, |
||
144 | // .ring_ib_execute = &r100_ring_ib_execute, |
||
145 | // .irq_set = &r100_irq_set, |
||
146 | // .irq_process = &r100_irq_process, |
||
147 | // .get_vblank_counter = &r100_get_vblank_counter, |
||
148 | .fence_ring_emit = &r100_fence_ring_emit, |
||
149 | // .cs_parse = &r100_cs_parse, |
||
150 | // .copy_blit = &r100_copy_blit, |
||
151 | // .copy_dma = NULL, |
||
152 | // .copy = &r100_copy_blit, |
||
153 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
||
154 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
||
155 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
||
156 | .set_memory_clock = NULL, |
||
157 | .set_pcie_lanes = NULL, |
||
158 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
||
159 | .set_surface_reg = r100_set_surface_reg, |
||
160 | .clear_surface_reg = r100_clear_surface_reg, |
||
161 | .bandwidth_update = &r100_bandwidth_update, |
||
162 | .hpd_init = &r100_hpd_init, |
||
163 | .hpd_fini = &r100_hpd_fini, |
||
164 | .hpd_sense = &r100_hpd_sense, |
||
165 | .hpd_set_polarity = &r100_hpd_set_polarity, |
||
166 | .ioctl_wait_idle = NULL, |
||
167 | }; |
||
1117 | serge | 168 | |
1430 | serge | 169 | |
1117 | serge | 170 | /* |
171 | * r300,r350,rv350,rv380 |
||
172 | */ |
||
1221 | serge | 173 | extern int r300_init(struct radeon_device *rdev); |
174 | extern void r300_fini(struct radeon_device *rdev); |
||
175 | extern int r300_suspend(struct radeon_device *rdev); |
||
176 | extern int r300_resume(struct radeon_device *rdev); |
||
177 | extern int r300_gpu_reset(struct radeon_device *rdev); |
||
178 | extern void r300_ring_start(struct radeon_device *rdev); |
||
179 | extern void r300_fence_ring_emit(struct radeon_device *rdev, |
||
1128 | serge | 180 | struct radeon_fence *fence); |
1221 | serge | 181 | extern int r300_cs_parse(struct radeon_cs_parser *p); |
182 | extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev); |
||
183 | extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
||
184 | extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg); |
||
185 | extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
||
186 | extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes); |
||
1430 | serge | 187 | extern int rv370_get_pcie_lanes(struct radeon_device *rdev); |
188 | |||
1117 | serge | 189 | static struct radeon_asic r300_asic = { |
190 | .init = &r300_init, |
||
1221 | serge | 191 | // .fini = &r300_fini, |
192 | // .suspend = &r300_suspend, |
||
193 | // .resume = &r300_resume, |
||
194 | // .vga_set_state = &r100_vga_set_state, |
||
1117 | serge | 195 | .gpu_reset = &r300_gpu_reset, |
196 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
||
197 | .gart_set_page = &r100_pci_gart_set_page, |
||
1412 | serge | 198 | .cp_commit = &r100_cp_commit, |
199 | .ring_start = &r300_ring_start, |
||
200 | .ring_test = &r100_ring_test, |
||
1221 | serge | 201 | // .ring_ib_execute = &r100_ring_ib_execute, |
202 | // .irq_set = &r100_irq_set, |
||
203 | // .irq_process = &r100_irq_process, |
||
204 | // .get_vblank_counter = &r100_get_vblank_counter, |
||
1413 | serge | 205 | .fence_ring_emit = &r300_fence_ring_emit, |
1221 | serge | 206 | // .cs_parse = &r300_cs_parse, |
207 | // .copy_blit = &r100_copy_blit, |
||
208 | // .copy_dma = &r300_copy_dma, |
||
209 | // .copy = &r100_copy_blit, |
||
1268 | serge | 210 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
1221 | serge | 211 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
1403 | serge | 212 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
1221 | serge | 213 | .set_memory_clock = NULL, |
1430 | serge | 214 | .get_pcie_lanes = &rv370_get_pcie_lanes, |
1221 | serge | 215 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
216 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
||
1179 | serge | 217 | .set_surface_reg = r100_set_surface_reg, |
218 | .clear_surface_reg = r100_clear_surface_reg, |
||
219 | .bandwidth_update = &r100_bandwidth_update, |
||
1321 | serge | 220 | .hpd_init = &r100_hpd_init, |
221 | .hpd_fini = &r100_hpd_fini, |
||
222 | .hpd_sense = &r100_hpd_sense, |
||
223 | .hpd_set_polarity = &r100_hpd_set_polarity, |
||
1404 | serge | 224 | .ioctl_wait_idle = NULL, |
1117 | serge | 225 | }; |
226 | |||
1430 | serge | 227 | |
228 | static struct radeon_asic r300_asic_pcie = { |
||
229 | .init = &r300_init, |
||
230 | // .fini = &r300_fini, |
||
231 | // .suspend = &r300_suspend, |
||
232 | // .resume = &r300_resume, |
||
233 | // .vga_set_state = &r100_vga_set_state, |
||
234 | .gpu_reset = &r300_gpu_reset, |
||
235 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
||
236 | .gart_set_page = &rv370_pcie_gart_set_page, |
||
237 | .cp_commit = &r100_cp_commit, |
||
238 | .ring_start = &r300_ring_start, |
||
239 | .ring_test = &r100_ring_test, |
||
240 | // .ring_ib_execute = &r100_ring_ib_execute, |
||
241 | // .irq_set = &r100_irq_set, |
||
242 | // .irq_process = &r100_irq_process, |
||
243 | // .get_vblank_counter = &r100_get_vblank_counter, |
||
244 | .fence_ring_emit = &r300_fence_ring_emit, |
||
245 | // .cs_parse = &r300_cs_parse, |
||
246 | // .copy_blit = &r100_copy_blit, |
||
247 | // .copy_dma = &r300_copy_dma, |
||
248 | // .copy = &r100_copy_blit, |
||
249 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
||
250 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
||
251 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
||
252 | .set_memory_clock = NULL, |
||
253 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
||
254 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
||
255 | .set_surface_reg = r100_set_surface_reg, |
||
256 | .clear_surface_reg = r100_clear_surface_reg, |
||
257 | .bandwidth_update = &r100_bandwidth_update, |
||
258 | .hpd_init = &r100_hpd_init, |
||
259 | .hpd_fini = &r100_hpd_fini, |
||
260 | .hpd_sense = &r100_hpd_sense, |
||
261 | .hpd_set_polarity = &r100_hpd_set_polarity, |
||
262 | .ioctl_wait_idle = NULL, |
||
263 | }; |
||
264 | |||
/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
1117 | serge | 272 | static struct radeon_asic r420_asic = { |
1179 | serge | 273 | .init = &r420_init, |
1221 | serge | 274 | // .fini = &r420_fini, |
275 | // .suspend = &r420_suspend, |
||
276 | // .resume = &r420_resume, |
||
277 | // .vga_set_state = &r100_vga_set_state, |
||
1117 | serge | 278 | .gpu_reset = &r300_gpu_reset, |
279 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
||
280 | .gart_set_page = &rv370_pcie_gart_set_page, |
||
1412 | serge | 281 | .cp_commit = &r100_cp_commit, |
282 | .ring_start = &r300_ring_start, |
||
283 | .ring_test = &r100_ring_test, |
||
1221 | serge | 284 | // .ring_ib_execute = &r100_ring_ib_execute, |
285 | // .irq_set = &r100_irq_set, |
||
286 | // .irq_process = &r100_irq_process, |
||
287 | // .get_vblank_counter = &r100_get_vblank_counter, |
||
1413 | serge | 288 | .fence_ring_emit = &r300_fence_ring_emit, |
1221 | serge | 289 | // .cs_parse = &r300_cs_parse, |
290 | // .copy_blit = &r100_copy_blit, |
||
291 | // .copy_dma = &r300_copy_dma, |
||
292 | // .copy = &r100_copy_blit, |
||
1268 | serge | 293 | .get_engine_clock = &radeon_atom_get_engine_clock, |
1221 | serge | 294 | .set_engine_clock = &radeon_atom_set_engine_clock, |
1268 | serge | 295 | .get_memory_clock = &radeon_atom_get_memory_clock, |
1221 | serge | 296 | .set_memory_clock = &radeon_atom_set_memory_clock, |
1430 | serge | 297 | .get_pcie_lanes = &rv370_get_pcie_lanes, |
1221 | serge | 298 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
299 | .set_clock_gating = &radeon_atom_set_clock_gating, |
||
1179 | serge | 300 | .set_surface_reg = r100_set_surface_reg, |
301 | .clear_surface_reg = r100_clear_surface_reg, |
||
302 | .bandwidth_update = &r100_bandwidth_update, |
||
1321 | serge | 303 | .hpd_init = &r100_hpd_init, |
304 | .hpd_fini = &r100_hpd_fini, |
||
305 | .hpd_sense = &r100_hpd_sense, |
||
306 | .hpd_set_polarity = &r100_hpd_set_polarity, |
||
1404 | serge | 307 | .ioctl_wait_idle = NULL, |
1117 | serge | 308 | }; |
309 | |||
310 | |||
311 | /* |
||
312 | * rs400,rs480 |
||
313 | */ |
||
1221 | serge | 314 | extern int rs400_init(struct radeon_device *rdev); |
315 | extern void rs400_fini(struct radeon_device *rdev); |
||
316 | extern int rs400_suspend(struct radeon_device *rdev); |
||
317 | extern int rs400_resume(struct radeon_device *rdev); |
||
1117 | serge | 318 | void rs400_gart_tlb_flush(struct radeon_device *rdev); |
319 | int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
||
320 | uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
||
321 | void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
||
322 | static struct radeon_asic rs400_asic = { |
||
1221 | serge | 323 | .init = &rs400_init, |
324 | // .fini = &rs400_fini, |
||
325 | // .suspend = &rs400_suspend, |
||
326 | // .resume = &rs400_resume, |
||
327 | // .vga_set_state = &r100_vga_set_state, |
||
1117 | serge | 328 | .gpu_reset = &r300_gpu_reset, |
329 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
||
330 | .gart_set_page = &rs400_gart_set_page, |
||
1412 | serge | 331 | .cp_commit = &r100_cp_commit, |
332 | .ring_start = &r300_ring_start, |
||
333 | .ring_test = &r100_ring_test, |
||
1221 | serge | 334 | // .ring_ib_execute = &r100_ring_ib_execute, |
335 | // .irq_set = &r100_irq_set, |
||
336 | // .irq_process = &r100_irq_process, |
||
337 | // .get_vblank_counter = &r100_get_vblank_counter, |
||
1413 | serge | 338 | .fence_ring_emit = &r300_fence_ring_emit, |
1221 | serge | 339 | // .cs_parse = &r300_cs_parse, |
340 | // .copy_blit = &r100_copy_blit, |
||
341 | // .copy_dma = &r300_copy_dma, |
||
342 | // .copy = &r100_copy_blit, |
||
1268 | serge | 343 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
1221 | serge | 344 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
1403 | serge | 345 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
1221 | serge | 346 | .set_memory_clock = NULL, |
1430 | serge | 347 | .get_pcie_lanes = NULL, |
1221 | serge | 348 | .set_pcie_lanes = NULL, |
349 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
||
1179 | serge | 350 | .set_surface_reg = r100_set_surface_reg, |
351 | .clear_surface_reg = r100_clear_surface_reg, |
||
352 | .bandwidth_update = &r100_bandwidth_update, |
||
1321 | serge | 353 | .hpd_init = &r100_hpd_init, |
354 | .hpd_fini = &r100_hpd_fini, |
||
355 | .hpd_sense = &r100_hpd_sense, |
||
356 | .hpd_set_polarity = &r100_hpd_set_polarity, |
||
1404 | serge | 357 | .ioctl_wait_idle = NULL, |
1117 | serge | 358 | }; |
359 | |||
360 | |||
361 | /* |
||
362 | * rs600. |
||
363 | */ |
||
1221 | serge | 364 | extern int rs600_init(struct radeon_device *rdev); |
365 | extern void rs600_fini(struct radeon_device *rdev); |
||
366 | extern int rs600_suspend(struct radeon_device *rdev); |
||
367 | extern int rs600_resume(struct radeon_device *rdev); |
||
1117 | serge | 368 | int rs600_irq_set(struct radeon_device *rdev); |
1179 | serge | 369 | int rs600_irq_process(struct radeon_device *rdev); |
370 | u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc); |
||
1117 | serge | 371 | void rs600_gart_tlb_flush(struct radeon_device *rdev); |
372 | int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
||
373 | uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
||
374 | void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
||
1179 | serge | 375 | void rs600_bandwidth_update(struct radeon_device *rdev); |
1321 | serge | 376 | void rs600_hpd_init(struct radeon_device *rdev); |
377 | void rs600_hpd_fini(struct radeon_device *rdev); |
||
378 | bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd); |
||
379 | void rs600_hpd_set_polarity(struct radeon_device *rdev, |
||
380 | enum radeon_hpd_id hpd); |
||
381 | |||
1117 | serge | 382 | static struct radeon_asic rs600_asic = { |
1179 | serge | 383 | .init = &rs600_init, |
1221 | serge | 384 | // .fini = &rs600_fini, |
385 | // .suspend = &rs600_suspend, |
||
386 | // .resume = &rs600_resume, |
||
387 | // .vga_set_state = &r100_vga_set_state, |
||
1117 | serge | 388 | .gpu_reset = &r300_gpu_reset, |
389 | .gart_tlb_flush = &rs600_gart_tlb_flush, |
||
390 | .gart_set_page = &rs600_gart_set_page, |
||
1412 | serge | 391 | .cp_commit = &r100_cp_commit, |
392 | .ring_start = &r300_ring_start, |
||
393 | .ring_test = &r100_ring_test, |
||
1221 | serge | 394 | // .ring_ib_execute = &r100_ring_ib_execute, |
395 | // .irq_set = &rs600_irq_set, |
||
396 | // .irq_process = &rs600_irq_process, |
||
397 | // .get_vblank_counter = &rs600_get_vblank_counter, |
||
1413 | serge | 398 | .fence_ring_emit = &r300_fence_ring_emit, |
1117 | serge | 399 | // .cs_parse = &r300_cs_parse, |
400 | // .copy_blit = &r100_copy_blit, |
||
401 | // .copy_dma = &r300_copy_dma, |
||
402 | // .copy = &r100_copy_blit, |
||
1268 | serge | 403 | .get_engine_clock = &radeon_atom_get_engine_clock, |
1221 | serge | 404 | .set_engine_clock = &radeon_atom_set_engine_clock, |
1268 | serge | 405 | .get_memory_clock = &radeon_atom_get_memory_clock, |
1221 | serge | 406 | .set_memory_clock = &radeon_atom_set_memory_clock, |
1430 | serge | 407 | .get_pcie_lanes = NULL, |
1221 | serge | 408 | .set_pcie_lanes = NULL, |
409 | .set_clock_gating = &radeon_atom_set_clock_gating, |
||
1430 | serge | 410 | .set_surface_reg = r100_set_surface_reg, |
411 | .clear_surface_reg = r100_clear_surface_reg, |
||
1179 | serge | 412 | .bandwidth_update = &rs600_bandwidth_update, |
1321 | serge | 413 | .hpd_init = &rs600_hpd_init, |
414 | .hpd_fini = &rs600_hpd_fini, |
||
415 | .hpd_sense = &rs600_hpd_sense, |
||
416 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
||
1404 | serge | 417 | .ioctl_wait_idle = NULL, |
1117 | serge | 418 | }; |
419 | |||
420 | |||
421 | /* |
||
422 | * rs690,rs740 |
||
423 | */ |
||
1221 | serge | 424 | int rs690_init(struct radeon_device *rdev); |
425 | void rs690_fini(struct radeon_device *rdev); |
||
426 | int rs690_resume(struct radeon_device *rdev); |
||
427 | int rs690_suspend(struct radeon_device *rdev); |
||
1117 | serge | 428 | uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
429 | void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
||
1179 | serge | 430 | void rs690_bandwidth_update(struct radeon_device *rdev); |
1117 | serge | 431 | static struct radeon_asic rs690_asic = { |
1221 | serge | 432 | .init = &rs690_init, |
433 | // .fini = &rs690_fini, |
||
434 | // .suspend = &rs690_suspend, |
||
435 | // .resume = &rs690_resume, |
||
436 | // .vga_set_state = &r100_vga_set_state, |
||
1117 | serge | 437 | .gpu_reset = &r300_gpu_reset, |
438 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
||
439 | .gart_set_page = &rs400_gart_set_page, |
||
1412 | serge | 440 | .cp_commit = &r100_cp_commit, |
441 | .ring_start = &r300_ring_start, |
||
442 | .ring_test = &r100_ring_test, |
||
1221 | serge | 443 | // .ring_ib_execute = &r100_ring_ib_execute, |
444 | // .irq_set = &rs600_irq_set, |
||
445 | // .irq_process = &rs600_irq_process, |
||
446 | // .get_vblank_counter = &rs600_get_vblank_counter, |
||
1413 | serge | 447 | .fence_ring_emit = &r300_fence_ring_emit, |
1221 | serge | 448 | // .cs_parse = &r300_cs_parse, |
449 | // .copy_blit = &r100_copy_blit, |
||
450 | // .copy_dma = &r300_copy_dma, |
||
451 | // .copy = &r300_copy_dma, |
||
1268 | serge | 452 | .get_engine_clock = &radeon_atom_get_engine_clock, |
1221 | serge | 453 | .set_engine_clock = &radeon_atom_set_engine_clock, |
1268 | serge | 454 | .get_memory_clock = &radeon_atom_get_memory_clock, |
1221 | serge | 455 | .set_memory_clock = &radeon_atom_set_memory_clock, |
1430 | serge | 456 | .get_pcie_lanes = NULL, |
1221 | serge | 457 | .set_pcie_lanes = NULL, |
458 | .set_clock_gating = &radeon_atom_set_clock_gating, |
||
1179 | serge | 459 | .set_surface_reg = r100_set_surface_reg, |
460 | .clear_surface_reg = r100_clear_surface_reg, |
||
461 | .bandwidth_update = &rs690_bandwidth_update, |
||
1321 | serge | 462 | .hpd_init = &rs600_hpd_init, |
463 | .hpd_fini = &rs600_hpd_fini, |
||
464 | .hpd_sense = &rs600_hpd_sense, |
||
465 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
||
1404 | serge | 466 | .ioctl_wait_idle = NULL, |
1117 | serge | 467 | }; |
468 | |||
1179 | serge | 469 | |
1117 | serge | 470 | /* |
471 | * rv515 |
||
472 | */ |
||
473 | int rv515_init(struct radeon_device *rdev); |
||
1221 | serge | 474 | void rv515_fini(struct radeon_device *rdev); |
1117 | serge | 475 | int rv515_gpu_reset(struct radeon_device *rdev); |
476 | uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
||
477 | void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
||
478 | void rv515_ring_start(struct radeon_device *rdev); |
||
479 | uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg); |
||
480 | void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
||
1179 | serge | 481 | void rv515_bandwidth_update(struct radeon_device *rdev); |
1221 | serge | 482 | int rv515_resume(struct radeon_device *rdev); |
483 | int rv515_suspend(struct radeon_device *rdev); |
||
1117 | serge | 484 | static struct radeon_asic rv515_asic = { |
485 | .init = &rv515_init, |
||
1221 | serge | 486 | // .fini = &rv515_fini, |
487 | // .suspend = &rv515_suspend, |
||
488 | // .resume = &rv515_resume, |
||
489 | // .vga_set_state = &r100_vga_set_state, |
||
1117 | serge | 490 | .gpu_reset = &rv515_gpu_reset, |
491 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
||
492 | .gart_set_page = &rv370_pcie_gart_set_page, |
||
1412 | serge | 493 | .cp_commit = &r100_cp_commit, |
494 | .ring_start = &rv515_ring_start, |
||
495 | .ring_test = &r100_ring_test, |
||
1221 | serge | 496 | // .ring_ib_execute = &r100_ring_ib_execute, |
497 | // .irq_set = &rs600_irq_set, |
||
498 | // .irq_process = &rs600_irq_process, |
||
499 | // .get_vblank_counter = &rs600_get_vblank_counter, |
||
1413 | serge | 500 | .fence_ring_emit = &r300_fence_ring_emit, |
1221 | serge | 501 | // .cs_parse = &r300_cs_parse, |
502 | // .copy_blit = &r100_copy_blit, |
||
503 | // .copy_dma = &r300_copy_dma, |
||
504 | // .copy = &r100_copy_blit, |
||
1268 | serge | 505 | .get_engine_clock = &radeon_atom_get_engine_clock, |
1221 | serge | 506 | .set_engine_clock = &radeon_atom_set_engine_clock, |
1268 | serge | 507 | .get_memory_clock = &radeon_atom_get_memory_clock, |
1221 | serge | 508 | .set_memory_clock = &radeon_atom_set_memory_clock, |
1430 | serge | 509 | .get_pcie_lanes = &rv370_get_pcie_lanes, |
1221 | serge | 510 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
511 | .set_clock_gating = &radeon_atom_set_clock_gating, |
||
1179 | serge | 512 | .set_surface_reg = r100_set_surface_reg, |
513 | .clear_surface_reg = r100_clear_surface_reg, |
||
514 | .bandwidth_update = &rv515_bandwidth_update, |
||
1321 | serge | 515 | .hpd_init = &rs600_hpd_init, |
516 | .hpd_fini = &rs600_hpd_fini, |
||
517 | .hpd_sense = &rs600_hpd_sense, |
||
518 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
||
1404 | serge | 519 | .ioctl_wait_idle = NULL, |
1117 | serge | 520 | }; |
521 | |||
522 | |||
523 | /* |
||
524 | * r520,rv530,rv560,rv570,r580 |
||
525 | */ |
||
1221 | serge | 526 | int r520_init(struct radeon_device *rdev); |
527 | int r520_resume(struct radeon_device *rdev); |
||
1117 | serge | 528 | static struct radeon_asic r520_asic = { |
1221 | serge | 529 | .init = &r520_init, |
530 | // .fini = &rv515_fini, |
||
531 | // .suspend = &rv515_suspend, |
||
532 | // .resume = &r520_resume, |
||
533 | // .vga_set_state = &r100_vga_set_state, |
||
1117 | serge | 534 | .gpu_reset = &rv515_gpu_reset, |
1119 | serge | 535 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
536 | .gart_set_page = &rv370_pcie_gart_set_page, |
||
1412 | serge | 537 | .cp_commit = &r100_cp_commit, |
538 | .ring_start = &rv515_ring_start, |
||
539 | .ring_test = &r100_ring_test, |
||
1221 | serge | 540 | // .ring_ib_execute = &r100_ring_ib_execute, |
541 | // .irq_set = &rs600_irq_set, |
||
542 | // .irq_process = &rs600_irq_process, |
||
543 | // .get_vblank_counter = &rs600_get_vblank_counter, |
||
1413 | serge | 544 | .fence_ring_emit = &r300_fence_ring_emit, |
1221 | serge | 545 | // .cs_parse = &r300_cs_parse, |
546 | // .copy_blit = &r100_copy_blit, |
||
547 | // .copy_dma = &r300_copy_dma, |
||
548 | // .copy = &r100_copy_blit, |
||
1268 | serge | 549 | .get_engine_clock = &radeon_atom_get_engine_clock, |
1221 | serge | 550 | .set_engine_clock = &radeon_atom_set_engine_clock, |
1268 | serge | 551 | .get_memory_clock = &radeon_atom_get_memory_clock, |
1221 | serge | 552 | .set_memory_clock = &radeon_atom_set_memory_clock, |
1430 | serge | 553 | .get_pcie_lanes = &rv370_get_pcie_lanes, |
1221 | serge | 554 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
555 | .set_clock_gating = &radeon_atom_set_clock_gating, |
||
1179 | serge | 556 | .set_surface_reg = r100_set_surface_reg, |
557 | .clear_surface_reg = r100_clear_surface_reg, |
||
1221 | serge | 558 | .bandwidth_update = &rv515_bandwidth_update, |
1321 | serge | 559 | .hpd_init = &rs600_hpd_init, |
560 | .hpd_fini = &rs600_hpd_fini, |
||
561 | .hpd_sense = &rs600_hpd_sense, |
||
562 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
||
1404 | serge | 563 | .ioctl_wait_idle = NULL, |
1117 | serge | 564 | }; |
565 | |||
566 | /* |
||
1221 | serge | 567 | * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880 |
1117 | serge | 568 | */ |
1179 | serge | 569 | int r600_init(struct radeon_device *rdev); |
570 | void r600_fini(struct radeon_device *rdev); |
||
571 | int r600_suspend(struct radeon_device *rdev); |
||
572 | int r600_resume(struct radeon_device *rdev); |
||
573 | void r600_vga_set_state(struct radeon_device *rdev, bool state); |
||
574 | int r600_wb_init(struct radeon_device *rdev); |
||
575 | void r600_wb_fini(struct radeon_device *rdev); |
||
576 | void r600_cp_commit(struct radeon_device *rdev); |
||
577 | void r600_pcie_gart_tlb_flush(struct radeon_device *rdev); |
||
1117 | serge | 578 | uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg); |
579 | void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
||
1233 | serge | 580 | int r600_cs_parse(struct radeon_cs_parser *p); |
581 | void r600_fence_ring_emit(struct radeon_device *rdev, |
||
582 | struct radeon_fence *fence); |
||
583 | int r600_copy_dma(struct radeon_device *rdev, |
||
584 | uint64_t src_offset, |
||
585 | uint64_t dst_offset, |
||
586 | unsigned num_pages, |
||
587 | struct radeon_fence *fence); |
||
588 | int r600_irq_process(struct radeon_device *rdev); |
||
589 | int r600_irq_set(struct radeon_device *rdev); |
||
590 | int r600_gpu_reset(struct radeon_device *rdev); |
||
591 | int r600_set_surface_reg(struct radeon_device *rdev, int reg, |
||
592 | uint32_t tiling_flags, uint32_t pitch, |
||
593 | uint32_t offset, uint32_t obj_size); |
||
594 | int r600_clear_surface_reg(struct radeon_device *rdev, int reg); |
||
595 | void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib); |
||
596 | int r600_ring_test(struct radeon_device *rdev); |
||
597 | int r600_copy_blit(struct radeon_device *rdev, |
||
598 | uint64_t src_offset, uint64_t dst_offset, |
||
599 | unsigned num_pages, struct radeon_fence *fence); |
||
1321 | serge | 600 | void r600_hpd_init(struct radeon_device *rdev); |
601 | void r600_hpd_fini(struct radeon_device *rdev); |
||
602 | bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd); |
||
603 | void r600_hpd_set_polarity(struct radeon_device *rdev, |
||
604 | enum radeon_hpd_id hpd); |
||
1404 | serge | 605 | extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo); |
1117 | serge | 606 | |
1233 | serge | 607 | static struct radeon_asic r600_asic = { |
608 | .init = &r600_init, |
||
609 | // .fini = &r600_fini, |
||
610 | // .suspend = &r600_suspend, |
||
611 | // .resume = &r600_resume, |
||
1413 | serge | 612 | .cp_commit = &r600_cp_commit, |
1233 | serge | 613 | .vga_set_state = &r600_vga_set_state, |
614 | .gpu_reset = &r600_gpu_reset, |
||
615 | .gart_tlb_flush = &r600_pcie_gart_tlb_flush, |
||
616 | .gart_set_page = &rs600_gart_set_page, |
||
1413 | serge | 617 | .ring_test = &r600_ring_test, |
1233 | serge | 618 | // .ring_ib_execute = &r600_ring_ib_execute, |
619 | // .irq_set = &r600_irq_set, |
||
620 | // .irq_process = &r600_irq_process, |
||
1413 | serge | 621 | .fence_ring_emit = &r600_fence_ring_emit, |
1233 | serge | 622 | // .cs_parse = &r600_cs_parse, |
623 | // .copy_blit = &r600_copy_blit, |
||
624 | // .copy_dma = &r600_copy_blit, |
||
625 | // .copy = &r600_copy_blit, |
||
1268 | serge | 626 | .get_engine_clock = &radeon_atom_get_engine_clock, |
1233 | serge | 627 | .set_engine_clock = &radeon_atom_set_engine_clock, |
1268 | serge | 628 | .get_memory_clock = &radeon_atom_get_memory_clock, |
1233 | serge | 629 | .set_memory_clock = &radeon_atom_set_memory_clock, |
1430 | serge | 630 | .get_pcie_lanes = &rv370_get_pcie_lanes, |
1233 | serge | 631 | .set_pcie_lanes = NULL, |
1430 | serge | 632 | .set_clock_gating = NULL, |
1233 | serge | 633 | .set_surface_reg = r600_set_surface_reg, |
634 | .clear_surface_reg = r600_clear_surface_reg, |
||
635 | .bandwidth_update = &rv515_bandwidth_update, |
||
1321 | serge | 636 | .hpd_init = &r600_hpd_init, |
637 | .hpd_fini = &r600_hpd_fini, |
||
638 | .hpd_sense = &r600_hpd_sense, |
||
639 | .hpd_set_polarity = &r600_hpd_set_polarity, |
||
1404 | serge | 640 | // .ioctl_wait_idle = r600_ioctl_wait_idle, |
1233 | serge | 641 | }; |
642 | |||
/*
 * rv770,rv730,rv710,rv740
 */
/* rv770-family entry points; the remaining callbacks in rv770_asic below
 * reuse the r600 implementations. */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);
651 | |||
652 | static struct radeon_asic rv770_asic = { |
||
653 | .init = &rv770_init, |
||
654 | // .fini = &rv770_fini, |
||
655 | // .suspend = &rv770_suspend, |
||
656 | // .resume = &rv770_resume, |
||
1413 | serge | 657 | .cp_commit = &r600_cp_commit, |
1233 | serge | 658 | .gpu_reset = &rv770_gpu_reset, |
659 | .vga_set_state = &r600_vga_set_state, |
||
660 | .gart_tlb_flush = &r600_pcie_gart_tlb_flush, |
||
661 | .gart_set_page = &rs600_gart_set_page, |
||
1413 | serge | 662 | .ring_test = &r600_ring_test, |
1233 | serge | 663 | // .ring_ib_execute = &r600_ring_ib_execute, |
664 | // .irq_set = &r600_irq_set, |
||
665 | // .irq_process = &r600_irq_process, |
||
1413 | serge | 666 | .fence_ring_emit = &r600_fence_ring_emit, |
1233 | serge | 667 | // .cs_parse = &r600_cs_parse, |
668 | // .copy_blit = &r600_copy_blit, |
||
669 | // .copy_dma = &r600_copy_blit, |
||
670 | // .copy = &r600_copy_blit, |
||
1268 | serge | 671 | .get_engine_clock = &radeon_atom_get_engine_clock, |
1233 | serge | 672 | .set_engine_clock = &radeon_atom_set_engine_clock, |
1268 | serge | 673 | .get_memory_clock = &radeon_atom_get_memory_clock, |
1233 | serge | 674 | .set_memory_clock = &radeon_atom_set_memory_clock, |
1430 | serge | 675 | .get_pcie_lanes = &rv370_get_pcie_lanes, |
1233 | serge | 676 | .set_pcie_lanes = NULL, |
677 | .set_clock_gating = &radeon_atom_set_clock_gating, |
||
678 | .set_surface_reg = r600_set_surface_reg, |
||
679 | .clear_surface_reg = r600_clear_surface_reg, |
||
680 | .bandwidth_update = &rv515_bandwidth_update, |
||
1321 | serge | 681 | .hpd_init = &r600_hpd_init, |
682 | .hpd_fini = &r600_hpd_fini, |
||
683 | .hpd_sense = &r600_hpd_sense, |
||
684 | .hpd_set_polarity = &r600_hpd_set_polarity, |
||
1233 | serge | 685 | }; |
686 | |||
1430 | serge | 687 | /* |
688 | * evergreen |
||
689 | */ |
||
690 | int evergreen_init(struct radeon_device *rdev); |
||
691 | void evergreen_fini(struct radeon_device *rdev); |
||
692 | int evergreen_suspend(struct radeon_device *rdev); |
||
693 | int evergreen_resume(struct radeon_device *rdev); |
||
694 | int evergreen_gpu_reset(struct radeon_device *rdev); |
||
695 | void evergreen_bandwidth_update(struct radeon_device *rdev); |
||
696 | void evergreen_hpd_init(struct radeon_device *rdev); |
||
697 | void evergreen_hpd_fini(struct radeon_device *rdev); |
||
698 | bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd); |
||
699 | void evergreen_hpd_set_polarity(struct radeon_device *rdev, |
||
700 | enum radeon_hpd_id hpd); |
||
701 | |||
702 | static struct radeon_asic evergreen_asic = { |
||
703 | .init = &evergreen_init, |
||
704 | // .fini = &evergreen_fini, |
||
705 | // .suspend = &evergreen_suspend, |
||
706 | // .resume = &evergreen_resume, |
||
707 | .cp_commit = NULL, |
||
708 | .gpu_reset = &evergreen_gpu_reset, |
||
709 | .vga_set_state = &r600_vga_set_state, |
||
710 | .gart_tlb_flush = &r600_pcie_gart_tlb_flush, |
||
711 | .gart_set_page = &rs600_gart_set_page, |
||
712 | .ring_test = NULL, |
||
713 | // .ring_ib_execute = &r600_ring_ib_execute, |
||
714 | // .irq_set = &r600_irq_set, |
||
715 | // .irq_process = &r600_irq_process, |
||
716 | .fence_ring_emit = &r600_fence_ring_emit, |
||
717 | // .cs_parse = &r600_cs_parse, |
||
718 | // .copy_blit = &r600_copy_blit, |
||
719 | // .copy_dma = &r600_copy_blit, |
||
720 | // .copy = &r600_copy_blit, |
||
721 | .get_engine_clock = &radeon_atom_get_engine_clock, |
||
722 | .set_engine_clock = &radeon_atom_set_engine_clock, |
||
723 | .get_memory_clock = &radeon_atom_get_memory_clock, |
||
724 | .set_memory_clock = &radeon_atom_set_memory_clock, |
||
725 | .set_pcie_lanes = NULL, |
||
726 | .set_clock_gating = NULL, |
||
727 | .set_surface_reg = r600_set_surface_reg, |
||
728 | .clear_surface_reg = r600_clear_surface_reg, |
||
729 | .bandwidth_update = &evergreen_bandwidth_update, |
||
730 | .hpd_init = &evergreen_hpd_init, |
||
731 | .hpd_fini = &evergreen_hpd_fini, |
||
732 | .hpd_sense = &evergreen_hpd_sense, |
||
733 | .hpd_set_polarity = &evergreen_hpd_set_polarity, |
||
734 | }; |
||
735 | |||
1117 | serge | 736 | #endif |