Rev 1128 | Rev 1221 | Go to most recent revision | Only display areas with differences | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 1128 | Rev 1179 | ||
---|---|---|---|
1 | /* |
1 | /* |
2 | * Copyright 2008 Advanced Micro Devices, Inc. |
2 | * Copyright 2008 Advanced Micro Devices, Inc. |
3 | * Copyright 2008 Red Hat Inc. |
3 | * Copyright 2008 Red Hat Inc. |
4 | * Copyright 2009 Jerome Glisse. |
4 | * Copyright 2009 Jerome Glisse. |
5 | * |
5 | * |
6 | * Permission is hereby granted, free of charge, to any person obtaining a |
6 | * Permission is hereby granted, free of charge, to any person obtaining a |
7 | * copy of this software and associated documentation files (the "Software"), |
7 | * copy of this software and associated documentation files (the "Software"), |
8 | * to deal in the Software without restriction, including without limitation |
8 | * to deal in the Software without restriction, including without limitation |
9 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
9 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
10 | * and/or sell copies of the Software, and to permit persons to whom the |
10 | * and/or sell copies of the Software, and to permit persons to whom the |
11 | * Software is furnished to do so, subject to the following conditions: |
11 | * Software is furnished to do so, subject to the following conditions: |
12 | * |
12 | * |
13 | * The above copyright notice and this permission notice shall be included in |
13 | * The above copyright notice and this permission notice shall be included in |
14 | * all copies or substantial portions of the Software. |
14 | * all copies or substantial portions of the Software. |
15 | * |
15 | * |
16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
19 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR |
19 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR |
20 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
20 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
21 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
21 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
22 | * OTHER DEALINGS IN THE SOFTWARE. |
22 | * OTHER DEALINGS IN THE SOFTWARE. |
23 | * |
23 | * |
24 | * Authors: Dave Airlie |
24 | * Authors: Dave Airlie |
25 | * Alex Deucher |
25 | * Alex Deucher |
26 | * Jerome Glisse |
26 | * Jerome Glisse |
27 | */ |
27 | */ |
28 | #ifndef __RADEON_ASIC_H__ |
28 | #ifndef __RADEON_ASIC_H__ |
29 | #define __RADEON_ASIC_H__ |
29 | #define __RADEON_ASIC_H__ |
30 | 30 | ||
31 | /* |
31 | /* |
32 | * common functions |
32 | * common functions |
33 | */ |
33 | */ |
34 | void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); |
34 | void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); |
35 | void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable); |
35 | void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable); |
36 | 36 | ||
37 | void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); |
37 | void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); |
38 | void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock); |
38 | void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock); |
39 | void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable); |
39 | void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable); |
40 | 40 | ||
41 | /* |
41 | /* |
42 | * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 |
42 | * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 |
43 | */ |
43 | */ |
44 | int r100_init(struct radeon_device *rdev); |
44 | int r100_init(struct radeon_device *rdev); |
- | 45 | int r200_init(struct radeon_device *rdev); |
|
45 | uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg); |
46 | uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg); |
46 | void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
47 | void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
47 | void r100_errata(struct radeon_device *rdev); |
48 | void r100_errata(struct radeon_device *rdev); |
48 | void r100_vram_info(struct radeon_device *rdev); |
49 | void r100_vram_info(struct radeon_device *rdev); |
- | 50 | void r100_vga_set_state(struct radeon_device *rdev, bool state); |
|
49 | int r100_gpu_reset(struct radeon_device *rdev); |
51 | int r100_gpu_reset(struct radeon_device *rdev); |
50 | int r100_mc_init(struct radeon_device *rdev); |
52 | int r100_mc_init(struct radeon_device *rdev); |
51 | void r100_mc_fini(struct radeon_device *rdev); |
53 | void r100_mc_fini(struct radeon_device *rdev); |
- | 54 | u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc); |
|
52 | int r100_wb_init(struct radeon_device *rdev); |
55 | int r100_wb_init(struct radeon_device *rdev); |
53 | void r100_wb_fini(struct radeon_device *rdev); |
56 | void r100_wb_fini(struct radeon_device *rdev); |
- | 57 | int r100_pci_gart_init(struct radeon_device *rdev); |
|
- | 58 | void r100_pci_gart_fini(struct radeon_device *rdev); |
|
54 | int r100_gart_enable(struct radeon_device *rdev); |
59 | int r100_pci_gart_enable(struct radeon_device *rdev); |
55 | void r100_pci_gart_disable(struct radeon_device *rdev); |
60 | void r100_pci_gart_disable(struct radeon_device *rdev); |
56 | void r100_pci_gart_tlb_flush(struct radeon_device *rdev); |
61 | void r100_pci_gart_tlb_flush(struct radeon_device *rdev); |
57 | int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
62 | int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
58 | int r100_cp_init(struct radeon_device *rdev, unsigned ring_size); |
63 | int r100_cp_init(struct radeon_device *rdev, unsigned ring_size); |
59 | void r100_cp_fini(struct radeon_device *rdev); |
64 | void r100_cp_fini(struct radeon_device *rdev); |
60 | void r100_cp_disable(struct radeon_device *rdev); |
65 | void r100_cp_disable(struct radeon_device *rdev); |
- | 66 | void r100_cp_commit(struct radeon_device *rdev); |
|
61 | void r100_ring_start(struct radeon_device *rdev); |
67 | void r100_ring_start(struct radeon_device *rdev); |
62 | int r100_irq_set(struct radeon_device *rdev); |
68 | int r100_irq_set(struct radeon_device *rdev); |
63 | int r100_irq_process(struct radeon_device *rdev); |
69 | int r100_irq_process(struct radeon_device *rdev); |
64 | void r100_fence_ring_emit(struct radeon_device *rdev, |
70 | void r100_fence_ring_emit(struct radeon_device *rdev, |
65 | struct radeon_fence *fence); |
71 | struct radeon_fence *fence); |
66 | int r100_cs_parse(struct radeon_cs_parser *p); |
72 | int r100_cs_parse(struct radeon_cs_parser *p); |
67 | void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
73 | void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
68 | uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg); |
74 | uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg); |
69 | int r100_copy_blit(struct radeon_device *rdev, |
75 | int r100_copy_blit(struct radeon_device *rdev, |
70 | uint64_t src_offset, |
76 | uint64_t src_offset, |
71 | uint64_t dst_offset, |
77 | uint64_t dst_offset, |
72 | unsigned num_pages, |
78 | unsigned num_pages, |
73 | struct radeon_fence *fence); |
79 | struct radeon_fence *fence); |
- | 80 | int r100_set_surface_reg(struct radeon_device *rdev, int reg, |
|
- | 81 | uint32_t tiling_flags, uint32_t pitch, |
|
- | 82 | uint32_t offset, uint32_t obj_size); |
|
- | 83 | int r100_clear_surface_reg(struct radeon_device *rdev, int reg); |
|
- | 84 | void r100_bandwidth_update(struct radeon_device *rdev); |
|
- | 85 | void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib); |
|
74 | - | ||
- | 86 | int r100_ib_test(struct radeon_device *rdev); |
|
- | 87 | int r100_ring_test(struct radeon_device *rdev); |
|
75 | 88 | ||
76 | static struct radeon_asic r100_asic = { |
89 | static struct radeon_asic r100_asic = { |
77 | .init = &r100_init, |
90 | .init = &r100_init, |
78 | .errata = &r100_errata, |
91 | .errata = &r100_errata, |
79 | .vram_info = &r100_vram_info, |
92 | .vram_info = &r100_vram_info, |
80 | .gpu_reset = &r100_gpu_reset, |
93 | .gpu_reset = &r100_gpu_reset, |
81 | .mc_init = &r100_mc_init, |
94 | .mc_init = &r100_mc_init, |
82 | .mc_fini = &r100_mc_fini, |
95 | .mc_fini = &r100_mc_fini, |
83 | // .wb_init = &r100_wb_init, |
96 | // .wb_init = &r100_wb_init, |
84 | // .wb_fini = &r100_wb_fini, |
97 | // .wb_fini = &r100_wb_fini, |
85 | .gart_enable = &r100_gart_enable, |
98 | .gart_enable = &r100_pci_gart_enable, |
86 | .gart_disable = &r100_pci_gart_disable, |
99 | .gart_disable = &r100_pci_gart_disable, |
87 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
100 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
88 | .gart_set_page = &r100_pci_gart_set_page, |
101 | .gart_set_page = &r100_pci_gart_set_page, |
89 | .cp_init = &r100_cp_init, |
102 | .cp_init = &r100_cp_init, |
90 | // .cp_fini = &r100_cp_fini, |
103 | // .cp_fini = &r100_cp_fini, |
91 | // .cp_disable = &r100_cp_disable, |
104 | // .cp_disable = &r100_cp_disable, |
92 | .ring_start = &r100_ring_start, |
105 | .ring_start = &r100_ring_start, |
93 | // .irq_set = &r100_irq_set, |
106 | // .irq_set = &r100_irq_set, |
94 | // .irq_process = &r100_irq_process, |
107 | // .irq_process = &r100_irq_process, |
95 | // .fence_ring_emit = &r100_fence_ring_emit, |
108 | // .fence_ring_emit = &r100_fence_ring_emit, |
96 | // .cs_parse = &r100_cs_parse, |
109 | // .cs_parse = &r100_cs_parse, |
97 | // .copy_blit = &r100_copy_blit, |
110 | // .copy_blit = &r100_copy_blit, |
98 | // .copy_dma = NULL, |
111 | // .copy_dma = NULL, |
99 | // .copy = &r100_copy_blit, |
112 | // .copy = &r100_copy_blit, |
100 | // .set_engine_clock = &radeon_legacy_set_engine_clock, |
113 | // .set_engine_clock = &radeon_legacy_set_engine_clock, |
101 | // .set_memory_clock = NULL, |
114 | // .set_memory_clock = NULL, |
102 | // .set_pcie_lanes = NULL, |
115 | // .set_pcie_lanes = NULL, |
103 | // .set_clock_gating = &radeon_legacy_set_clock_gating, |
116 | // .set_clock_gating = &radeon_legacy_set_clock_gating, |
- | 117 | .set_surface_reg = r100_set_surface_reg, |
|
- | 118 | .clear_surface_reg = r100_clear_surface_reg, |
|
- | 119 | .bandwidth_update = &r100_bandwidth_update, |
|
104 | }; |
120 | }; |
105 | 121 | ||
106 | 122 | ||
107 | /* |
123 | /* |
108 | * r300,r350,rv350,rv380 |
124 | * r300,r350,rv350,rv380 |
109 | */ |
125 | */ |
110 | int r300_init(struct radeon_device *rdev); |
126 | int r300_init(struct radeon_device *rdev); |
111 | void r300_errata(struct radeon_device *rdev); |
127 | void r300_errata(struct radeon_device *rdev); |
112 | void r300_vram_info(struct radeon_device *rdev); |
128 | void r300_vram_info(struct radeon_device *rdev); |
113 | int r300_gpu_reset(struct radeon_device *rdev); |
129 | int r300_gpu_reset(struct radeon_device *rdev); |
114 | int r300_mc_init(struct radeon_device *rdev); |
130 | int r300_mc_init(struct radeon_device *rdev); |
115 | void r300_mc_fini(struct radeon_device *rdev); |
131 | void r300_mc_fini(struct radeon_device *rdev); |
116 | void r300_ring_start(struct radeon_device *rdev); |
132 | void r300_ring_start(struct radeon_device *rdev); |
117 | void r300_fence_ring_emit(struct radeon_device *rdev, |
133 | void r300_fence_ring_emit(struct radeon_device *rdev, |
118 | struct radeon_fence *fence); |
134 | struct radeon_fence *fence); |
119 | int r300_cs_parse(struct radeon_cs_parser *p); |
135 | int r300_cs_parse(struct radeon_cs_parser *p); |
- | 136 | int rv370_pcie_gart_init(struct radeon_device *rdev); |
|
- | 137 | void rv370_pcie_gart_fini(struct radeon_device *rdev); |
|
120 | int r300_gart_enable(struct radeon_device *rdev); |
138 | int rv370_pcie_gart_enable(struct radeon_device *rdev); |
121 | void rv370_pcie_gart_disable(struct radeon_device *rdev); |
139 | void rv370_pcie_gart_disable(struct radeon_device *rdev); |
122 | void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev); |
140 | void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev); |
123 | int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
141 | int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
124 | uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg); |
142 | uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg); |
125 | void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
143 | void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
126 | void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes); |
144 | void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes); |
127 | int r300_copy_dma(struct radeon_device *rdev, |
145 | int r300_copy_dma(struct radeon_device *rdev, |
128 | uint64_t src_offset, |
146 | uint64_t src_offset, |
129 | uint64_t dst_offset, |
147 | uint64_t dst_offset, |
130 | unsigned num_pages, |
148 | unsigned num_pages, |
131 | struct radeon_fence *fence); |
149 | struct radeon_fence *fence); |
132 | - | ||
133 | 150 | ||
134 | static struct radeon_asic r300_asic = { |
151 | static struct radeon_asic r300_asic = { |
135 | .init = &r300_init, |
152 | .init = &r300_init, |
136 | .errata = &r300_errata, |
153 | .errata = &r300_errata, |
137 | .vram_info = &r300_vram_info, |
154 | .vram_info = &r300_vram_info, |
- | 155 | .vga_set_state = &r100_vga_set_state, |
|
138 | .gpu_reset = &r300_gpu_reset, |
156 | .gpu_reset = &r300_gpu_reset, |
139 | .mc_init = &r300_mc_init, |
157 | .mc_init = &r300_mc_init, |
140 | .mc_fini = &r300_mc_fini, |
158 | .mc_fini = &r300_mc_fini, |
141 | // .wb_init = &r100_wb_init, |
159 | // .wb_init = &r100_wb_init, |
142 | // .wb_fini = &r100_wb_fini, |
160 | // .wb_fini = &r100_wb_fini, |
143 | .gart_enable = &r300_gart_enable, |
161 | .gart_enable = &r100_pci_gart_enable, |
144 | .gart_disable = &r100_pci_gart_disable, |
162 | .gart_disable = &r100_pci_gart_disable, |
145 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
163 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
146 | .gart_set_page = &r100_pci_gart_set_page, |
164 | .gart_set_page = &r100_pci_gart_set_page, |
147 | .cp_init = &r100_cp_init, |
165 | .cp_init = &r100_cp_init, |
148 | // .cp_fini = &r100_cp_fini, |
166 | // .cp_fini = &r100_cp_fini, |
149 | // .cp_disable = &r100_cp_disable, |
167 | // .cp_disable = &r100_cp_disable, |
150 | .ring_start = &r300_ring_start, |
168 | .ring_start = &r300_ring_start, |
151 | // .irq_set = &r100_irq_set, |
169 | // .irq_set = &r100_irq_set, |
152 | // .irq_process = &r100_irq_process, |
170 | // .irq_process = &r100_irq_process, |
153 | // .fence_ring_emit = &r300_fence_ring_emit, |
171 | // .fence_ring_emit = &r300_fence_ring_emit, |
154 | // .cs_parse = &r300_cs_parse, |
172 | // .cs_parse = &r300_cs_parse, |
155 | // .copy_blit = &r100_copy_blit, |
173 | // .copy_blit = &r100_copy_blit, |
156 | // .copy_dma = &r300_copy_dma, |
174 | // .copy_dma = &r300_copy_dma, |
157 | // .copy = &r100_copy_blit, |
175 | // .copy = &r100_copy_blit, |
158 | // .set_engine_clock = &radeon_legacy_set_engine_clock, |
176 | // .set_engine_clock = &radeon_legacy_set_engine_clock, |
159 | // .set_memory_clock = NULL, |
177 | // .set_memory_clock = NULL, |
160 | // .set_pcie_lanes = &rv370_set_pcie_lanes, |
178 | // .set_pcie_lanes = &rv370_set_pcie_lanes, |
161 | // .set_clock_gating = &radeon_legacy_set_clock_gating, |
179 | // .set_clock_gating = &radeon_legacy_set_clock_gating, |
- | 180 | .set_surface_reg = r100_set_surface_reg, |
|
- | 181 | .clear_surface_reg = r100_clear_surface_reg, |
|
- | 182 | .bandwidth_update = &r100_bandwidth_update, |
|
162 | }; |
183 | }; |
163 | - | ||
164 | 184 | ||
165 | /* |
185 | /* |
166 | * r420,r423,rv410 |
186 | * r420,r423,rv410 |
167 | */ |
187 | */ |
168 | void r420_errata(struct radeon_device *rdev); |
188 | extern int r420_init(struct radeon_device *rdev); |
169 | void r420_vram_info(struct radeon_device *rdev); |
189 | extern void r420_fini(struct radeon_device *rdev); |
170 | int r420_mc_init(struct radeon_device *rdev); |
190 | extern int r420_suspend(struct radeon_device *rdev); |
171 | void r420_mc_fini(struct radeon_device *rdev); |
191 | extern int r420_resume(struct radeon_device *rdev); |
172 | static struct radeon_asic r420_asic = { |
192 | static struct radeon_asic r420_asic = { |
173 | .init = &r300_init, |
193 | .init = &r420_init, |
- | 194 | .fini = &r420_fini, |
|
- | 195 | .suspend = &r420_suspend, |
|
174 | .errata = &r420_errata, |
196 | .resume = &r420_resume, |
- | 197 | .errata = NULL, |
|
175 | .vram_info = &r420_vram_info, |
198 | .vram_info = NULL, |
- | 199 | .vga_set_state = &r100_vga_set_state, |
|
176 | .gpu_reset = &r300_gpu_reset, |
200 | .gpu_reset = &r300_gpu_reset, |
177 | .mc_init = &r420_mc_init, |
201 | .mc_init = NULL, |
178 | .mc_fini = &r420_mc_fini, |
202 | .mc_fini = NULL, |
179 | // .wb_init = &r100_wb_init, |
203 | .wb_init = NULL, |
180 | // .wb_fini = &r100_wb_fini, |
204 | .wb_fini = NULL, |
181 | .gart_enable = &r300_gart_enable, |
205 | .gart_enable = NULL, |
182 | .gart_disable = &rv370_pcie_gart_disable, |
206 | .gart_disable = NULL, |
183 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
207 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
184 | .gart_set_page = &rv370_pcie_gart_set_page, |
208 | .gart_set_page = &rv370_pcie_gart_set_page, |
185 | .cp_init = &r100_cp_init, |
209 | .cp_init = NULL, |
186 | // .cp_fini = &r100_cp_fini, |
210 | .cp_fini = NULL, |
187 | // .cp_disable = &r100_cp_disable, |
211 | .cp_disable = NULL, |
188 | .ring_start = &r300_ring_start, |
212 | .ring_start = &r300_ring_start, |
189 | // .irq_set = &r100_irq_set, |
213 | // .irq_set = &r100_irq_set, |
190 | // .irq_process = &r100_irq_process, |
214 | // .irq_process = &r100_irq_process, |
191 | // .fence_ring_emit = &r300_fence_ring_emit, |
215 | // .fence_ring_emit = &r300_fence_ring_emit, |
192 | // .cs_parse = &r300_cs_parse, |
216 | // .cs_parse = &r300_cs_parse, |
193 | // .copy_blit = &r100_copy_blit, |
217 | // .copy_blit = &r100_copy_blit, |
194 | // .copy_dma = &r300_copy_dma, |
218 | // .copy_dma = &r300_copy_dma, |
195 | // .copy = &r100_copy_blit, |
219 | // .copy = &r100_copy_blit, |
196 | // .set_engine_clock = &radeon_atom_set_engine_clock, |
220 | // .set_engine_clock = &radeon_atom_set_engine_clock, |
197 | // .set_memory_clock = &radeon_atom_set_memory_clock, |
221 | // .set_memory_clock = &radeon_atom_set_memory_clock, |
198 | // .set_pcie_lanes = &rv370_set_pcie_lanes, |
222 | // .set_pcie_lanes = &rv370_set_pcie_lanes, |
199 | // .set_clock_gating = &radeon_atom_set_clock_gating, |
223 | // .set_clock_gating = &radeon_atom_set_clock_gating, |
- | 224 | .set_surface_reg = r100_set_surface_reg, |
|
- | 225 | .clear_surface_reg = r100_clear_surface_reg, |
|
- | 226 | .bandwidth_update = &r100_bandwidth_update, |
|
200 | }; |
227 | }; |
201 | 228 | ||
202 | 229 | ||
203 | /* |
230 | /* |
204 | * rs400,rs480 |
231 | * rs400,rs480 |
205 | */ |
232 | */ |
206 | void rs400_errata(struct radeon_device *rdev); |
233 | void rs400_errata(struct radeon_device *rdev); |
207 | void rs400_vram_info(struct radeon_device *rdev); |
234 | void rs400_vram_info(struct radeon_device *rdev); |
208 | int rs400_mc_init(struct radeon_device *rdev); |
235 | int rs400_mc_init(struct radeon_device *rdev); |
209 | void rs400_mc_fini(struct radeon_device *rdev); |
236 | void rs400_mc_fini(struct radeon_device *rdev); |
- | 237 | int rs400_gart_init(struct radeon_device *rdev); |
|
- | 238 | void rs400_gart_fini(struct radeon_device *rdev); |
|
210 | int rs400_gart_enable(struct radeon_device *rdev); |
239 | int rs400_gart_enable(struct radeon_device *rdev); |
211 | void rs400_gart_disable(struct radeon_device *rdev); |
240 | void rs400_gart_disable(struct radeon_device *rdev); |
212 | void rs400_gart_tlb_flush(struct radeon_device *rdev); |
241 | void rs400_gart_tlb_flush(struct radeon_device *rdev); |
213 | int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
242 | int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
214 | uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
243 | uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
215 | void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
244 | void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
216 | static struct radeon_asic rs400_asic = { |
245 | static struct radeon_asic rs400_asic = { |
217 | .init = &r300_init, |
246 | .init = &r300_init, |
218 | .errata = &rs400_errata, |
247 | .errata = &rs400_errata, |
219 | .vram_info = &rs400_vram_info, |
248 | .vram_info = &rs400_vram_info, |
- | 249 | .vga_set_state = &r100_vga_set_state, |
|
220 | .gpu_reset = &r300_gpu_reset, |
250 | .gpu_reset = &r300_gpu_reset, |
221 | .mc_init = &rs400_mc_init, |
251 | .mc_init = &rs400_mc_init, |
222 | .mc_fini = &rs400_mc_fini, |
252 | .mc_fini = &rs400_mc_fini, |
223 | // .wb_init = &r100_wb_init, |
253 | // .wb_init = &r100_wb_init, |
224 | // .wb_fini = &r100_wb_fini, |
254 | // .wb_fini = &r100_wb_fini, |
- | 255 | .gart_init = &rs400_gart_init, |
|
- | 256 | .gart_fini = &rs400_gart_fini, |
|
225 | .gart_enable = &rs400_gart_enable, |
257 | .gart_enable = &rs400_gart_enable, |
226 | .gart_disable = &rs400_gart_disable, |
258 | .gart_disable = &rs400_gart_disable, |
227 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
259 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
228 | .gart_set_page = &rs400_gart_set_page, |
260 | .gart_set_page = &rs400_gart_set_page, |
229 | .cp_init = &r100_cp_init, |
261 | .cp_init = &r100_cp_init, |
230 | // .cp_fini = &r100_cp_fini, |
262 | // .cp_fini = &r100_cp_fini, |
231 | // .cp_disable = &r100_cp_disable, |
263 | // .cp_disable = &r100_cp_disable, |
- | 264 | .cp_commit = &r100_cp_commit, |
|
232 | .ring_start = &r300_ring_start, |
265 | .ring_start = &r300_ring_start, |
233 | // .irq_set = &r100_irq_set, |
266 | // .irq_set = &r100_irq_set, |
234 | // .irq_process = &r100_irq_process, |
267 | // .irq_process = &r100_irq_process, |
235 | // .fence_ring_emit = &r300_fence_ring_emit, |
268 | // .fence_ring_emit = &r300_fence_ring_emit, |
236 | // .cs_parse = &r300_cs_parse, |
269 | // .cs_parse = &r300_cs_parse, |
237 | // .copy_blit = &r100_copy_blit, |
270 | // .copy_blit = &r100_copy_blit, |
238 | // .copy_dma = &r300_copy_dma, |
271 | // .copy_dma = &r300_copy_dma, |
239 | // .copy = &r100_copy_blit, |
272 | // .copy = &r100_copy_blit, |
240 | // .set_engine_clock = &radeon_legacy_set_engine_clock, |
273 | // .set_engine_clock = &radeon_legacy_set_engine_clock, |
241 | // .set_memory_clock = NULL, |
274 | // .set_memory_clock = NULL, |
242 | // .set_pcie_lanes = NULL, |
275 | // .set_pcie_lanes = NULL, |
243 | // .set_clock_gating = &radeon_legacy_set_clock_gating, |
276 | // .set_clock_gating = &radeon_legacy_set_clock_gating, |
- | 277 | .set_surface_reg = r100_set_surface_reg, |
|
- | 278 | .clear_surface_reg = r100_clear_surface_reg, |
|
- | 279 | .bandwidth_update = &r100_bandwidth_update, |
|
244 | }; |
280 | }; |
245 | 281 | ||
246 | 282 | ||
247 | /* |
283 | /* |
248 | * rs600. |
284 | * rs600. |
249 | */ |
285 | */ |
- | 286 | int rs600_init(struct radeon_device *rdev); |
|
250 | void rs600_errata(struct radeon_device *rdev); |
287 | void rs600_errata(struct radeon_device *rdev); |
251 | void rs600_vram_info(struct radeon_device *rdev); |
288 | void rs600_vram_info(struct radeon_device *rdev); |
252 | int rs600_mc_init(struct radeon_device *rdev); |
289 | int rs600_mc_init(struct radeon_device *rdev); |
253 | void rs600_mc_fini(struct radeon_device *rdev); |
290 | void rs600_mc_fini(struct radeon_device *rdev); |
254 | int rs600_irq_set(struct radeon_device *rdev); |
291 | int rs600_irq_set(struct radeon_device *rdev); |
- | 292 | int rs600_irq_process(struct radeon_device *rdev); |
|
- | 293 | u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc); |
|
- | 294 | int rs600_gart_init(struct radeon_device *rdev); |
|
- | 295 | void rs600_gart_fini(struct radeon_device *rdev); |
|
255 | int rs600_gart_enable(struct radeon_device *rdev); |
296 | int rs600_gart_enable(struct radeon_device *rdev); |
256 | void rs600_gart_disable(struct radeon_device *rdev); |
297 | void rs600_gart_disable(struct radeon_device *rdev); |
257 | void rs600_gart_tlb_flush(struct radeon_device *rdev); |
298 | void rs600_gart_tlb_flush(struct radeon_device *rdev); |
258 | int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
299 | int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
259 | uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
300 | uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
260 | void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
301 | void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
261 | - | ||
- | 302 | void rs600_bandwidth_update(struct radeon_device *rdev); |
|
262 | static struct radeon_asic rs600_asic = { |
303 | static struct radeon_asic rs600_asic = { |
263 | .init = &r300_init, |
304 | .init = &rs600_init, |
264 | .errata = &rs600_errata, |
305 | .errata = &rs600_errata, |
265 | .vram_info = &rs600_vram_info, |
306 | .vram_info = &rs600_vram_info, |
- | 307 | .vga_set_state = &r100_vga_set_state, |
|
266 | .gpu_reset = &r300_gpu_reset, |
308 | .gpu_reset = &r300_gpu_reset, |
267 | .mc_init = &rs600_mc_init, |
309 | .mc_init = &rs600_mc_init, |
268 | .mc_fini = &rs600_mc_fini, |
310 | .mc_fini = &rs600_mc_fini, |
269 | // .wb_init = &r100_wb_init, |
311 | // .wb_init = &r100_wb_init, |
270 | // .wb_fini = &r100_wb_fini, |
312 | // .wb_fini = &r100_wb_fini, |
- | 313 | .gart_init = &rs600_gart_init, |
|
- | 314 | .gart_fini = &rs600_gart_fini, |
|
271 | .gart_enable = &rs600_gart_enable, |
315 | .gart_enable = &rs600_gart_enable, |
272 | .gart_disable = &rs600_gart_disable, |
316 | .gart_disable = &rs600_gart_disable, |
273 | .gart_tlb_flush = &rs600_gart_tlb_flush, |
317 | .gart_tlb_flush = &rs600_gart_tlb_flush, |
274 | .gart_set_page = &rs600_gart_set_page, |
318 | .gart_set_page = &rs600_gart_set_page, |
275 | .cp_init = &r100_cp_init, |
319 | .cp_init = &r100_cp_init, |
276 | // .cp_fini = &r100_cp_fini, |
320 | // .cp_fini = &r100_cp_fini, |
277 | // .cp_disable = &r100_cp_disable, |
321 | // .cp_disable = &r100_cp_disable, |
- | 322 | .cp_commit = &r100_cp_commit, |
|
278 | .ring_start = &r300_ring_start, |
323 | .ring_start = &r300_ring_start, |
279 | // .irq_set = &rs600_irq_set, |
324 | // .irq_set = &rs600_irq_set, |
280 | // .irq_process = &r100_irq_process, |
325 | // .irq_process = &r100_irq_process, |
281 | // .fence_ring_emit = &r300_fence_ring_emit, |
326 | // .fence_ring_emit = &r300_fence_ring_emit, |
282 | // .cs_parse = &r300_cs_parse, |
327 | // .cs_parse = &r300_cs_parse, |
283 | // .copy_blit = &r100_copy_blit, |
328 | // .copy_blit = &r100_copy_blit, |
284 | // .copy_dma = &r300_copy_dma, |
329 | // .copy_dma = &r300_copy_dma, |
285 | // .copy = &r100_copy_blit, |
330 | // .copy = &r100_copy_blit, |
286 | // .set_engine_clock = &radeon_atom_set_engine_clock, |
331 | // .set_engine_clock = &radeon_atom_set_engine_clock, |
287 | // .set_memory_clock = &radeon_atom_set_memory_clock, |
332 | // .set_memory_clock = &radeon_atom_set_memory_clock, |
288 | // .set_pcie_lanes = NULL, |
333 | // .set_pcie_lanes = NULL, |
289 | // .set_clock_gating = &radeon_atom_set_clock_gating, |
334 | // .set_clock_gating = &radeon_atom_set_clock_gating, |
- | 335 | .bandwidth_update = &rs600_bandwidth_update, |
|
290 | }; |
336 | }; |
291 | 337 | ||
292 | 338 | ||
293 | /* |
339 | /* |
294 | * rs690,rs740 |
340 | * rs690,rs740 |
295 | */ |
341 | */ |
296 | void rs690_errata(struct radeon_device *rdev); |
342 | void rs690_errata(struct radeon_device *rdev); |
297 | void rs690_vram_info(struct radeon_device *rdev); |
343 | void rs690_vram_info(struct radeon_device *rdev); |
298 | int rs690_mc_init(struct radeon_device *rdev); |
344 | int rs690_mc_init(struct radeon_device *rdev); |
299 | void rs690_mc_fini(struct radeon_device *rdev); |
345 | void rs690_mc_fini(struct radeon_device *rdev); |
300 | uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
346 | uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
301 | void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
347 | void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
- | 348 | void rs690_bandwidth_update(struct radeon_device *rdev); |
|
302 | static struct radeon_asic rs690_asic = { |
349 | static struct radeon_asic rs690_asic = { |
303 | .init = &r300_init, |
350 | .init = &rs600_init, |
304 | .errata = &rs690_errata, |
351 | .errata = &rs690_errata, |
305 | .vram_info = &rs690_vram_info, |
352 | .vram_info = &rs690_vram_info, |
- | 353 | .vga_set_state = &r100_vga_set_state, |
|
306 | .gpu_reset = &r300_gpu_reset, |
354 | .gpu_reset = &r300_gpu_reset, |
307 | .mc_init = &rs690_mc_init, |
355 | .mc_init = &rs690_mc_init, |
308 | .mc_fini = &rs690_mc_fini, |
356 | .mc_fini = &rs690_mc_fini, |
309 | // .wb_init = &r100_wb_init, |
357 | // .wb_init = &r100_wb_init, |
310 | // .wb_fini = &r100_wb_fini, |
358 | // .wb_fini = &r100_wb_fini, |
- | 359 | .gart_init = &rs400_gart_init, |
|
- | 360 | .gart_fini = &rs400_gart_fini, |
|
311 | .gart_enable = &rs400_gart_enable, |
361 | .gart_enable = &rs400_gart_enable, |
312 | .gart_disable = &rs400_gart_disable, |
362 | .gart_disable = &rs400_gart_disable, |
313 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
363 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
314 | .gart_set_page = &rs400_gart_set_page, |
364 | .gart_set_page = &rs400_gart_set_page, |
315 | .cp_init = &r100_cp_init, |
365 | .cp_init = &r100_cp_init, |
316 | // .cp_fini = &r100_cp_fini, |
366 | // .cp_fini = &r100_cp_fini, |
317 | // .cp_disable = &r100_cp_disable, |
367 | // .cp_disable = &r100_cp_disable, |
- | 368 | .cp_commit = &r100_cp_commit, |
|
318 | .ring_start = &r300_ring_start, |
369 | .ring_start = &r300_ring_start, |
319 | // .irq_set = &rs600_irq_set, |
370 | // .irq_set = &rs600_irq_set, |
320 | // .irq_process = &r100_irq_process, |
371 | // .irq_process = &r100_irq_process, |
321 | // .fence_ring_emit = &r300_fence_ring_emit, |
372 | // .fence_ring_emit = &r300_fence_ring_emit, |
322 | // .cs_parse = &r300_cs_parse, |
373 | // .cs_parse = &r300_cs_parse, |
323 | // .copy_blit = &r100_copy_blit, |
374 | // .copy_blit = &r100_copy_blit, |
324 | // .copy_dma = &r300_copy_dma, |
375 | // .copy_dma = &r300_copy_dma, |
325 | // .copy = &r300_copy_dma, |
376 | // .copy = &r300_copy_dma, |
326 | // .set_engine_clock = &radeon_atom_set_engine_clock, |
377 | // .set_engine_clock = &radeon_atom_set_engine_clock, |
327 | // .set_memory_clock = &radeon_atom_set_memory_clock, |
378 | // .set_memory_clock = &radeon_atom_set_memory_clock, |
328 | // .set_pcie_lanes = NULL, |
379 | // .set_pcie_lanes = NULL, |
329 | // .set_clock_gating = &radeon_atom_set_clock_gating, |
380 | // .set_clock_gating = &radeon_atom_set_clock_gating, |
- | 381 | .set_surface_reg = r100_set_surface_reg, |
|
- | 382 | .clear_surface_reg = r100_clear_surface_reg, |
|
- | 383 | .bandwidth_update = &rs690_bandwidth_update, |
|
330 | }; |
384 | }; |
- | 385 | ||
331 | 386 | ||
332 | /* |
387 | /* |
333 | * rv515 |
388 | * rv515 |
334 | */ |
389 | */ |
335 | int rv515_init(struct radeon_device *rdev); |
390 | int rv515_init(struct radeon_device *rdev); |
336 | void rv515_errata(struct radeon_device *rdev); |
391 | void rv515_errata(struct radeon_device *rdev); |
337 | void rv515_vram_info(struct radeon_device *rdev); |
392 | void rv515_vram_info(struct radeon_device *rdev); |
338 | int rv515_gpu_reset(struct radeon_device *rdev); |
393 | int rv515_gpu_reset(struct radeon_device *rdev); |
339 | int rv515_mc_init(struct radeon_device *rdev); |
394 | int rv515_mc_init(struct radeon_device *rdev); |
340 | void rv515_mc_fini(struct radeon_device *rdev); |
395 | void rv515_mc_fini(struct radeon_device *rdev); |
341 | uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
396 | uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
342 | void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
397 | void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
343 | void rv515_ring_start(struct radeon_device *rdev); |
398 | void rv515_ring_start(struct radeon_device *rdev); |
344 | uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg); |
399 | uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg); |
345 | void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
400 | void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
346 | - | ||
347 | - | ||
- | 401 | void rv515_bandwidth_update(struct radeon_device *rdev); |
|
348 | static struct radeon_asic rv515_asic = { |
402 | static struct radeon_asic rv515_asic = { |
349 | .init = &rv515_init, |
403 | .init = &rv515_init, |
350 | .errata = &rv515_errata, |
404 | .errata = &rv515_errata, |
351 | .vram_info = &rv515_vram_info, |
405 | .vram_info = &rv515_vram_info, |
- | 406 | .vga_set_state = &r100_vga_set_state, |
|
352 | .gpu_reset = &rv515_gpu_reset, |
407 | .gpu_reset = &rv515_gpu_reset, |
353 | .mc_init = &rv515_mc_init, |
408 | .mc_init = &rv515_mc_init, |
354 | .mc_fini = &rv515_mc_fini, |
409 | .mc_fini = &rv515_mc_fini, |
355 | // .wb_init = &r100_wb_init, |
410 | // .wb_init = &r100_wb_init, |
356 | // .wb_fini = &r100_wb_fini, |
411 | // .wb_fini = &r100_wb_fini, |
- | 412 | .gart_init = &rv370_pcie_gart_init, |
|
- | 413 | .gart_fini = &rv370_pcie_gart_fini, |
|
357 | .gart_enable = &r300_gart_enable, |
414 | .gart_enable = &rv370_pcie_gart_enable, |
358 | .gart_disable = &rv370_pcie_gart_disable, |
415 | .gart_disable = &rv370_pcie_gart_disable, |
359 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
416 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
360 | .gart_set_page = &rv370_pcie_gart_set_page, |
417 | .gart_set_page = &rv370_pcie_gart_set_page, |
361 | .cp_init = &r100_cp_init, |
418 | .cp_init = &r100_cp_init, |
362 | // .cp_fini = &r100_cp_fini, |
419 | // .cp_fini = &r100_cp_fini, |
363 | // .cp_disable = &r100_cp_disable, |
420 | // .cp_disable = &r100_cp_disable, |
- | 421 | .cp_commit = &r100_cp_commit, |
|
364 | .ring_start = &rv515_ring_start, |
422 | .ring_start = &rv515_ring_start, |
365 | // .irq_set = &r100_irq_set, |
423 | // .irq_set = &r100_irq_set, |
366 | // .irq_process = &r100_irq_process, |
424 | // .irq_process = &r100_irq_process, |
367 | // .fence_ring_emit = &r300_fence_ring_emit, |
425 | // .fence_ring_emit = &r300_fence_ring_emit, |
368 | // .cs_parse = &r300_cs_parse, |
426 | // .cs_parse = &r300_cs_parse, |
369 | // .copy_blit = &r100_copy_blit, |
427 | // .copy_blit = &r100_copy_blit, |
370 | // .copy_dma = &r300_copy_dma, |
428 | // .copy_dma = &r300_copy_dma, |
371 | // .copy = &r100_copy_blit, |
429 | // .copy = &r100_copy_blit, |
372 | // .set_engine_clock = &radeon_atom_set_engine_clock, |
430 | // .set_engine_clock = &radeon_atom_set_engine_clock, |
373 | // .set_memory_clock = &radeon_atom_set_memory_clock, |
431 | // .set_memory_clock = &radeon_atom_set_memory_clock, |
374 | // .set_pcie_lanes = &rv370_set_pcie_lanes, |
432 | // .set_pcie_lanes = &rv370_set_pcie_lanes, |
375 | // .set_clock_gating = &radeon_atom_set_clock_gating, |
433 | // .set_clock_gating = &radeon_atom_set_clock_gating, |
- | 434 | .set_surface_reg = r100_set_surface_reg, |
|
- | 435 | .clear_surface_reg = r100_clear_surface_reg, |
|
- | 436 | .bandwidth_update = &rv515_bandwidth_update, |
|
376 | }; |
437 | }; |
377 | 438 | ||
378 | 439 | ||
/*
 * r520,rv530,rv560,rv570,r580
 *
 * R520-family specific routines; everything else is inherited from the
 * rv515 code paths (see r520_asic below).
 */
void r520_errata(struct radeon_device *rdev);
void r520_vram_info(struct radeon_device *rdev);
int r520_mc_init(struct radeon_device *rdev);
void r520_mc_fini(struct radeon_device *rdev);
void r520_bandwidth_update(struct radeon_device *rdev);
|
387 | static struct radeon_asic r520_asic = { |
448 | static struct radeon_asic r520_asic = { |
388 | .init = &rv515_init, |
449 | .init = &rv515_init, |
389 | .errata = &r520_errata, |
450 | .errata = &r520_errata, |
390 | .vram_info = &r520_vram_info, |
451 | .vram_info = &r520_vram_info, |
- | 452 | .vga_set_state = &r100_vga_set_state, |
|
391 | .gpu_reset = &rv515_gpu_reset, |
453 | .gpu_reset = &rv515_gpu_reset, |
392 | .mc_init = &r520_mc_init, |
454 | .mc_init = &r520_mc_init, |
393 | .mc_fini = &r520_mc_fini, |
455 | .mc_fini = &r520_mc_fini, |
394 | // .wb_init = &r100_wb_init, |
456 | // .wb_init = &r100_wb_init, |
395 | // .wb_fini = &r100_wb_fini, |
457 | // .wb_fini = &r100_wb_fini, |
- | 458 | .gart_init = &rv370_pcie_gart_init, |
|
- | 459 | .gart_fini = &rv370_pcie_gart_fini, |
|
396 | .gart_enable = &r300_gart_enable, |
460 | .gart_enable = &rv370_pcie_gart_enable, |
397 | .gart_disable = &rv370_pcie_gart_disable, |
461 | .gart_disable = &rv370_pcie_gart_disable, |
398 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
462 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
399 | .gart_set_page = &rv370_pcie_gart_set_page, |
463 | .gart_set_page = &rv370_pcie_gart_set_page, |
400 | .cp_init = &r100_cp_init, |
464 | .cp_init = &r100_cp_init, |
401 | // .cp_fini = &r100_cp_fini, |
465 | // .cp_fini = &r100_cp_fini, |
402 | // .cp_disable = &r100_cp_disable, |
466 | // .cp_disable = &r100_cp_disable, |
- | 467 | .cp_commit = &r100_cp_commit, |
|
403 | .ring_start = &rv515_ring_start, |
468 | .ring_start = &rv515_ring_start, |
404 | // .irq_set = &r100_irq_set, |
469 | // .irq_set = &r100_irq_set, |
405 | // .irq_process = &r100_irq_process, |
470 | // .irq_process = &r100_irq_process, |
406 | // .fence_ring_emit = &r300_fence_ring_emit, |
471 | // .fence_ring_emit = &r300_fence_ring_emit, |
407 | // .cs_parse = &r300_cs_parse, |
472 | // .cs_parse = &r300_cs_parse, |
408 | // .copy_blit = &r100_copy_blit, |
473 | // .copy_blit = &r100_copy_blit, |
409 | // .copy_dma = &r300_copy_dma, |
474 | // .copy_dma = &r300_copy_dma, |
410 | // .copy = &r100_copy_blit, |
475 | // .copy = &r100_copy_blit, |
411 | // .set_engine_clock = &radeon_atom_set_engine_clock, |
476 | // .set_engine_clock = &radeon_atom_set_engine_clock, |
412 | // .set_memory_clock = &radeon_atom_set_memory_clock, |
477 | // .set_memory_clock = &radeon_atom_set_memory_clock, |
413 | // .set_pcie_lanes = &rv370_set_pcie_lanes, |
478 | // .set_pcie_lanes = &rv370_set_pcie_lanes, |
414 | // .set_clock_gating = &radeon_atom_set_clock_gating, |
479 | // .set_clock_gating = &radeon_atom_set_clock_gating, |
- | 480 | .set_surface_reg = r100_set_surface_reg, |
|
- | 481 | .clear_surface_reg = r100_clear_surface_reg, |
|
- | 482 | .bandwidth_update = &r520_bandwidth_update, |
|
415 | }; |
483 | }; |
416 | 484 | ||
417 | /* |
485 | /* |
418 | * r600,rv610,rv630,rv620,rv635,rv670,rs780,rv770,rv730,rv710 |
486 | * r600,rv610,rv630,rv620,rv635,rv670,rs780,rv770,rv730,rv710 |
419 | */ |
487 | */ |
- | 488 | int r600_init(struct radeon_device *rdev); |
|
- | 489 | void r600_fini(struct radeon_device *rdev); |
|
- | 490 | int r600_suspend(struct radeon_device *rdev); |
|
- | 491 | int r600_resume(struct radeon_device *rdev); |
|
- | 492 | void r600_vga_set_state(struct radeon_device *rdev, bool state); |
|
- | 493 | int r600_wb_init(struct radeon_device *rdev); |
|
- | 494 | void r600_wb_fini(struct radeon_device *rdev); |
|
- | 495 | void r600_cp_commit(struct radeon_device *rdev); |
|
- | 496 | void r600_pcie_gart_tlb_flush(struct radeon_device *rdev); |
|
420 | uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg); |
497 | uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg); |
421 | void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
498 | void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
422 | 499 | ||
423 | #endif |
500 | #endif |