Rev 1404 | Rev 1413 | Go to most recent revision | Only display areas with differences | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 1404 | Rev 1412 | ||
---|---|---|---|
1 | /* |
1 | /* |
2 | * Copyright 2008 Advanced Micro Devices, Inc. |
2 | * Copyright 2008 Advanced Micro Devices, Inc. |
3 | * Copyright 2008 Red Hat Inc. |
3 | * Copyright 2008 Red Hat Inc. |
4 | * Copyright 2009 Jerome Glisse. |
4 | * Copyright 2009 Jerome Glisse. |
5 | * |
5 | * |
6 | * Permission is hereby granted, free of charge, to any person obtaining a |
6 | * Permission is hereby granted, free of charge, to any person obtaining a |
7 | * copy of this software and associated documentation files (the "Software"), |
7 | * copy of this software and associated documentation files (the "Software"), |
8 | * to deal in the Software without restriction, including without limitation |
8 | * to deal in the Software without restriction, including without limitation |
9 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
9 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
10 | * and/or sell copies of the Software, and to permit persons to whom the |
10 | * and/or sell copies of the Software, and to permit persons to whom the |
11 | * Software is furnished to do so, subject to the following conditions: |
11 | * Software is furnished to do so, subject to the following conditions: |
12 | * |
12 | * |
13 | * The above copyright notice and this permission notice shall be included in |
13 | * The above copyright notice and this permission notice shall be included in |
14 | * all copies or substantial portions of the Software. |
14 | * all copies or substantial portions of the Software. |
15 | * |
15 | * |
16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
19 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR |
19 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR |
20 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
20 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
21 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
21 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
22 | * OTHER DEALINGS IN THE SOFTWARE. |
22 | * OTHER DEALINGS IN THE SOFTWARE. |
23 | * |
23 | * |
24 | * Authors: Dave Airlie |
24 | * Authors: Dave Airlie |
25 | * Alex Deucher |
25 | * Alex Deucher |
26 | * Jerome Glisse |
26 | * Jerome Glisse |
27 | */ |
27 | */ |
28 | #ifndef __RADEON_ASIC_H__ |
28 | #ifndef __RADEON_ASIC_H__ |
29 | #define __RADEON_ASIC_H__ |
29 | #define __RADEON_ASIC_H__ |
30 | 30 | ||
31 | /* |
31 | /* |
32 | * common functions |
32 | * common functions |
33 | */ |
33 | */ |
34 | uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev); |
34 | uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev); |
35 | void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); |
35 | void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); |
36 | uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev); |
36 | uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev); |
37 | void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable); |
37 | void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable); |
38 | 38 | ||
39 | uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev); |
39 | uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev); |
40 | void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); |
40 | void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); |
41 | uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev); |
41 | uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev); |
42 | void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock); |
42 | void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock); |
43 | void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable); |
43 | void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable); |
44 | 44 | ||
45 | /* |
45 | /* |
46 | * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 |
46 | * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 |
47 | */ |
47 | */ |
48 | extern int r100_init(struct radeon_device *rdev); |
48 | extern int r100_init(struct radeon_device *rdev); |
49 | extern void r100_fini(struct radeon_device *rdev); |
49 | extern void r100_fini(struct radeon_device *rdev); |
50 | extern int r100_suspend(struct radeon_device *rdev); |
50 | extern int r100_suspend(struct radeon_device *rdev); |
51 | extern int r100_resume(struct radeon_device *rdev); |
51 | extern int r100_resume(struct radeon_device *rdev); |
52 | uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg); |
52 | uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg); |
53 | void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
53 | void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
54 | void r100_vga_set_state(struct radeon_device *rdev, bool state); |
54 | void r100_vga_set_state(struct radeon_device *rdev, bool state); |
55 | int r100_gpu_reset(struct radeon_device *rdev); |
55 | int r100_gpu_reset(struct radeon_device *rdev); |
56 | u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc); |
56 | u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc); |
57 | void r100_pci_gart_tlb_flush(struct radeon_device *rdev); |
57 | void r100_pci_gart_tlb_flush(struct radeon_device *rdev); |
58 | int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
58 | int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
59 | void r100_cp_commit(struct radeon_device *rdev); |
59 | void r100_cp_commit(struct radeon_device *rdev); |
60 | void r100_ring_start(struct radeon_device *rdev); |
60 | void r100_ring_start(struct radeon_device *rdev); |
61 | int r100_irq_set(struct radeon_device *rdev); |
61 | int r100_irq_set(struct radeon_device *rdev); |
62 | int r100_irq_process(struct radeon_device *rdev); |
62 | int r100_irq_process(struct radeon_device *rdev); |
63 | void r100_fence_ring_emit(struct radeon_device *rdev, |
63 | void r100_fence_ring_emit(struct radeon_device *rdev, |
64 | struct radeon_fence *fence); |
64 | struct radeon_fence *fence); |
65 | int r100_cs_parse(struct radeon_cs_parser *p); |
65 | int r100_cs_parse(struct radeon_cs_parser *p); |
66 | void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
66 | void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
67 | uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg); |
67 | uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg); |
68 | int r100_copy_blit(struct radeon_device *rdev, |
68 | int r100_copy_blit(struct radeon_device *rdev, |
69 | uint64_t src_offset, |
69 | uint64_t src_offset, |
70 | uint64_t dst_offset, |
70 | uint64_t dst_offset, |
71 | unsigned num_pages, |
71 | unsigned num_pages, |
72 | struct radeon_fence *fence); |
72 | struct radeon_fence *fence); |
73 | int r100_set_surface_reg(struct radeon_device *rdev, int reg, |
73 | int r100_set_surface_reg(struct radeon_device *rdev, int reg, |
74 | uint32_t tiling_flags, uint32_t pitch, |
74 | uint32_t tiling_flags, uint32_t pitch, |
75 | uint32_t offset, uint32_t obj_size); |
75 | uint32_t offset, uint32_t obj_size); |
76 | int r100_clear_surface_reg(struct radeon_device *rdev, int reg); |
76 | int r100_clear_surface_reg(struct radeon_device *rdev, int reg); |
77 | void r100_bandwidth_update(struct radeon_device *rdev); |
77 | void r100_bandwidth_update(struct radeon_device *rdev); |
78 | void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib); |
78 | void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib); |
79 | int r100_ring_test(struct radeon_device *rdev); |
79 | int r100_ring_test(struct radeon_device *rdev); |
80 | void r100_hpd_init(struct radeon_device *rdev); |
80 | void r100_hpd_init(struct radeon_device *rdev); |
81 | void r100_hpd_fini(struct radeon_device *rdev); |
81 | void r100_hpd_fini(struct radeon_device *rdev); |
82 | bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd); |
82 | bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd); |
83 | void r100_hpd_set_polarity(struct radeon_device *rdev, |
83 | void r100_hpd_set_polarity(struct radeon_device *rdev, |
84 | enum radeon_hpd_id hpd); |
84 | enum radeon_hpd_id hpd); |
85 | 85 | ||
86 | static struct radeon_asic r100_asic = { |
86 | static struct radeon_asic r100_asic = { |
87 | .init = &r100_init, |
87 | .init = &r100_init, |
88 | // .fini = &r100_fini, |
88 | // .fini = &r100_fini, |
89 | // .suspend = &r100_suspend, |
89 | // .suspend = &r100_suspend, |
90 | // .resume = &r100_resume, |
90 | // .resume = &r100_resume, |
91 | // .vga_set_state = &r100_vga_set_state, |
91 | // .vga_set_state = &r100_vga_set_state, |
92 | .gpu_reset = &r100_gpu_reset, |
92 | .gpu_reset = &r100_gpu_reset, |
93 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
93 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
94 | .gart_set_page = &r100_pci_gart_set_page, |
94 | .gart_set_page = &r100_pci_gart_set_page, |
95 | .cp_commit = &r100_cp_commit, |
95 | .cp_commit = &r100_cp_commit, |
96 | // .ring_start = &r100_ring_start, |
96 | .ring_start = &r100_ring_start, |
97 | // .ring_test = &r100_ring_test, |
97 | .ring_test = &r100_ring_test, |
98 | // .ring_ib_execute = &r100_ring_ib_execute, |
98 | // .ring_ib_execute = &r100_ring_ib_execute, |
99 | // .irq_set = &r100_irq_set, |
99 | // .irq_set = &r100_irq_set, |
100 | // .irq_process = &r100_irq_process, |
100 | // .irq_process = &r100_irq_process, |
101 | // .get_vblank_counter = &r100_get_vblank_counter, |
101 | // .get_vblank_counter = &r100_get_vblank_counter, |
102 | // .fence_ring_emit = &r100_fence_ring_emit, |
102 | // .fence_ring_emit = &r100_fence_ring_emit, |
103 | // .cs_parse = &r100_cs_parse, |
103 | // .cs_parse = &r100_cs_parse, |
104 | // .copy_blit = &r100_copy_blit, |
104 | // .copy_blit = &r100_copy_blit, |
105 | // .copy_dma = NULL, |
105 | // .copy_dma = NULL, |
106 | // .copy = &r100_copy_blit, |
106 | // .copy = &r100_copy_blit, |
107 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
107 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
108 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
108 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
109 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
109 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
110 | .set_memory_clock = NULL, |
110 | .set_memory_clock = NULL, |
111 | .set_pcie_lanes = NULL, |
111 | .set_pcie_lanes = NULL, |
112 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
112 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
113 | .set_surface_reg = r100_set_surface_reg, |
113 | .set_surface_reg = r100_set_surface_reg, |
114 | .clear_surface_reg = r100_clear_surface_reg, |
114 | .clear_surface_reg = r100_clear_surface_reg, |
115 | .bandwidth_update = &r100_bandwidth_update, |
115 | .bandwidth_update = &r100_bandwidth_update, |
116 | .hpd_init = &r100_hpd_init, |
116 | .hpd_init = &r100_hpd_init, |
117 | .hpd_fini = &r100_hpd_fini, |
117 | .hpd_fini = &r100_hpd_fini, |
118 | .hpd_sense = &r100_hpd_sense, |
118 | .hpd_sense = &r100_hpd_sense, |
119 | .hpd_set_polarity = &r100_hpd_set_polarity, |
119 | .hpd_set_polarity = &r100_hpd_set_polarity, |
120 | .ioctl_wait_idle = NULL, |
120 | .ioctl_wait_idle = NULL, |
121 | }; |
121 | }; |
122 | 122 | ||
123 | 123 | ||
124 | /* |
124 | /* |
125 | * r300,r350,rv350,rv380 |
125 | * r300,r350,rv350,rv380 |
126 | */ |
126 | */ |
127 | extern int r300_init(struct radeon_device *rdev); |
127 | extern int r300_init(struct radeon_device *rdev); |
128 | extern void r300_fini(struct radeon_device *rdev); |
128 | extern void r300_fini(struct radeon_device *rdev); |
129 | extern int r300_suspend(struct radeon_device *rdev); |
129 | extern int r300_suspend(struct radeon_device *rdev); |
130 | extern int r300_resume(struct radeon_device *rdev); |
130 | extern int r300_resume(struct radeon_device *rdev); |
131 | extern int r300_gpu_reset(struct radeon_device *rdev); |
131 | extern int r300_gpu_reset(struct radeon_device *rdev); |
132 | extern void r300_ring_start(struct radeon_device *rdev); |
132 | extern void r300_ring_start(struct radeon_device *rdev); |
133 | extern void r300_fence_ring_emit(struct radeon_device *rdev, |
133 | extern void r300_fence_ring_emit(struct radeon_device *rdev, |
134 | struct radeon_fence *fence); |
134 | struct radeon_fence *fence); |
135 | extern int r300_cs_parse(struct radeon_cs_parser *p); |
135 | extern int r300_cs_parse(struct radeon_cs_parser *p); |
136 | extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev); |
136 | extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev); |
137 | extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
137 | extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
138 | extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg); |
138 | extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg); |
139 | extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
139 | extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
140 | extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes); |
140 | extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes); |
141 | extern int r300_copy_dma(struct radeon_device *rdev, |
141 | extern int r300_copy_dma(struct radeon_device *rdev, |
142 | uint64_t src_offset, |
142 | uint64_t src_offset, |
143 | uint64_t dst_offset, |
143 | uint64_t dst_offset, |
144 | unsigned num_pages, |
144 | unsigned num_pages, |
145 | struct radeon_fence *fence); |
145 | struct radeon_fence *fence); |
146 | static struct radeon_asic r300_asic = { |
146 | static struct radeon_asic r300_asic = { |
147 | .init = &r300_init, |
147 | .init = &r300_init, |
148 | // .fini = &r300_fini, |
148 | // .fini = &r300_fini, |
149 | // .suspend = &r300_suspend, |
149 | // .suspend = &r300_suspend, |
150 | // .resume = &r300_resume, |
150 | // .resume = &r300_resume, |
151 | // .vga_set_state = &r100_vga_set_state, |
151 | // .vga_set_state = &r100_vga_set_state, |
152 | .gpu_reset = &r300_gpu_reset, |
152 | .gpu_reset = &r300_gpu_reset, |
153 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
153 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
154 | .gart_set_page = &r100_pci_gart_set_page, |
154 | .gart_set_page = &r100_pci_gart_set_page, |
155 | // .cp_commit = &r100_cp_commit, |
155 | .cp_commit = &r100_cp_commit, |
156 | // .ring_start = &r300_ring_start, |
156 | .ring_start = &r300_ring_start, |
157 | // .ring_test = &r100_ring_test, |
157 | .ring_test = &r100_ring_test, |
158 | // .ring_ib_execute = &r100_ring_ib_execute, |
158 | // .ring_ib_execute = &r100_ring_ib_execute, |
159 | // .irq_set = &r100_irq_set, |
159 | // .irq_set = &r100_irq_set, |
160 | // .irq_process = &r100_irq_process, |
160 | // .irq_process = &r100_irq_process, |
161 | // .get_vblank_counter = &r100_get_vblank_counter, |
161 | // .get_vblank_counter = &r100_get_vblank_counter, |
162 | // .fence_ring_emit = &r300_fence_ring_emit, |
162 | // .fence_ring_emit = &r300_fence_ring_emit, |
163 | // .cs_parse = &r300_cs_parse, |
163 | // .cs_parse = &r300_cs_parse, |
164 | // .copy_blit = &r100_copy_blit, |
164 | // .copy_blit = &r100_copy_blit, |
165 | // .copy_dma = &r300_copy_dma, |
165 | // .copy_dma = &r300_copy_dma, |
166 | // .copy = &r100_copy_blit, |
166 | // .copy = &r100_copy_blit, |
167 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
167 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
168 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
168 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
169 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
169 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
170 | .set_memory_clock = NULL, |
170 | .set_memory_clock = NULL, |
171 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
171 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
172 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
172 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
173 | .set_surface_reg = r100_set_surface_reg, |
173 | .set_surface_reg = r100_set_surface_reg, |
174 | .clear_surface_reg = r100_clear_surface_reg, |
174 | .clear_surface_reg = r100_clear_surface_reg, |
175 | .bandwidth_update = &r100_bandwidth_update, |
175 | .bandwidth_update = &r100_bandwidth_update, |
176 | .hpd_init = &r100_hpd_init, |
176 | .hpd_init = &r100_hpd_init, |
177 | .hpd_fini = &r100_hpd_fini, |
177 | .hpd_fini = &r100_hpd_fini, |
178 | .hpd_sense = &r100_hpd_sense, |
178 | .hpd_sense = &r100_hpd_sense, |
179 | .hpd_set_polarity = &r100_hpd_set_polarity, |
179 | .hpd_set_polarity = &r100_hpd_set_polarity, |
180 | .ioctl_wait_idle = NULL, |
180 | .ioctl_wait_idle = NULL, |
181 | }; |
181 | }; |
182 | 182 | ||
183 | /* |
183 | /* |
184 | * r420,r423,rv410 |
184 | * r420,r423,rv410 |
185 | */ |
185 | */ |
186 | extern int r420_init(struct radeon_device *rdev); |
186 | extern int r420_init(struct radeon_device *rdev); |
187 | extern void r420_fini(struct radeon_device *rdev); |
187 | extern void r420_fini(struct radeon_device *rdev); |
188 | extern int r420_suspend(struct radeon_device *rdev); |
188 | extern int r420_suspend(struct radeon_device *rdev); |
189 | extern int r420_resume(struct radeon_device *rdev); |
189 | extern int r420_resume(struct radeon_device *rdev); |
190 | static struct radeon_asic r420_asic = { |
190 | static struct radeon_asic r420_asic = { |
191 | .init = &r420_init, |
191 | .init = &r420_init, |
192 | // .fini = &r420_fini, |
192 | // .fini = &r420_fini, |
193 | // .suspend = &r420_suspend, |
193 | // .suspend = &r420_suspend, |
194 | // .resume = &r420_resume, |
194 | // .resume = &r420_resume, |
195 | // .vga_set_state = &r100_vga_set_state, |
195 | // .vga_set_state = &r100_vga_set_state, |
196 | .gpu_reset = &r300_gpu_reset, |
196 | .gpu_reset = &r300_gpu_reset, |
197 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
197 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
198 | .gart_set_page = &rv370_pcie_gart_set_page, |
198 | .gart_set_page = &rv370_pcie_gart_set_page, |
199 | // .cp_commit = &r100_cp_commit, |
199 | .cp_commit = &r100_cp_commit, |
200 | // .ring_start = &r300_ring_start, |
200 | .ring_start = &r300_ring_start, |
201 | // .ring_test = &r100_ring_test, |
201 | .ring_test = &r100_ring_test, |
202 | // .ring_ib_execute = &r100_ring_ib_execute, |
202 | // .ring_ib_execute = &r100_ring_ib_execute, |
203 | // .irq_set = &r100_irq_set, |
203 | // .irq_set = &r100_irq_set, |
204 | // .irq_process = &r100_irq_process, |
204 | // .irq_process = &r100_irq_process, |
205 | // .get_vblank_counter = &r100_get_vblank_counter, |
205 | // .get_vblank_counter = &r100_get_vblank_counter, |
206 | // .fence_ring_emit = &r300_fence_ring_emit, |
206 | // .fence_ring_emit = &r300_fence_ring_emit, |
207 | // .cs_parse = &r300_cs_parse, |
207 | // .cs_parse = &r300_cs_parse, |
208 | // .copy_blit = &r100_copy_blit, |
208 | // .copy_blit = &r100_copy_blit, |
209 | // .copy_dma = &r300_copy_dma, |
209 | // .copy_dma = &r300_copy_dma, |
210 | // .copy = &r100_copy_blit, |
210 | // .copy = &r100_copy_blit, |
211 | .get_engine_clock = &radeon_atom_get_engine_clock, |
211 | .get_engine_clock = &radeon_atom_get_engine_clock, |
212 | .set_engine_clock = &radeon_atom_set_engine_clock, |
212 | .set_engine_clock = &radeon_atom_set_engine_clock, |
213 | .get_memory_clock = &radeon_atom_get_memory_clock, |
213 | .get_memory_clock = &radeon_atom_get_memory_clock, |
214 | .set_memory_clock = &radeon_atom_set_memory_clock, |
214 | .set_memory_clock = &radeon_atom_set_memory_clock, |
215 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
215 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
216 | .set_clock_gating = &radeon_atom_set_clock_gating, |
216 | .set_clock_gating = &radeon_atom_set_clock_gating, |
217 | .set_surface_reg = r100_set_surface_reg, |
217 | .set_surface_reg = r100_set_surface_reg, |
218 | .clear_surface_reg = r100_clear_surface_reg, |
218 | .clear_surface_reg = r100_clear_surface_reg, |
219 | .bandwidth_update = &r100_bandwidth_update, |
219 | .bandwidth_update = &r100_bandwidth_update, |
220 | .hpd_init = &r100_hpd_init, |
220 | .hpd_init = &r100_hpd_init, |
221 | .hpd_fini = &r100_hpd_fini, |
221 | .hpd_fini = &r100_hpd_fini, |
222 | .hpd_sense = &r100_hpd_sense, |
222 | .hpd_sense = &r100_hpd_sense, |
223 | .hpd_set_polarity = &r100_hpd_set_polarity, |
223 | .hpd_set_polarity = &r100_hpd_set_polarity, |
224 | .ioctl_wait_idle = NULL, |
224 | .ioctl_wait_idle = NULL, |
225 | }; |
225 | }; |
226 | 226 | ||
227 | 227 | ||
228 | /* |
228 | /* |
229 | * rs400,rs480 |
229 | * rs400,rs480 |
230 | */ |
230 | */ |
231 | extern int rs400_init(struct radeon_device *rdev); |
231 | extern int rs400_init(struct radeon_device *rdev); |
232 | extern void rs400_fini(struct radeon_device *rdev); |
232 | extern void rs400_fini(struct radeon_device *rdev); |
233 | extern int rs400_suspend(struct radeon_device *rdev); |
233 | extern int rs400_suspend(struct radeon_device *rdev); |
234 | extern int rs400_resume(struct radeon_device *rdev); |
234 | extern int rs400_resume(struct radeon_device *rdev); |
235 | void rs400_gart_tlb_flush(struct radeon_device *rdev); |
235 | void rs400_gart_tlb_flush(struct radeon_device *rdev); |
236 | int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
236 | int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
237 | uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
237 | uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
238 | void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
238 | void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
239 | static struct radeon_asic rs400_asic = { |
239 | static struct radeon_asic rs400_asic = { |
240 | .init = &rs400_init, |
240 | .init = &rs400_init, |
241 | // .fini = &rs400_fini, |
241 | // .fini = &rs400_fini, |
242 | // .suspend = &rs400_suspend, |
242 | // .suspend = &rs400_suspend, |
243 | // .resume = &rs400_resume, |
243 | // .resume = &rs400_resume, |
244 | // .vga_set_state = &r100_vga_set_state, |
244 | // .vga_set_state = &r100_vga_set_state, |
245 | .gpu_reset = &r300_gpu_reset, |
245 | .gpu_reset = &r300_gpu_reset, |
246 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
246 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
247 | .gart_set_page = &rs400_gart_set_page, |
247 | .gart_set_page = &rs400_gart_set_page, |
248 | // .cp_commit = &r100_cp_commit, |
248 | .cp_commit = &r100_cp_commit, |
249 | // .ring_start = &r300_ring_start, |
249 | .ring_start = &r300_ring_start, |
250 | // .ring_test = &r100_ring_test, |
250 | .ring_test = &r100_ring_test, |
251 | // .ring_ib_execute = &r100_ring_ib_execute, |
251 | // .ring_ib_execute = &r100_ring_ib_execute, |
252 | // .irq_set = &r100_irq_set, |
252 | // .irq_set = &r100_irq_set, |
253 | // .irq_process = &r100_irq_process, |
253 | // .irq_process = &r100_irq_process, |
254 | // .get_vblank_counter = &r100_get_vblank_counter, |
254 | // .get_vblank_counter = &r100_get_vblank_counter, |
255 | // .fence_ring_emit = &r300_fence_ring_emit, |
255 | // .fence_ring_emit = &r300_fence_ring_emit, |
256 | // .cs_parse = &r300_cs_parse, |
256 | // .cs_parse = &r300_cs_parse, |
257 | // .copy_blit = &r100_copy_blit, |
257 | // .copy_blit = &r100_copy_blit, |
258 | // .copy_dma = &r300_copy_dma, |
258 | // .copy_dma = &r300_copy_dma, |
259 | // .copy = &r100_copy_blit, |
259 | // .copy = &r100_copy_blit, |
260 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
260 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
261 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
261 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
262 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
262 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
263 | .set_memory_clock = NULL, |
263 | .set_memory_clock = NULL, |
264 | .set_pcie_lanes = NULL, |
264 | .set_pcie_lanes = NULL, |
265 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
265 | .set_clock_gating = &radeon_legacy_set_clock_gating, |
266 | .set_surface_reg = r100_set_surface_reg, |
266 | .set_surface_reg = r100_set_surface_reg, |
267 | .clear_surface_reg = r100_clear_surface_reg, |
267 | .clear_surface_reg = r100_clear_surface_reg, |
268 | .bandwidth_update = &r100_bandwidth_update, |
268 | .bandwidth_update = &r100_bandwidth_update, |
269 | .hpd_init = &r100_hpd_init, |
269 | .hpd_init = &r100_hpd_init, |
270 | .hpd_fini = &r100_hpd_fini, |
270 | .hpd_fini = &r100_hpd_fini, |
271 | .hpd_sense = &r100_hpd_sense, |
271 | .hpd_sense = &r100_hpd_sense, |
272 | .hpd_set_polarity = &r100_hpd_set_polarity, |
272 | .hpd_set_polarity = &r100_hpd_set_polarity, |
273 | .ioctl_wait_idle = NULL, |
273 | .ioctl_wait_idle = NULL, |
274 | }; |
274 | }; |
275 | 275 | ||
276 | 276 | ||
277 | /* |
277 | /* |
278 | * rs600. |
278 | * rs600. |
279 | */ |
279 | */ |
280 | extern int rs600_init(struct radeon_device *rdev); |
280 | extern int rs600_init(struct radeon_device *rdev); |
281 | extern void rs600_fini(struct radeon_device *rdev); |
281 | extern void rs600_fini(struct radeon_device *rdev); |
282 | extern int rs600_suspend(struct radeon_device *rdev); |
282 | extern int rs600_suspend(struct radeon_device *rdev); |
283 | extern int rs600_resume(struct radeon_device *rdev); |
283 | extern int rs600_resume(struct radeon_device *rdev); |
284 | int rs600_irq_set(struct radeon_device *rdev); |
284 | int rs600_irq_set(struct radeon_device *rdev); |
285 | int rs600_irq_process(struct radeon_device *rdev); |
285 | int rs600_irq_process(struct radeon_device *rdev); |
286 | u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc); |
286 | u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc); |
287 | void rs600_gart_tlb_flush(struct radeon_device *rdev); |
287 | void rs600_gart_tlb_flush(struct radeon_device *rdev); |
288 | int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
288 | int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); |
289 | uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
289 | uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg); |
290 | void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
290 | void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); |
291 | void rs600_bandwidth_update(struct radeon_device *rdev); |
291 | void rs600_bandwidth_update(struct radeon_device *rdev); |
292 | void rs600_hpd_init(struct radeon_device *rdev); |
292 | void rs600_hpd_init(struct radeon_device *rdev); |
293 | void rs600_hpd_fini(struct radeon_device *rdev); |
293 | void rs600_hpd_fini(struct radeon_device *rdev); |
294 | bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd); |
294 | bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd); |
295 | void rs600_hpd_set_polarity(struct radeon_device *rdev, |
295 | void rs600_hpd_set_polarity(struct radeon_device *rdev, |
296 | enum radeon_hpd_id hpd); |
296 | enum radeon_hpd_id hpd); |
297 | 297 | ||
298 | static struct radeon_asic rs600_asic = { |
298 | static struct radeon_asic rs600_asic = { |
299 | .init = &rs600_init, |
299 | .init = &rs600_init, |
300 | // .fini = &rs600_fini, |
300 | // .fini = &rs600_fini, |
301 | // .suspend = &rs600_suspend, |
301 | // .suspend = &rs600_suspend, |
302 | // .resume = &rs600_resume, |
302 | // .resume = &rs600_resume, |
303 | // .vga_set_state = &r100_vga_set_state, |
303 | // .vga_set_state = &r100_vga_set_state, |
304 | .gpu_reset = &r300_gpu_reset, |
304 | .gpu_reset = &r300_gpu_reset, |
305 | .gart_tlb_flush = &rs600_gart_tlb_flush, |
305 | .gart_tlb_flush = &rs600_gart_tlb_flush, |
306 | .gart_set_page = &rs600_gart_set_page, |
306 | .gart_set_page = &rs600_gart_set_page, |
307 | // .cp_commit = &r100_cp_commit, |
307 | .cp_commit = &r100_cp_commit, |
308 | // .ring_start = &r300_ring_start, |
308 | .ring_start = &r300_ring_start, |
309 | // .ring_test = &r100_ring_test, |
309 | .ring_test = &r100_ring_test, |
310 | // .ring_ib_execute = &r100_ring_ib_execute, |
310 | // .ring_ib_execute = &r100_ring_ib_execute, |
311 | // .irq_set = &rs600_irq_set, |
311 | // .irq_set = &rs600_irq_set, |
312 | // .irq_process = &rs600_irq_process, |
312 | // .irq_process = &rs600_irq_process, |
313 | // .get_vblank_counter = &rs600_get_vblank_counter, |
313 | // .get_vblank_counter = &rs600_get_vblank_counter, |
314 | // .fence_ring_emit = &r300_fence_ring_emit, |
314 | // .fence_ring_emit = &r300_fence_ring_emit, |
315 | // .cs_parse = &r300_cs_parse, |
315 | // .cs_parse = &r300_cs_parse, |
316 | // .copy_blit = &r100_copy_blit, |
316 | // .copy_blit = &r100_copy_blit, |
317 | // .copy_dma = &r300_copy_dma, |
317 | // .copy_dma = &r300_copy_dma, |
318 | // .copy = &r100_copy_blit, |
318 | // .copy = &r100_copy_blit, |
319 | .get_engine_clock = &radeon_atom_get_engine_clock, |
319 | .get_engine_clock = &radeon_atom_get_engine_clock, |
320 | .set_engine_clock = &radeon_atom_set_engine_clock, |
320 | .set_engine_clock = &radeon_atom_set_engine_clock, |
321 | .get_memory_clock = &radeon_atom_get_memory_clock, |
321 | .get_memory_clock = &radeon_atom_get_memory_clock, |
322 | .set_memory_clock = &radeon_atom_set_memory_clock, |
322 | .set_memory_clock = &radeon_atom_set_memory_clock, |
323 | .set_pcie_lanes = NULL, |
323 | .set_pcie_lanes = NULL, |
324 | .set_clock_gating = &radeon_atom_set_clock_gating, |
324 | .set_clock_gating = &radeon_atom_set_clock_gating, |
325 | .bandwidth_update = &rs600_bandwidth_update, |
325 | .bandwidth_update = &rs600_bandwidth_update, |
326 | .hpd_init = &rs600_hpd_init, |
326 | .hpd_init = &rs600_hpd_init, |
327 | .hpd_fini = &rs600_hpd_fini, |
327 | .hpd_fini = &rs600_hpd_fini, |
328 | .hpd_sense = &rs600_hpd_sense, |
328 | .hpd_sense = &rs600_hpd_sense, |
329 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
329 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
330 | .ioctl_wait_idle = NULL, |
330 | .ioctl_wait_idle = NULL, |
331 | }; |
331 | }; |
332 | 332 | ||
333 | 333 | ||
/*
 * rs690,rs740
 *
 * Entry points implemented by the RS690/RS740 IGP backend.
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
344 | static struct radeon_asic rs690_asic = { |
344 | static struct radeon_asic rs690_asic = { |
345 | .init = &rs690_init, |
345 | .init = &rs690_init, |
346 | // .fini = &rs690_fini, |
346 | // .fini = &rs690_fini, |
347 | // .suspend = &rs690_suspend, |
347 | // .suspend = &rs690_suspend, |
348 | // .resume = &rs690_resume, |
348 | // .resume = &rs690_resume, |
349 | // .vga_set_state = &r100_vga_set_state, |
349 | // .vga_set_state = &r100_vga_set_state, |
350 | .gpu_reset = &r300_gpu_reset, |
350 | .gpu_reset = &r300_gpu_reset, |
351 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
351 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
352 | .gart_set_page = &rs400_gart_set_page, |
352 | .gart_set_page = &rs400_gart_set_page, |
353 | // .cp_commit = &r100_cp_commit, |
353 | .cp_commit = &r100_cp_commit, |
354 | // .ring_start = &r300_ring_start, |
354 | .ring_start = &r300_ring_start, |
355 | // .ring_test = &r100_ring_test, |
355 | .ring_test = &r100_ring_test, |
356 | // .ring_ib_execute = &r100_ring_ib_execute, |
356 | // .ring_ib_execute = &r100_ring_ib_execute, |
357 | // .irq_set = &rs600_irq_set, |
357 | // .irq_set = &rs600_irq_set, |
358 | // .irq_process = &rs600_irq_process, |
358 | // .irq_process = &rs600_irq_process, |
359 | // .get_vblank_counter = &rs600_get_vblank_counter, |
359 | // .get_vblank_counter = &rs600_get_vblank_counter, |
360 | // .fence_ring_emit = &r300_fence_ring_emit, |
360 | // .fence_ring_emit = &r300_fence_ring_emit, |
361 | // .cs_parse = &r300_cs_parse, |
361 | // .cs_parse = &r300_cs_parse, |
362 | // .copy_blit = &r100_copy_blit, |
362 | // .copy_blit = &r100_copy_blit, |
363 | // .copy_dma = &r300_copy_dma, |
363 | // .copy_dma = &r300_copy_dma, |
364 | // .copy = &r300_copy_dma, |
364 | // .copy = &r300_copy_dma, |
365 | .get_engine_clock = &radeon_atom_get_engine_clock, |
365 | .get_engine_clock = &radeon_atom_get_engine_clock, |
366 | .set_engine_clock = &radeon_atom_set_engine_clock, |
366 | .set_engine_clock = &radeon_atom_set_engine_clock, |
367 | .get_memory_clock = &radeon_atom_get_memory_clock, |
367 | .get_memory_clock = &radeon_atom_get_memory_clock, |
368 | .set_memory_clock = &radeon_atom_set_memory_clock, |
368 | .set_memory_clock = &radeon_atom_set_memory_clock, |
369 | .set_pcie_lanes = NULL, |
369 | .set_pcie_lanes = NULL, |
370 | .set_clock_gating = &radeon_atom_set_clock_gating, |
370 | .set_clock_gating = &radeon_atom_set_clock_gating, |
371 | .set_surface_reg = r100_set_surface_reg, |
371 | .set_surface_reg = r100_set_surface_reg, |
372 | .clear_surface_reg = r100_clear_surface_reg, |
372 | .clear_surface_reg = r100_clear_surface_reg, |
373 | .bandwidth_update = &rs690_bandwidth_update, |
373 | .bandwidth_update = &rs690_bandwidth_update, |
374 | .hpd_init = &rs600_hpd_init, |
374 | .hpd_init = &rs600_hpd_init, |
375 | .hpd_fini = &rs600_hpd_fini, |
375 | .hpd_fini = &rs600_hpd_fini, |
376 | .hpd_sense = &rs600_hpd_sense, |
376 | .hpd_sense = &rs600_hpd_sense, |
377 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
377 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
378 | .ioctl_wait_idle = NULL, |
378 | .ioctl_wait_idle = NULL, |
379 | }; |
379 | }; |
380 | 380 | ||
381 | 381 | ||
/*
 * rv515
 *
 * Entry points implemented by the RV515 backend.
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
396 | static struct radeon_asic rv515_asic = { |
396 | static struct radeon_asic rv515_asic = { |
397 | .init = &rv515_init, |
397 | .init = &rv515_init, |
398 | // .fini = &rv515_fini, |
398 | // .fini = &rv515_fini, |
399 | // .suspend = &rv515_suspend, |
399 | // .suspend = &rv515_suspend, |
400 | // .resume = &rv515_resume, |
400 | // .resume = &rv515_resume, |
401 | // .vga_set_state = &r100_vga_set_state, |
401 | // .vga_set_state = &r100_vga_set_state, |
402 | .gpu_reset = &rv515_gpu_reset, |
402 | .gpu_reset = &rv515_gpu_reset, |
403 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
403 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
404 | .gart_set_page = &rv370_pcie_gart_set_page, |
404 | .gart_set_page = &rv370_pcie_gart_set_page, |
405 | // .cp_commit = &r100_cp_commit, |
405 | .cp_commit = &r100_cp_commit, |
406 | // .ring_start = &rv515_ring_start, |
406 | .ring_start = &rv515_ring_start, |
407 | // .ring_test = &r100_ring_test, |
407 | .ring_test = &r100_ring_test, |
408 | // .ring_ib_execute = &r100_ring_ib_execute, |
408 | // .ring_ib_execute = &r100_ring_ib_execute, |
409 | // .irq_set = &rs600_irq_set, |
409 | // .irq_set = &rs600_irq_set, |
410 | // .irq_process = &rs600_irq_process, |
410 | // .irq_process = &rs600_irq_process, |
411 | // .get_vblank_counter = &rs600_get_vblank_counter, |
411 | // .get_vblank_counter = &rs600_get_vblank_counter, |
412 | // .fence_ring_emit = &r300_fence_ring_emit, |
412 | // .fence_ring_emit = &r300_fence_ring_emit, |
413 | // .cs_parse = &r300_cs_parse, |
413 | // .cs_parse = &r300_cs_parse, |
414 | // .copy_blit = &r100_copy_blit, |
414 | // .copy_blit = &r100_copy_blit, |
415 | // .copy_dma = &r300_copy_dma, |
415 | // .copy_dma = &r300_copy_dma, |
416 | // .copy = &r100_copy_blit, |
416 | // .copy = &r100_copy_blit, |
417 | .get_engine_clock = &radeon_atom_get_engine_clock, |
417 | .get_engine_clock = &radeon_atom_get_engine_clock, |
418 | .set_engine_clock = &radeon_atom_set_engine_clock, |
418 | .set_engine_clock = &radeon_atom_set_engine_clock, |
419 | .get_memory_clock = &radeon_atom_get_memory_clock, |
419 | .get_memory_clock = &radeon_atom_get_memory_clock, |
420 | .set_memory_clock = &radeon_atom_set_memory_clock, |
420 | .set_memory_clock = &radeon_atom_set_memory_clock, |
421 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
421 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
422 | .set_clock_gating = &radeon_atom_set_clock_gating, |
422 | .set_clock_gating = &radeon_atom_set_clock_gating, |
423 | .set_surface_reg = r100_set_surface_reg, |
423 | .set_surface_reg = r100_set_surface_reg, |
424 | .clear_surface_reg = r100_clear_surface_reg, |
424 | .clear_surface_reg = r100_clear_surface_reg, |
425 | .bandwidth_update = &rv515_bandwidth_update, |
425 | .bandwidth_update = &rv515_bandwidth_update, |
426 | .hpd_init = &rs600_hpd_init, |
426 | .hpd_init = &rs600_hpd_init, |
427 | .hpd_fini = &rs600_hpd_fini, |
427 | .hpd_fini = &rs600_hpd_fini, |
428 | .hpd_sense = &rs600_hpd_sense, |
428 | .hpd_sense = &rs600_hpd_sense, |
429 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
429 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
430 | .ioctl_wait_idle = NULL, |
430 | .ioctl_wait_idle = NULL, |
431 | }; |
431 | }; |
432 | 432 | ||
433 | 433 | ||
/*
 * r520,rv530,rv560,rv570,r580
 *
 * R520-family chips reuse most RV515 hooks; only init/resume differ.
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
439 | static struct radeon_asic r520_asic = { |
439 | static struct radeon_asic r520_asic = { |
440 | .init = &r520_init, |
440 | .init = &r520_init, |
441 | // .fini = &rv515_fini, |
441 | // .fini = &rv515_fini, |
442 | // .suspend = &rv515_suspend, |
442 | // .suspend = &rv515_suspend, |
443 | // .resume = &r520_resume, |
443 | // .resume = &r520_resume, |
444 | // .vga_set_state = &r100_vga_set_state, |
444 | // .vga_set_state = &r100_vga_set_state, |
445 | .gpu_reset = &rv515_gpu_reset, |
445 | .gpu_reset = &rv515_gpu_reset, |
446 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
446 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
447 | .gart_set_page = &rv370_pcie_gart_set_page, |
447 | .gart_set_page = &rv370_pcie_gart_set_page, |
448 | // .cp_commit = &r100_cp_commit, |
448 | .cp_commit = &r100_cp_commit, |
449 | // .ring_start = &rv515_ring_start, |
449 | .ring_start = &rv515_ring_start, |
450 | // .ring_test = &r100_ring_test, |
450 | .ring_test = &r100_ring_test, |
451 | // .ring_ib_execute = &r100_ring_ib_execute, |
451 | // .ring_ib_execute = &r100_ring_ib_execute, |
452 | // .irq_set = &rs600_irq_set, |
452 | // .irq_set = &rs600_irq_set, |
453 | // .irq_process = &rs600_irq_process, |
453 | // .irq_process = &rs600_irq_process, |
454 | // .get_vblank_counter = &rs600_get_vblank_counter, |
454 | // .get_vblank_counter = &rs600_get_vblank_counter, |
455 | // .fence_ring_emit = &r300_fence_ring_emit, |
455 | // .fence_ring_emit = &r300_fence_ring_emit, |
456 | // .cs_parse = &r300_cs_parse, |
456 | // .cs_parse = &r300_cs_parse, |
457 | // .copy_blit = &r100_copy_blit, |
457 | // .copy_blit = &r100_copy_blit, |
458 | // .copy_dma = &r300_copy_dma, |
458 | // .copy_dma = &r300_copy_dma, |
459 | // .copy = &r100_copy_blit, |
459 | // .copy = &r100_copy_blit, |
460 | .get_engine_clock = &radeon_atom_get_engine_clock, |
460 | .get_engine_clock = &radeon_atom_get_engine_clock, |
461 | .set_engine_clock = &radeon_atom_set_engine_clock, |
461 | .set_engine_clock = &radeon_atom_set_engine_clock, |
462 | .get_memory_clock = &radeon_atom_get_memory_clock, |
462 | .get_memory_clock = &radeon_atom_get_memory_clock, |
463 | .set_memory_clock = &radeon_atom_set_memory_clock, |
463 | .set_memory_clock = &radeon_atom_set_memory_clock, |
464 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
464 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
465 | .set_clock_gating = &radeon_atom_set_clock_gating, |
465 | .set_clock_gating = &radeon_atom_set_clock_gating, |
466 | .set_surface_reg = r100_set_surface_reg, |
466 | .set_surface_reg = r100_set_surface_reg, |
467 | .clear_surface_reg = r100_clear_surface_reg, |
467 | .clear_surface_reg = r100_clear_surface_reg, |
468 | .bandwidth_update = &rv515_bandwidth_update, |
468 | .bandwidth_update = &rv515_bandwidth_update, |
469 | .hpd_init = &rs600_hpd_init, |
469 | .hpd_init = &rs600_hpd_init, |
470 | .hpd_fini = &rs600_hpd_fini, |
470 | .hpd_fini = &rs600_hpd_fini, |
471 | .hpd_sense = &rs600_hpd_sense, |
471 | .hpd_sense = &rs600_hpd_sense, |
472 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
472 | .hpd_set_polarity = &rs600_hpd_set_polarity, |
473 | .ioctl_wait_idle = NULL, |
473 | .ioctl_wait_idle = NULL, |
474 | }; |
474 | }; |
475 | 475 | ||
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 *
 * Entry points implemented by the R600-family backend.
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
516 | 516 | ||
517 | static struct radeon_asic r600_asic = { |
517 | static struct radeon_asic r600_asic = { |
518 | .init = &r600_init, |
518 | .init = &r600_init, |
519 | // .fini = &r600_fini, |
519 | // .fini = &r600_fini, |
520 | // .suspend = &r600_suspend, |
520 | // .suspend = &r600_suspend, |
521 | // .resume = &r600_resume, |
521 | // .resume = &r600_resume, |
522 | // .cp_commit = &r600_cp_commit, |
522 | // .cp_commit = &r600_cp_commit, |
523 | .vga_set_state = &r600_vga_set_state, |
523 | .vga_set_state = &r600_vga_set_state, |
524 | .gpu_reset = &r600_gpu_reset, |
524 | .gpu_reset = &r600_gpu_reset, |
525 | .gart_tlb_flush = &r600_pcie_gart_tlb_flush, |
525 | .gart_tlb_flush = &r600_pcie_gart_tlb_flush, |
526 | .gart_set_page = &rs600_gart_set_page, |
526 | .gart_set_page = &rs600_gart_set_page, |
527 | // .ring_test = &r600_ring_test, |
527 | // .ring_test = &r600_ring_test, |
528 | // .ring_ib_execute = &r600_ring_ib_execute, |
528 | // .ring_ib_execute = &r600_ring_ib_execute, |
529 | // .irq_set = &r600_irq_set, |
529 | // .irq_set = &r600_irq_set, |
530 | // .irq_process = &r600_irq_process, |
530 | // .irq_process = &r600_irq_process, |
531 | // .fence_ring_emit = &r600_fence_ring_emit, |
531 | // .fence_ring_emit = &r600_fence_ring_emit, |
532 | // .cs_parse = &r600_cs_parse, |
532 | // .cs_parse = &r600_cs_parse, |
533 | // .copy_blit = &r600_copy_blit, |
533 | // .copy_blit = &r600_copy_blit, |
534 | // .copy_dma = &r600_copy_blit, |
534 | // .copy_dma = &r600_copy_blit, |
535 | // .copy = &r600_copy_blit, |
535 | // .copy = &r600_copy_blit, |
536 | .get_engine_clock = &radeon_atom_get_engine_clock, |
536 | .get_engine_clock = &radeon_atom_get_engine_clock, |
537 | .set_engine_clock = &radeon_atom_set_engine_clock, |
537 | .set_engine_clock = &radeon_atom_set_engine_clock, |
538 | .get_memory_clock = &radeon_atom_get_memory_clock, |
538 | .get_memory_clock = &radeon_atom_get_memory_clock, |
539 | .set_memory_clock = &radeon_atom_set_memory_clock, |
539 | .set_memory_clock = &radeon_atom_set_memory_clock, |
540 | .set_pcie_lanes = NULL, |
540 | .set_pcie_lanes = NULL, |
541 | .set_clock_gating = &radeon_atom_set_clock_gating, |
541 | .set_clock_gating = &radeon_atom_set_clock_gating, |
542 | .set_surface_reg = r600_set_surface_reg, |
542 | .set_surface_reg = r600_set_surface_reg, |
543 | .clear_surface_reg = r600_clear_surface_reg, |
543 | .clear_surface_reg = r600_clear_surface_reg, |
544 | .bandwidth_update = &rv515_bandwidth_update, |
544 | .bandwidth_update = &rv515_bandwidth_update, |
545 | .hpd_init = &r600_hpd_init, |
545 | .hpd_init = &r600_hpd_init, |
546 | .hpd_fini = &r600_hpd_fini, |
546 | .hpd_fini = &r600_hpd_fini, |
547 | .hpd_sense = &r600_hpd_sense, |
547 | .hpd_sense = &r600_hpd_sense, |
548 | .hpd_set_polarity = &r600_hpd_set_polarity, |
548 | .hpd_set_polarity = &r600_hpd_set_polarity, |
549 | // .ioctl_wait_idle = r600_ioctl_wait_idle, |
549 | // .ioctl_wait_idle = r600_ioctl_wait_idle, |
550 | }; |
550 | }; |
551 | 551 | ||
/*
 * rv770,rv730,rv710,rv740
 *
 * Entry points implemented by the RV770-family backend; everything else
 * is shared with the R600 hooks above.
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);
561 | static struct radeon_asic rv770_asic = { |
561 | static struct radeon_asic rv770_asic = { |
562 | .init = &rv770_init, |
562 | .init = &rv770_init, |
563 | // .fini = &rv770_fini, |
563 | // .fini = &rv770_fini, |
564 | // .suspend = &rv770_suspend, |
564 | // .suspend = &rv770_suspend, |
565 | // .resume = &rv770_resume, |
565 | // .resume = &rv770_resume, |
566 | // .cp_commit = &r600_cp_commit, |
566 | // .cp_commit = &r600_cp_commit, |
567 | .gpu_reset = &rv770_gpu_reset, |
567 | .gpu_reset = &rv770_gpu_reset, |
568 | .vga_set_state = &r600_vga_set_state, |
568 | .vga_set_state = &r600_vga_set_state, |
569 | .gart_tlb_flush = &r600_pcie_gart_tlb_flush, |
569 | .gart_tlb_flush = &r600_pcie_gart_tlb_flush, |
570 | .gart_set_page = &rs600_gart_set_page, |
570 | .gart_set_page = &rs600_gart_set_page, |
571 | // .ring_test = &r600_ring_test, |
571 | // .ring_test = &r600_ring_test, |
572 | // .ring_ib_execute = &r600_ring_ib_execute, |
572 | // .ring_ib_execute = &r600_ring_ib_execute, |
573 | // .irq_set = &r600_irq_set, |
573 | // .irq_set = &r600_irq_set, |
574 | // .irq_process = &r600_irq_process, |
574 | // .irq_process = &r600_irq_process, |
575 | // .fence_ring_emit = &r600_fence_ring_emit, |
575 | // .fence_ring_emit = &r600_fence_ring_emit, |
576 | // .cs_parse = &r600_cs_parse, |
576 | // .cs_parse = &r600_cs_parse, |
577 | // .copy_blit = &r600_copy_blit, |
577 | // .copy_blit = &r600_copy_blit, |
578 | // .copy_dma = &r600_copy_blit, |
578 | // .copy_dma = &r600_copy_blit, |
579 | // .copy = &r600_copy_blit, |
579 | // .copy = &r600_copy_blit, |
580 | .get_engine_clock = &radeon_atom_get_engine_clock, |
580 | .get_engine_clock = &radeon_atom_get_engine_clock, |
581 | .set_engine_clock = &radeon_atom_set_engine_clock, |
581 | .set_engine_clock = &radeon_atom_set_engine_clock, |
582 | .get_memory_clock = &radeon_atom_get_memory_clock, |
582 | .get_memory_clock = &radeon_atom_get_memory_clock, |
583 | .set_memory_clock = &radeon_atom_set_memory_clock, |
583 | .set_memory_clock = &radeon_atom_set_memory_clock, |
584 | .set_pcie_lanes = NULL, |
584 | .set_pcie_lanes = NULL, |
585 | .set_clock_gating = &radeon_atom_set_clock_gating, |
585 | .set_clock_gating = &radeon_atom_set_clock_gating, |
586 | .set_surface_reg = r600_set_surface_reg, |
586 | .set_surface_reg = r600_set_surface_reg, |
587 | .clear_surface_reg = r600_clear_surface_reg, |
587 | .clear_surface_reg = r600_clear_surface_reg, |
588 | .bandwidth_update = &rv515_bandwidth_update, |
588 | .bandwidth_update = &rv515_bandwidth_update, |
589 | .hpd_init = &r600_hpd_init, |
589 | .hpd_init = &r600_hpd_init, |
590 | .hpd_fini = &r600_hpd_fini, |
590 | .hpd_fini = &r600_hpd_fini, |
591 | .hpd_sense = &r600_hpd_sense, |
591 | .hpd_sense = &r600_hpd_sense, |
592 | .hpd_set_polarity = &r600_hpd_set_polarity, |
592 | .hpd_set_polarity = &r600_hpd_set_polarity, |
593 | // .ioctl_wait_idle = r600_ioctl_wait_idle, |
593 | // .ioctl_wait_idle = r600_ioctl_wait_idle, |
594 | }; |
594 | }; |
595 | 595 | ||
596 | #endif |
596 | #endif |