Subversion Repositories Kolibri OS

Rev 1430 → Rev 1963
@@ Rev 1430 line 43 / Rev 1963 line 43 @@
 void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
 
 /*
  * r100,rv100,rs100,rv200,rs200
  */
+struct r100_mc_save {
+	u32	GENMO_WT;
+	u32	CRTC_EXT_CNTL;
+	u32	CRTC_GEN_CNTL;
+	u32	CRTC2_GEN_CNTL;
+	u32	CUR_OFFSET;
+	u32	CUR2_OFFSET;
+};
-extern int r100_init(struct radeon_device *rdev);
-extern void r100_fini(struct radeon_device *rdev);
-extern int r100_suspend(struct radeon_device *rdev);
-extern int r100_resume(struct radeon_device *rdev);
-uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
-void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
-void r100_vga_set_state(struct radeon_device *rdev, bool state);
-int r100_gpu_reset(struct radeon_device *rdev);
+int r100_init(struct radeon_device *rdev);
+void r100_fini(struct radeon_device *rdev);
+int r100_suspend(struct radeon_device *rdev);
+int r100_resume(struct radeon_device *rdev);
+void r100_vga_set_state(struct radeon_device *rdev, bool state);
+bool r100_gpu_is_lockup(struct radeon_device *rdev);
+int r100_asic_reset(struct radeon_device *rdev);
 u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
 void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
 int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
 void r100_cp_commit(struct radeon_device *rdev);
@@ Rev 1430 line 71 / Rev 1963 line 78 @@
 		   unsigned num_pages,
 		   struct radeon_fence *fence);
 int r100_set_surface_reg(struct radeon_device *rdev, int reg,
 			 uint32_t tiling_flags, uint32_t pitch,
 			 uint32_t offset, uint32_t obj_size);
-int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
+void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
 void r100_bandwidth_update(struct radeon_device *rdev);
 void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
 int r100_ring_test(struct radeon_device *rdev);
 void r100_hpd_init(struct radeon_device *rdev);
 void r100_hpd_fini(struct radeon_device *rdev);
 bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
 void r100_hpd_set_polarity(struct radeon_device *rdev,
 			   enum radeon_hpd_id hpd);
-
-static struct radeon_asic r100_asic = {
-	.init = &r100_init,
-//	.fini = &r100_fini,
-//	.suspend = &r100_suspend,
-//	.resume = &r100_resume,
-//	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r100_gpu_reset,
-	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
-	.gart_set_page = &r100_pci_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r100_ring_start,
-    .ring_test = &r100_ring_test,
-//	.ring_ib_execute = &r100_ring_ib_execute,
-//	.irq_set = &r100_irq_set,
-//	.irq_process = &r100_irq_process,
-//	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r100_fence_ring_emit,
-//	.cs_parse = &r100_cs_parse,
-//	.copy_blit = &r100_copy_blit,
-//	.copy_dma = NULL,
-//	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_legacy_get_engine_clock,
-	.set_engine_clock = &radeon_legacy_set_engine_clock,
-	.get_memory_clock = &radeon_legacy_get_memory_clock,
-	.set_memory_clock = NULL,
-	.get_pcie_lanes = NULL,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_legacy_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
+int r100_debugfs_rbbm_init(struct radeon_device *rdev);
+int r100_debugfs_cp_init(struct radeon_device *rdev);
+void r100_cp_disable(struct radeon_device *rdev);
+int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
+void r100_cp_fini(struct radeon_device *rdev);
+int r100_pci_gart_init(struct radeon_device *rdev);
+void r100_pci_gart_fini(struct radeon_device *rdev);
+int r100_pci_gart_enable(struct radeon_device *rdev);
+void r100_pci_gart_disable(struct radeon_device *rdev);
+int r100_debugfs_mc_info_init(struct radeon_device *rdev);
+int r100_gui_wait_for_idle(struct radeon_device *rdev);
+void r100_gpu_lockup_update(struct r100_gpu_lockup *lockup,
+			    struct radeon_cp *cp);
+bool r100_gpu_cp_is_lockup(struct radeon_device *rdev,
+			   struct r100_gpu_lockup *lockup,
+			   struct radeon_cp *cp);
+void r100_ib_fini(struct radeon_device *rdev);
+int r100_ib_init(struct radeon_device *rdev);
+void r100_irq_disable(struct radeon_device *rdev);
+void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
+void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
+void r100_vram_init_sizes(struct radeon_device *rdev);
+int r100_cp_reset(struct radeon_device *rdev);
+void r100_vga_render_disable(struct radeon_device *rdev);
+void r100_restore_sanity(struct radeon_device *rdev);
+int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
+					 struct radeon_cs_packet *pkt,
+					 struct radeon_bo *robj);
+int r100_cs_parse_packet0(struct radeon_cs_parser *p,
+			  struct radeon_cs_packet *pkt,
+			  const unsigned *auth, unsigned n,
+			  radeon_packet0_check_t check);
+int r100_cs_packet_parse(struct radeon_cs_parser *p,
+			 struct radeon_cs_packet *pkt,
+			 unsigned idx);
+void r100_enable_bm(struct radeon_device *rdev);
+void r100_set_common_regs(struct radeon_device *rdev);
+void r100_bm_disable(struct radeon_device *rdev);
+extern bool r100_gui_idle(struct radeon_device *rdev);
+extern void r100_pm_misc(struct radeon_device *rdev);
+extern void r100_pm_prepare(struct radeon_device *rdev);
+extern void r100_pm_finish(struct radeon_device *rdev);
+extern void r100_pm_init_profile(struct radeon_device *rdev);
+extern void r100_pm_get_dynpm_state(struct radeon_device *rdev);
+extern void r100_pre_page_flip(struct radeon_device *rdev, int crtc);
+extern u32 r100_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
+extern void r100_post_page_flip(struct radeon_device *rdev, int crtc);
@@ Rev 1430 line 123 / Rev 1963 line 139 @@
 
 /*
  * r200,rv250,rs300,rv280
  */
 extern int r200_copy_dma(struct radeon_device *rdev,
 			uint64_t src_offset,
 			uint64_t dst_offset,
 			unsigned num_pages,
 			struct radeon_fence *fence);
-static struct radeon_asic r200_asic = {
-	.init = &r100_init,
-//	.fini = &r100_fini,
-//	.suspend = &r100_suspend,
-//	.resume = &r100_resume,
-//	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r100_gpu_reset,
-	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
-	.gart_set_page = &r100_pci_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r100_ring_start,
-    .ring_test = &r100_ring_test,
-//	.ring_ib_execute = &r100_ring_ib_execute,
-//	.irq_set = &r100_irq_set,
-//	.irq_process = &r100_irq_process,
-//	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r100_fence_ring_emit,
-//	.cs_parse = &r100_cs_parse,
-//	.copy_blit = &r100_copy_blit,
-//	.copy_dma = NULL,
-//	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_legacy_get_engine_clock,
-	.set_engine_clock = &radeon_legacy_set_engine_clock,
-	.get_memory_clock = &radeon_legacy_get_memory_clock,
-	.set_memory_clock = NULL,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_legacy_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
@@ Rev 1430 line 168 / Rev 1963 line 148 @@
-
+void r200_set_safe_registers(struct radeon_device *rdev);
 
 /*
  * r300,r350,rv350,rv380
  */
 extern int r300_init(struct radeon_device *rdev);
 extern void r300_fini(struct radeon_device *rdev);
 extern int r300_suspend(struct radeon_device *rdev);
 extern int r300_resume(struct radeon_device *rdev);
-extern int r300_gpu_reset(struct radeon_device *rdev);
+extern bool r300_gpu_is_lockup(struct radeon_device *rdev);
+extern int r300_asic_reset(struct radeon_device *rdev);
 extern void r300_ring_start(struct radeon_device *rdev);
 extern void r300_fence_ring_emit(struct radeon_device *rdev,
 			  struct radeon_fence *fence);
 extern int r300_cs_parse(struct radeon_cs_parser *p);
 extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
 extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
-extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
-extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
 extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
-
-static struct radeon_asic r300_asic = {
-	.init = &r300_init,
-//	.fini = &r300_fini,
-//	.suspend = &r300_suspend,
-//	.resume = &r300_resume,
-//	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
-	.gart_set_page = &r100_pci_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-    .ring_test = &r100_ring_test,
-//	.ring_ib_execute = &r100_ring_ib_execute,
-//	.irq_set = &r100_irq_set,
-//	.irq_process = &r100_irq_process,
-//	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-//	.cs_parse = &r300_cs_parse,
-//	.copy_blit = &r100_copy_blit,
-//	.copy_dma = &r300_copy_dma,
-//	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_legacy_get_engine_clock,
-	.set_engine_clock = &radeon_legacy_set_engine_clock,
-	.get_memory_clock = &radeon_legacy_get_memory_clock,
-	.set_memory_clock = NULL,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = &rv370_set_pcie_lanes,
-	.set_clock_gating = &radeon_legacy_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
-
-static struct radeon_asic r300_asic_pcie = {
-	.init = &r300_init,
-//	.fini = &r300_fini,
-//	.suspend = &r300_suspend,
-//	.resume = &r300_resume,
-//	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-	.gart_set_page = &rv370_pcie_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-    .ring_test = &r100_ring_test,
-//	.ring_ib_execute = &r100_ring_ib_execute,
-//	.irq_set = &r100_irq_set,
-//	.irq_process = &r100_irq_process,
-//	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-//	.cs_parse = &r300_cs_parse,
-//	.copy_blit = &r100_copy_blit,
-//	.copy_dma = &r300_copy_dma,
-//	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_legacy_get_engine_clock,
-	.set_engine_clock = &radeon_legacy_set_engine_clock,
-	.get_memory_clock = &radeon_legacy_get_memory_clock,
-	.set_memory_clock = NULL,
-	.set_pcie_lanes = &rv370_set_pcie_lanes,
-	.set_clock_gating = &radeon_legacy_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
+extern void r300_set_reg_safe(struct radeon_device *rdev);
+extern void r300_mc_program(struct radeon_device *rdev);
+extern void r300_mc_init(struct radeon_device *rdev);
+extern void r300_clock_startup(struct radeon_device *rdev);
+extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
+extern int rv370_pcie_gart_init(struct radeon_device *rdev);
+extern void rv370_pcie_gart_fini(struct radeon_device *rdev);
@@ Rev 1430 line 262 / Rev 1963 line 174 @@
-	.ioctl_wait_idle = NULL,
-};
+extern int rv370_pcie_gart_enable(struct radeon_device *rdev);
+extern void rv370_pcie_gart_disable(struct radeon_device *rdev);
 
 /*
  * r420,r423,rv410
  */
 extern int r420_init(struct radeon_device *rdev);
 extern void r420_fini(struct radeon_device *rdev);
 extern int r420_suspend(struct radeon_device *rdev);
 extern int r420_resume(struct radeon_device *rdev);
-static struct radeon_asic r420_asic = {
-	.init = &r420_init,
-//	.fini = &r420_fini,
-//	.suspend = &r420_suspend,
-//	.resume = &r420_resume,
-//	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-	.gart_set_page = &rv370_pcie_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-    .ring_test = &r100_ring_test,
-//	.ring_ib_execute = &r100_ring_ib_execute,
-//	.irq_set = &r100_irq_set,
-//	.irq_process = &r100_irq_process,
-//	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-//	.cs_parse = &r300_cs_parse,
-//	.copy_blit = &r100_copy_blit,
-//	.copy_dma = &r300_copy_dma,
-//	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = &rv370_set_pcie_lanes,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
+extern void r420_pm_init_profile(struct radeon_device *rdev);
+extern u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg);
@@ Rev 1430 line 307 / Rev 1963 line 186 @@
-	.ioctl_wait_idle = NULL,
-};
-
+extern void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v);
+extern int r420_debugfs_pipes_info_init(struct radeon_device *rdev);
+extern void r420_pipes_init(struct radeon_device *rdev);
 
@@ Rev 1430 line 317 / Rev 1963 line 196 @@
 extern int rs400_resume(struct radeon_device *rdev);
 void rs400_gart_tlb_flush(struct radeon_device *rdev);
 int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
 uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
 void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
-static struct radeon_asic rs400_asic = {
-	.init = &rs400_init,
-//	.fini = &rs400_fini,
-//	.suspend = &rs400_suspend,
-//	.resume = &rs400_resume,
-//	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &rs400_gart_tlb_flush,
-	.gart_set_page = &rs400_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-    .ring_test = &r100_ring_test,
-//	.ring_ib_execute = &r100_ring_ib_execute,
-//	.irq_set = &r100_irq_set,
-//	.irq_process = &r100_irq_process,
-//	.get_vblank_counter = &r100_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-//	.cs_parse = &r300_cs_parse,
-//	.copy_blit = &r100_copy_blit,
-//	.copy_dma = &r300_copy_dma,
-//	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_legacy_get_engine_clock,
-	.set_engine_clock = &radeon_legacy_set_engine_clock,
-	.get_memory_clock = &radeon_legacy_get_memory_clock,
-	.set_memory_clock = NULL,
-	.get_pcie_lanes = NULL,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_legacy_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &r100_bandwidth_update,
-	.hpd_init = &r100_hpd_init,
-	.hpd_fini = &r100_hpd_fini,
-	.hpd_sense = &r100_hpd_sense,
-	.hpd_set_polarity = &r100_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
+int rs400_gart_init(struct radeon_device *rdev);
+int rs400_gart_enable(struct radeon_device *rdev);
+void rs400_gart_adjust_size(struct radeon_device *rdev);
+void rs400_gart_disable(struct radeon_device *rdev);
+void rs400_gart_fini(struct radeon_device *rdev);
@@ Rev 1430 line 360 / Rev 1963 line 206 @@
 
 /*
  * rs600.
  */
+extern int rs600_asic_reset(struct radeon_device *rdev);
 extern int rs600_init(struct radeon_device *rdev);
 extern void rs600_fini(struct radeon_device *rdev);
 extern int rs600_suspend(struct radeon_device *rdev);
 extern int rs600_resume(struct radeon_device *rdev);
 int rs600_irq_set(struct radeon_device *rdev);
 int rs600_irq_process(struct radeon_device *rdev);
+void rs600_irq_disable(struct radeon_device *rdev);
 u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
 void rs600_gart_tlb_flush(struct radeon_device *rdev);
 int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
 uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
@@ Rev 1430 line 376 / Rev 1963 line 224 @@
 void rs600_hpd_init(struct radeon_device *rdev);
 void rs600_hpd_fini(struct radeon_device *rdev);
 bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
 void rs600_hpd_set_polarity(struct radeon_device *rdev,
 			    enum radeon_hpd_id hpd);
-
-static struct radeon_asic rs600_asic = {
-	.init = &rs600_init,
-//	.fini = &rs600_fini,
-//	.suspend = &rs600_suspend,
-//	.resume = &rs600_resume,
-//	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &rs600_gart_tlb_flush,
-	.gart_set_page = &rs600_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-    .ring_test = &r100_ring_test,
-//	.ring_ib_execute = &r100_ring_ib_execute,
-//	.irq_set = &rs600_irq_set,
-//	.irq_process = &rs600_irq_process,
-//	.get_vblank_counter = &rs600_get_vblank_counter,
-    .fence_ring_emit = &r300_fence_ring_emit,
-//   .cs_parse = &r300_cs_parse,
-//   .copy_blit = &r100_copy_blit,
-//   .copy_dma = &r300_copy_dma,
-//   .copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = NULL,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &rs600_bandwidth_update,
-	.hpd_init = &rs600_hpd_init,
-	.hpd_fini = &rs600_hpd_fini,
-	.hpd_sense = &rs600_hpd_sense,
-	.hpd_set_polarity = &rs600_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
+extern void rs600_pm_misc(struct radeon_device *rdev);
+extern void rs600_pm_prepare(struct radeon_device *rdev);
+extern void rs600_pm_finish(struct radeon_device *rdev);
+extern void rs600_pre_page_flip(struct radeon_device *rdev, int crtc);
+extern u32 rs600_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
+extern void rs600_post_page_flip(struct radeon_device *rdev, int crtc);
+void rs600_set_safe_registers(struct radeon_device *rdev);
@@ Rev 1430 line 419 / Rev 1963 line 236 @@
 
 
 /*
@@ Rev 1430 line 426 / Rev 1963 line 243 @@
 int rs690_resume(struct radeon_device *rdev);
 int rs690_suspend(struct radeon_device *rdev);
 uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
 void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 void rs690_bandwidth_update(struct radeon_device *rdev);
-static struct radeon_asic rs690_asic = {
-	.init = &rs690_init,
-//	.fini = &rs690_fini,
-//	.suspend = &rs690_suspend,
-//	.resume = &rs690_resume,
-//	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &r300_gpu_reset,
-	.gart_tlb_flush = &rs400_gart_tlb_flush,
-	.gart_set_page = &rs400_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-	.ring_start = &r300_ring_start,
-    .ring_test = &r100_ring_test,
-//	.ring_ib_execute = &r100_ring_ib_execute,
-//	.irq_set = &rs600_irq_set,
-//	.irq_process = &rs600_irq_process,
-//	.get_vblank_counter = &rs600_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-//	.cs_parse = &r300_cs_parse,
-//	.copy_blit = &r100_copy_blit,
-//	.copy_dma = &r300_copy_dma,
-//	.copy = &r300_copy_dma,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = NULL,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &rs690_bandwidth_update,
-	.hpd_init = &rs600_hpd_init,
-	.hpd_fini = &rs600_hpd_fini,
-	.hpd_sense = &rs600_hpd_sense,
-	.hpd_set_polarity = &rs600_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
-
+void rs690_line_buffer_adjust(struct radeon_device *rdev,
+					struct drm_display_mode *mode1,
+					struct drm_display_mode *mode2);
@@ Rev 1430 line 469 / Rev 1963 line 251 @@
 
 /*
  * rv515
  */
+struct rv515_mc_save {
+	u32 d1vga_control;
+	u32 d2vga_control;
+	u32 vga_render_control;
+	u32 vga_hdp_control;
+	u32 d1crtc_control;
+	u32 d2crtc_control;
+};
 int rv515_init(struct radeon_device *rdev);
 void rv515_fini(struct radeon_device *rdev);
-int rv515_gpu_reset(struct radeon_device *rdev);
 uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
 void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 void rv515_ring_start(struct radeon_device *rdev);
-uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
-void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 void rv515_bandwidth_update(struct radeon_device *rdev);
 int rv515_resume(struct radeon_device *rdev);
 int rv515_suspend(struct radeon_device *rdev);
-static struct radeon_asic rv515_asic = {
-	.init = &rv515_init,
-//	.fini = &rv515_fini,
-//	.suspend = &rv515_suspend,
-//	.resume = &rv515_resume,
-//	.vga_set_state = &r100_vga_set_state,
-	.gpu_reset = &rv515_gpu_reset,
-	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-	.gart_set_page = &rv370_pcie_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-    .ring_start = &rv515_ring_start,
-    .ring_test = &r100_ring_test,
-//	.ring_ib_execute = &r100_ring_ib_execute,
-//	.irq_set = &rs600_irq_set,
-//	.irq_process = &rs600_irq_process,
-//	.get_vblank_counter = &rs600_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-//	.cs_parse = &r300_cs_parse,
-//	.copy_blit = &r100_copy_blit,
-//	.copy_dma = &r300_copy_dma,
-//	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = &rv370_set_pcie_lanes,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &rv515_bandwidth_update,
-	.hpd_init = &rs600_hpd_init,
-	.hpd_fini = &rs600_hpd_fini,
-	.hpd_sense = &rs600_hpd_sense,
-	.hpd_set_polarity = &rs600_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
+void rv515_bandwidth_avivo_update(struct radeon_device *rdev);
+void rv515_vga_render_disable(struct radeon_device *rdev);
+void rv515_set_safe_registers(struct radeon_device *rdev);
+void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save);
+void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save);
+void rv515_clock_startup(struct radeon_device *rdev);
@@ Rev 1430 line 520 / Rev 1963 line 277 @@
-};
+void rv515_debugfs(struct radeon_device *rdev);
 
 
 /*
  * r520,rv530,rv560,rv570,r580
- */
-int r520_init(struct radeon_device *rdev);
-int r520_resume(struct radeon_device *rdev);
-static struct radeon_asic r520_asic = {
-	.init = &r520_init,
-//	.fini = &rv515_fini,
-//	.suspend = &rv515_suspend,
-//	.resume = &r520_resume,
-//	.vga_set_state = &r100_vga_set_state,
-    .gpu_reset = &rv515_gpu_reset,
-    .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
-    .gart_set_page = &rv370_pcie_gart_set_page,
-	.cp_commit = &r100_cp_commit,
-    .ring_start = &rv515_ring_start,
-    .ring_test = &r100_ring_test,
-//	.ring_ib_execute = &r100_ring_ib_execute,
-//	.irq_set = &rs600_irq_set,
-//	.irq_process = &rs600_irq_process,
-//	.get_vblank_counter = &rs600_get_vblank_counter,
-	.fence_ring_emit = &r300_fence_ring_emit,
-//	.cs_parse = &r300_cs_parse,
-//	.copy_blit = &r100_copy_blit,
-//	.copy_dma = &r300_copy_dma,
-//	.copy = &r100_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = &rv370_set_pcie_lanes,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r100_set_surface_reg,
-	.clear_surface_reg = r100_clear_surface_reg,
-	.bandwidth_update = &rv515_bandwidth_update,
-	.hpd_init = &rs600_hpd_init,
-	.hpd_fini = &rs600_hpd_fini,
-	.hpd_sense = &rs600_hpd_sense,
@@ Rev 1430 line 562 / Rev 1963 line 282 @@
-	.hpd_set_polarity = &rs600_hpd_set_polarity,
-	.ioctl_wait_idle = NULL,
-};
+ */
+int r520_init(struct radeon_device *rdev);
+int r520_resume(struct radeon_device *rdev);
 
@@ Rev 1430 line 578 / Rev 1963 line 298 @@
 uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
 void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
 int r600_cs_parse(struct radeon_cs_parser *p);
 void r600_fence_ring_emit(struct radeon_device *rdev,
 			  struct radeon_fence *fence);
-int r600_copy_dma(struct radeon_device *rdev,
-		  uint64_t src_offset,
-		  uint64_t dst_offset,
-		  unsigned num_pages,
-		  struct radeon_fence *fence);
-int r600_irq_process(struct radeon_device *rdev);
-int r600_irq_set(struct radeon_device *rdev);
-int r600_gpu_reset(struct radeon_device *rdev);
+bool r600_gpu_is_lockup(struct radeon_device *rdev);
+int r600_asic_reset(struct radeon_device *rdev);
 int r600_set_surface_reg(struct radeon_device *rdev, int reg,
 			 uint32_t tiling_flags, uint32_t pitch,
 			 uint32_t offset, uint32_t obj_size);
-int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
+void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
+int r600_ib_test(struct radeon_device *rdev);
 void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
 int r600_ring_test(struct radeon_device *rdev);
 int r600_copy_blit(struct radeon_device *rdev,
 		   uint64_t src_offset, uint64_t dst_offset,
 		   unsigned num_pages, struct radeon_fence *fence);
@@ Rev 1430 line 601 / Rev 1963 line 316 @@
 void r600_hpd_fini(struct radeon_device *rdev);
 bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
 void r600_hpd_set_polarity(struct radeon_device *rdev,
 			   enum radeon_hpd_id hpd);
 extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
-
-static struct radeon_asic r600_asic = {
-	.init = &r600_init,
-//	.fini = &r600_fini,
-//	.suspend = &r600_suspend,
-//	.resume = &r600_resume,
-	.cp_commit = &r600_cp_commit,
-	.vga_set_state = &r600_vga_set_state,
-	.gpu_reset = &r600_gpu_reset,
-	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
-	.gart_set_page = &rs600_gart_set_page,
-	.ring_test = &r600_ring_test,
-//	.ring_ib_execute = &r600_ring_ib_execute,
-//	.irq_set = &r600_irq_set,
-//	.irq_process = &r600_irq_process,
-	.fence_ring_emit = &r600_fence_ring_emit,
-//	.cs_parse = &r600_cs_parse,
-//	.copy_blit = &r600_copy_blit,
-//	.copy_dma = &r600_copy_blit,
-//	.copy = &r600_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = NULL,
-	.set_surface_reg = r600_set_surface_reg,
-	.clear_surface_reg = r600_clear_surface_reg,
-	.bandwidth_update = &rv515_bandwidth_update,
-	.hpd_init = &r600_hpd_init,
-	.hpd_fini = &r600_hpd_fini,
-	.hpd_sense = &r600_hpd_sense,
-	.hpd_set_polarity = &r600_hpd_set_polarity,
-//	.ioctl_wait_idle = r600_ioctl_wait_idle,
-};
+extern bool r600_gui_idle(struct radeon_device *rdev);
+extern void r600_pm_misc(struct radeon_device *rdev);
+extern void r600_pm_init_profile(struct radeon_device *rdev);
+extern void rs780_pm_init_profile(struct radeon_device *rdev);
+extern void r600_pm_get_dynpm_state(struct radeon_device *rdev);
+extern void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes);
+extern int r600_get_pcie_lanes(struct radeon_device *rdev);
+bool r600_card_posted(struct radeon_device *rdev);
+void r600_cp_stop(struct radeon_device *rdev);
+int r600_cp_start(struct radeon_device *rdev);
+void r600_ring_init(struct radeon_device *rdev, unsigned ring_size);
+int r600_cp_resume(struct radeon_device *rdev);
+void r600_cp_fini(struct radeon_device *rdev);
+int r600_count_pipe_bits(uint32_t val);
+int r600_mc_wait_for_idle(struct radeon_device *rdev);
+int r600_pcie_gart_init(struct radeon_device *rdev);
+void r600_scratch_init(struct radeon_device *rdev);
+int r600_blit_init(struct radeon_device *rdev);
+void r600_blit_fini(struct radeon_device *rdev);
+int r600_init_microcode(struct radeon_device *rdev);
+/* r600 irq */
+int r600_irq_process(struct radeon_device *rdev);
+int r600_irq_init(struct radeon_device *rdev);
+void r600_irq_fini(struct radeon_device *rdev);
+void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size);
+int r600_irq_set(struct radeon_device *rdev);
+void r600_irq_suspend(struct radeon_device *rdev);
+void r600_disable_interrupts(struct radeon_device *rdev);
+void r600_rlc_stop(struct radeon_device *rdev);
+/* r600 audio */
+int r600_audio_init(struct radeon_device *rdev);
+int r600_audio_tmds_index(struct drm_encoder *encoder);
+void r600_audio_set_clock(struct drm_encoder *encoder, int clock);
+int r600_audio_channels(struct radeon_device *rdev);
+int r600_audio_bits_per_sample(struct radeon_device *rdev);
+int r600_audio_rate(struct radeon_device *rdev);
+uint8_t r600_audio_status_bits(struct radeon_device *rdev);
+uint8_t r600_audio_category_code(struct radeon_device *rdev);
+void r600_audio_schedule_polling(struct radeon_device *rdev);
+void r600_audio_enable_polling(struct drm_encoder *encoder);
+void r600_audio_disable_polling(struct drm_encoder *encoder);
+void r600_audio_fini(struct radeon_device *rdev);
+void r600_hdmi_init(struct drm_encoder *encoder);
+int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder);
+void r600_hdmi_update_audio_settings(struct drm_encoder *encoder);
+/* r600 blit */
+int r600_blit_prepare_copy(struct radeon_device *rdev, int size_bytes);
+void r600_blit_done_copy(struct radeon_device *rdev, struct radeon_fence *fence);
+void r600_kms_blit_copy(struct radeon_device *rdev,
+			u64 src_gpu_addr, u64 dst_gpu_addr,
+			int size_bytes);
@@ Rev 1430 line 642 / Rev 1963 line 372 @@
 
 /*
  * rv770,rv730,rv710,rv740
  */
 int rv770_init(struct radeon_device *rdev);
 void rv770_fini(struct radeon_device *rdev);
 int rv770_suspend(struct radeon_device *rdev);
 int rv770_resume(struct radeon_device *rdev);
-int rv770_gpu_reset(struct radeon_device *rdev);
-
-static struct radeon_asic rv770_asic = {
-	.init = &rv770_init,
-//	.fini = &rv770_fini,
-//	.suspend = &rv770_suspend,
-//	.resume = &rv770_resume,
-	.cp_commit = &r600_cp_commit,
-	.gpu_reset = &rv770_gpu_reset,
-	.vga_set_state = &r600_vga_set_state,
-	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
-	.gart_set_page = &rs600_gart_set_page,
-	.ring_test = &r600_ring_test,
-//	.ring_ib_execute = &r600_ring_ib_execute,
-//	.irq_set = &r600_irq_set,
-//	.irq_process = &r600_irq_process,
-	.fence_ring_emit = &r600_fence_ring_emit,
-//	.cs_parse = &r600_cs_parse,
-//	.copy_blit = &r600_copy_blit,
-//	.copy_dma = &r600_copy_blit,
-//	.copy = &r600_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.get_pcie_lanes = &rv370_get_pcie_lanes,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = &radeon_atom_set_clock_gating,
-	.set_surface_reg = r600_set_surface_reg,
-	.clear_surface_reg = r600_clear_surface_reg,
-	.bandwidth_update = &rv515_bandwidth_update,
-	.hpd_init = &r600_hpd_init,
-	.hpd_fini = &r600_hpd_fini,
-	.hpd_sense = &r600_hpd_sense,
-	.hpd_set_polarity = &r600_hpd_set_polarity,
+void rv770_pm_misc(struct radeon_device *rdev);
+u32 rv770_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
+void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
+void r700_cp_stop(struct radeon_device *rdev);
@@ Rev 1430 line 685 / Rev 1963 line 384 @@
-};
+void r700_cp_fini(struct radeon_device *rdev);
 
 /*
  * evergreen
  */
+struct evergreen_mc_save {
+	u32 vga_control[6];
+	u32 vga_render_control;
+	u32 vga_hdp_control;
+	u32 crtc_control[6];
+};
+void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev);
 int evergreen_init(struct radeon_device *rdev);
 void evergreen_fini(struct radeon_device *rdev);
 int evergreen_suspend(struct radeon_device *rdev);
 int evergreen_resume(struct radeon_device *rdev);
-int evergreen_gpu_reset(struct radeon_device *rdev);
+bool evergreen_gpu_is_lockup(struct radeon_device *rdev);
+int evergreen_asic_reset(struct radeon_device *rdev);
 void evergreen_bandwidth_update(struct radeon_device *rdev);
+void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
+int evergreen_copy_blit(struct radeon_device *rdev,
+			uint64_t src_offset, uint64_t dst_offset,
+			unsigned num_pages, struct radeon_fence *fence);
 void evergreen_hpd_init(struct radeon_device *rdev);
 void evergreen_hpd_fini(struct radeon_device *rdev);
 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
 				enum radeon_hpd_id hpd);
-
-static struct radeon_asic evergreen_asic = {
-	.init = &evergreen_init,
-//	.fini = &evergreen_fini,
-//	.suspend = &evergreen_suspend,
-//	.resume = &evergreen_resume,
-	.cp_commit = NULL,
-	.gpu_reset = &evergreen_gpu_reset,
-	.vga_set_state = &r600_vga_set_state,
-	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
-	.gart_set_page = &rs600_gart_set_page,
-	.ring_test = NULL,
-//	.ring_ib_execute = &r600_ring_ib_execute,
-//	.irq_set = &r600_irq_set,
-//	.irq_process = &r600_irq_process,
-	.fence_ring_emit = &r600_fence_ring_emit,
-//	.cs_parse = &r600_cs_parse,
-//	.copy_blit = &r600_copy_blit,
-//	.copy_dma = &r600_copy_blit,
-//	.copy = &r600_copy_blit,
-	.get_engine_clock = &radeon_atom_get_engine_clock,
-	.set_engine_clock = &radeon_atom_set_engine_clock,
-	.get_memory_clock = &radeon_atom_get_memory_clock,
-	.set_memory_clock = &radeon_atom_set_memory_clock,
-	.set_pcie_lanes = NULL,
-	.set_clock_gating = NULL,
-	.set_surface_reg = r600_set_surface_reg,
-	.clear_surface_reg = r600_clear_surface_reg,
-	.bandwidth_update = &evergreen_bandwidth_update,
-	.hpd_init = &evergreen_hpd_init,
-	.hpd_fini = &evergreen_hpd_fini,
-	.hpd_sense = &evergreen_hpd_sense,
+u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc);
+int evergreen_irq_set(struct radeon_device *rdev);
+int evergreen_irq_process(struct radeon_device *rdev);
+extern int evergreen_cs_parse(struct radeon_cs_parser *p);
+extern void evergreen_pm_misc(struct radeon_device *rdev);
+extern void evergreen_pm_prepare(struct radeon_device *rdev);
+extern void evergreen_pm_finish(struct radeon_device *rdev);
+extern void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc);
+extern u32 evergreen_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
+extern void evergreen_post_page_flip(struct radeon_device *rdev, int crtc);
+void evergreen_disable_interrupt_state(struct radeon_device *rdev);
+int evergreen_blit_init(struct radeon_device *rdev);
+void evergreen_blit_fini(struct radeon_device *rdev);
+/* evergreen blit */
+int evergreen_blit_prepare_copy(struct radeon_device *rdev, int size_bytes);
+void evergreen_blit_done_copy(struct radeon_device *rdev, struct radeon_fence *fence);
+void evergreen_kms_blit_copy(struct radeon_device *rdev,
+			     u64 src_gpu_addr, u64 dst_gpu_addr,
+			     int size_bytes);
+
+/*
+ * cayman
+ */
+void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev);
+int cayman_init(struct radeon_device *rdev);
+void cayman_fini(struct radeon_device *rdev);
+int cayman_suspend(struct radeon_device *rdev);
+int cayman_resume(struct radeon_device *rdev);
@@ Rev 1430 line 733 / Rev 1963 line 440 @@
-	.hpd_set_polarity = &evergreen_hpd_set_polarity,
+bool cayman_gpu_is_lockup(struct radeon_device *rdev);