Subversion Repositories Kolibri OS

Rev

Rev 1321 | Rev 1404 | Go to most recent revision | Details | Compare with Previous | Last modification | View Log | RSS feed

Rev Author Line No. Line
1117 serge 1
/*
2
 * Copyright 2008 Advanced Micro Devices, Inc.
3
 * Copyright 2008 Red Hat Inc.
4
 * Copyright 2009 Jerome Glisse.
5
 *
6
 * Permission is hereby granted, free of charge, to any person obtaining a
7
 * copy of this software and associated documentation files (the "Software"),
8
 * to deal in the Software without restriction, including without limitation
9
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10
 * and/or sell copies of the Software, and to permit persons to whom the
11
 * Software is furnished to do so, subject to the following conditions:
12
 *
13
 * The above copyright notice and this permission notice shall be included in
14
 * all copies or substantial portions of the Software.
15
 *
16
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
19
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22
 * OTHER DEALINGS IN THE SOFTWARE.
23
 *
24
 * Authors: Dave Airlie
25
 *          Alex Deucher
26
 *          Jerome Glisse
27
 */
28
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 *
 * Engine/memory clock query and control helpers shared by the per-ASIC
 * dispatch tables below.  Two families exist:
 *  - radeon_legacy_*: program the PLLs directly via registers on boards
 *    with a legacy (COM/pre-Atom) BIOS.
 *  - radeon_atom_*:   route the request through AtomBIOS command tables.
 * Which family a given ASIC table uses is fixed per chip generation.
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
44
 
45
/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 *
 * Oldest supported family: PCI GART, MMIO/PLL register accessors,
 * CP ring management, IRQ handling, blit copy and surface registers.
 */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);

/*
 * ASIC dispatch table for the r100 family.
 * NOTE(review): the commented-out entries are deliberate stubs for this
 * port — those function pointers stay NULL, so callers must not invoke
 * them until the corresponding paths are enabled.
 */
static struct radeon_asic r100_asic = {
	.init = &r100_init,
//	.fini = &r100_fini,
//	.suspend = &r100_suspend,
//	.resume = &r100_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
//	.ring_start = &r100_ring_start,
//   .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.get_vblank_counter = &r100_get_vblank_counter,
//	.fence_ring_emit = &r100_fence_ring_emit,
//	.cs_parse = &r100_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = NULL,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};
121
 
122
 
123
/*
 * r300,r350,rv350,rv380
 *
 * Adds the rv370 PCIE GART/register helpers and a DMA copy path on top
 * of the r100 primitives it still reuses (MMIO GART, blit, HPD, etc.).
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
/*
 * ASIC dispatch table for the r300 family.  Uses the r100 PCI GART here;
 * commented-out entries are deliberate stubs for this port (NULL pointers).
 */
static struct radeon_asic r300_asic = {
	.init = &r300_init,
//	.fini = &r300_fini,
//	.suspend = &r300_suspend,
//	.resume = &r300_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
//	.cp_commit = &r100_cp_commit,
//	.ring_start = &r300_ring_start,
//   .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.get_vblank_counter = &r100_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};
180
 
181
/*
 * r420,r423,rv410
 *
 * First family in this header whose table switches from the legacy
 * clock helpers to the AtomBIOS ones, and from the PCI GART to the
 * rv370 PCIE GART.  Everything else is borrowed from r100/r300.
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
/* ASIC dispatch table for r420; commented-out entries are port stubs (NULL). */
static struct radeon_asic r420_asic = {
	.init = &r420_init,
//	.fini = &r420_fini,
//	.suspend = &r420_suspend,
//	.resume = &r420_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
//	.cp_commit = &r100_cp_commit,
//	.ring_start = &r300_ring_start,
//   .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.get_vblank_counter = &r100_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};
223
 
224
 
225
/*
 * rs400,rs480
 *
 * IGP parts with their own GART and MC register accessors; clock control
 * stays on the legacy (non-Atom) helpers.
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
/* ASIC dispatch table for rs400; commented-out entries are port stubs (NULL). */
static struct radeon_asic rs400_asic = {
	.init = &rs400_init,
//	.fini = &rs400_fini,
//	.suspend = &rs400_suspend,
//	.resume = &rs400_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
//	.cp_commit = &r100_cp_commit,
//	.ring_start = &r300_ring_start,
//   .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.get_vblank_counter = &r100_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};
271
 
272
 
273
/*
 * rs600.
 *
 * First IGP family with its own IRQ/vblank handling, GART, MC accessors,
 * bandwidth code and HPD (hot-plug detect) implementation.
 */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);

/*
 * ASIC dispatch table for rs600; commented-out entries are port stubs (NULL).
 * NOTE(review): unlike every other table in this header, no
 * .set_surface_reg/.clear_surface_reg are assigned — presumably intentional
 * for this chip, but verify callers tolerate NULL surface-reg hooks.
 */
static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
//	.fini = &rs600_fini,
//	.suspend = &rs600_suspend,
//	.resume = &rs600_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
//	.cp_commit = &r100_cp_commit,
//	.ring_start = &r300_ring_start,
//   .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &rs600_irq_set,
//	.irq_process = &rs600_irq_process,
//	.get_vblank_counter = &rs600_get_vblank_counter,
//   .fence_ring_emit = &r300_fence_ring_emit,
//   .cs_parse = &r300_cs_parse,
//   .copy_blit = &r100_copy_blit,
//   .copy_dma = &r300_copy_dma,
//   .copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};
327
 
328
 
329
/*
 * rs690,rs740
 *
 * IGPs combining the rs400 GART with the rs600 IRQ/HPD code and their
 * own MC accessors and bandwidth calculation.
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
/* ASIC dispatch table for rs690; commented-out entries are port stubs (NULL). */
static struct radeon_asic rs690_asic = {
	.init = &rs690_init,
//	.fini = &rs690_fini,
//	.suspend = &rs690_suspend,
//	.resume = &rs690_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
//	.cp_commit = &r100_cp_commit,
//	.ring_start = &r300_ring_start,
//   .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &rs600_irq_set,
//	.irq_process = &rs600_irq_process,
//	.get_vblank_counter = &rs600_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r300_copy_dma,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};
374
 
1179 serge 375
 
1117 serge 376
/*
 * rv515
 *
 * Discrete AtomBIOS part: own reset/MC/PCIE accessors and bandwidth
 * code, rv370 PCIE GART, rs600 IRQ/HPD handling.
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
/* ASIC dispatch table for rv515; commented-out entries are port stubs (NULL). */
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
//	.fini = &rv515_fini,
//	.suspend = &rv515_suspend,
//	.resume = &rv515_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
//	.cp_commit = &r100_cp_commit,
//  .ring_start = &rv515_ring_start,
//   .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &rs600_irq_set,
//	.irq_process = &rs600_irq_process,
//	.get_vblank_counter = &rs600_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};
425
 
426
 
427
/*
 * r520,rv530,rv560,rv570,r580
 *
 * Shares almost everything with rv515 (including fini/suspend),
 * differing only in init/resume.
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
/* ASIC dispatch table for r520; commented-out entries are port stubs (NULL). */
static struct radeon_asic r520_asic = {
	.init = &r520_init,
//	.fini = &rv515_fini,
//	.suspend = &rv515_suspend,
//	.resume = &r520_resume,
//	.vga_set_state = &r100_vga_set_state,
    .gpu_reset = &rv515_gpu_reset,
    .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
    .gart_set_page = &rv370_pcie_gart_set_page,
//	.cp_commit = &r100_cp_commit,
//  .ring_start = &rv515_ring_start,
//  .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &rs600_irq_set,
//	.irq_process = &rs600_irq_process,
//	.get_vblank_counter = &rs600_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};
467
 
468
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 *
 * New-architecture family: its own GART flush, writeback (wb), CP,
 * IRQ, CS parser, surface registers, blit/DMA copy and HPD code.
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);

/*
 * ASIC dispatch table for r600; reuses the rs600 GART page setter and the
 * rv515 bandwidth code.  Commented-out entries are port stubs (NULL).
 */
static struct radeon_asic r600_asic = {
	.init = &r600_init,
//	.fini = &r600_fini,
//	.suspend = &r600_suspend,
//	.resume = &r600_resume,
//	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
//	.ring_test = &r600_ring_test,
//	.ring_ib_execute = &r600_ring_ib_execute,
//	.irq_set = &r600_irq_set,
//	.irq_process = &r600_irq_process,
//	.fence_ring_emit = &r600_fence_ring_emit,
//	.cs_parse = &r600_cs_parse,
//	.copy_blit = &r600_copy_blit,
//	.copy_dma = &r600_copy_blit,
//	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
};
541
 
542
/*
 * rv770,rv730,rv710,rv740
 *
 * r700 family: identical table to r600 except for its own
 * init/fini/suspend/resume and gpu_reset entry points.
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

/* ASIC dispatch table for rv770; commented-out entries are port stubs (NULL). */
static struct radeon_asic rv770_asic = {
	.init = &rv770_init,
//	.fini = &rv770_fini,
//	.suspend = &rv770_suspend,
//	.resume = &rv770_resume,
//	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
//	.ring_test = &r600_ring_test,
//	.ring_ib_execute = &r600_ring_ib_execute,
//	.irq_set = &r600_irq_set,
//	.irq_process = &r600_irq_process,
//	.fence_ring_emit = &r600_fence_ring_emit,
//	.cs_parse = &r600_cs_parse,
//	.copy_blit = &r600_copy_blit,
//	.copy_dma = &r600_copy_blit,
//	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
};
584
 
1117 serge 585
#endif