Subversion Repositories Kolibri OS

Rev

Rev 1404 | Rev 1413 | Go to most recent revision | Details | Compare with Previous | Last modification | View Log | RSS feed

Rev Author Line No. Line
1117 serge 1
/*
2
 * Copyright 2008 Advanced Micro Devices, Inc.
3
 * Copyright 2008 Red Hat Inc.
4
 * Copyright 2009 Jerome Glisse.
5
 *
6
 * Permission is hereby granted, free of charge, to any person obtaining a
7
 * copy of this software and associated documentation files (the "Software"),
8
 * to deal in the Software without restriction, including without limitation
9
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10
 * and/or sell copies of the Software, and to permit persons to whom the
11
 * Software is furnished to do so, subject to the following conditions:
12
 *
13
 * The above copyright notice and this permission notice shall be included in
14
 * all copies or substantial portions of the Software.
15
 *
16
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
19
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22
 * OTHER DEALINGS IN THE SOFTWARE.
23
 *
24
 * Authors: Dave Airlie
25
 *          Alex Deucher
26
 *          Jerome Glisse
27
 */
28
#ifndef __RADEON_ASIC_H__
29
#define __RADEON_ASIC_H__
30
 
31
/*
32
 * common functions
33
 */
1268 serge 34
/*
 * Clock accessors shared by the per-ASIC tables below.
 * The radeon_legacy_* variants are wired into the r100/r300/rs400
 * tables; the radeon_atom_* variants into the r420-and-newer tables.
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
/* NOTE(review): no legacy set_memory_clock is declared; the legacy
 * tables below leave .set_memory_clock = NULL accordingly. */

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
44
 
45
/*
46
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
47
 */
1221 serge 48
/* r100-family entry points, implemented elsewhere in the driver.
 * Several of them (mm/pll register accessors, cp_commit, surface regs,
 * bandwidth) are reused by later ASIC tables in this header. */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
1117 serge 85
 
86
/*
 * Dispatch table for r100-class chips: legacy (COMBIOS) clock control,
 * PCI GART.  Entries disabled with "//" are not yet enabled in this
 * port; designated initialization leaves those pointers NULL, so
 * callers must check before invoking them.
 */
static struct radeon_asic r100_asic = {
	.init = &r100_init,
//	.fini = &r100_fini,
//	.suspend = &r100_suspend,
//	.resume = &r100_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
    .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.get_vblank_counter = &r100_get_vblank_counter,
//	.fence_ring_emit = &r100_fence_ring_emit,
//	.cs_parse = &r100_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = NULL,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	/* no legacy memory-clock setter is declared in this header */
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
122
 
123
 
124
/*
125
 * r300,r350,rv350,rv380
126
 */
1221 serge 127
/* r300-family entry points, plus the rv370 PCIE GART / lane helpers
 * shared with the r420 and rv515/r520 tables below. */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
1117 serge 146
/*
 * Dispatch table for r300-class chips.  Reuses r100 GART and HPD hooks;
 * legacy clock control but with rv370 PCIE lane configuration.
 * "//" entries are disabled in this port and therefore NULL.
 */
static struct radeon_asic r300_asic = {
	.init = &r300_init,
//	.fini = &r300_fini,
//	.suspend = &r300_suspend,
//	.resume = &r300_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
    .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.get_vblank_counter = &r100_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
182
 
183
/*
184
 * r420,r423,rv410
185
 */
1179 serge 186
/* r420-family lifecycle entry points; everything else in the r420
 * table is borrowed from r100/r300/rv370. */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
1117 serge 190
/*
 * Dispatch table for r420-class chips: first family here to use
 * AtomBIOS clock control and the rv370 PCIE GART.
 * "//" entries are disabled in this port and therefore NULL.
 */
static struct radeon_asic r420_asic = {
	.init = &r420_init,
//	.fini = &r420_fini,
//	.suspend = &r420_suspend,
//	.resume = &r420_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
    .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.get_vblank_counter = &r100_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
226
 
227
 
228
/*
229
 * rs400,rs480
230
 */
1221 serge 231
/* rs400/rs480 entry points: lifecycle, integrated GART, and MC
 * (memory-controller) register accessors. */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
239
/*
 * Dispatch table for rs400/rs480 IGP chips: legacy clock control with
 * the rs400 integrated GART.  "//" entries are disabled in this port
 * and therefore NULL.
 */
static struct radeon_asic rs400_asic = {
	.init = &rs400_init,
//	.fini = &rs400_fini,
//	.suspend = &rs400_suspend,
//	.resume = &rs400_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
    .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.get_vblank_counter = &r100_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
275
 
276
 
277
/*
278
 * rs600.
279
 */
1221 serge 280
/* rs600 entry points; its irq/vblank/GART/HPD hooks are also reused by
 * the rs690, rv515 and r520 tables below, and rs600_gart_set_page by
 * the r600/rv770 tables. */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);
297
 
1117 serge 298
/*
 * Dispatch table for rs600 IGP chips: AtomBIOS clocks, rs600 GART/HPD.
 * "//" entries are disabled in this port and therefore NULL.
 * NOTE(review): unlike the sibling tables, no .set_surface_reg /
 * .clear_surface_reg entries appear here, so both stay NULL — confirm
 * this is intentional for rs600 rather than an omission.
 */
static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
//	.fini = &rs600_fini,
//	.suspend = &rs600_suspend,
//	.resume = &rs600_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
    .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &rs600_irq_set,
//	.irq_process = &rs600_irq_process,
//	.get_vblank_counter = &rs600_get_vblank_counter,
//   .fence_ring_emit = &r300_fence_ring_emit,
//   .cs_parse = &r300_cs_parse,
//   .copy_blit = &r100_copy_blit,
//   .copy_dma = &r300_copy_dma,
//   .copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
332
 
333
 
334
/*
335
 * rs690,rs740
336
 */
1221 serge 337
/* rs690/rs740 entry points: lifecycle, MC register accessors and
 * bandwidth update; GART and irq hooks come from rs400/rs600. */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
1117 serge 344
/*
 * Dispatch table for rs690/rs740 IGP chips: AtomBIOS clocks, rs400
 * GART, rs600 irq/HPD hooks.  "//" entries are disabled in this port
 * and therefore NULL.
 */
static struct radeon_asic rs690_asic = {
	.init = &rs690_init,
//	.fini = &rs690_fini,
//	.suspend = &rs690_suspend,
//	.resume = &rs690_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
    .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &rs600_irq_set,
//	.irq_process = &rs600_irq_process,
//	.get_vblank_counter = &rs600_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r300_copy_dma,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
380
 
1179 serge 381
 
1117 serge 382
/*
383
 * rv515
384
 */
385
/* rv515 entry points; rv515_ring_start / rv515_gpu_reset /
 * rv515_bandwidth_update are also reused by the r520 (and, for
 * bandwidth, the r600/rv770) tables below. */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
1117 serge 396
/*
 * Dispatch table for rv515 chips: AtomBIOS clocks, rv370 PCIE GART,
 * rs600 irq/HPD hooks.  "//" entries are disabled in this port and
 * therefore NULL.
 */
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
//	.fini = &rv515_fini,
//	.suspend = &rv515_suspend,
//	.resume = &rv515_resume,
//	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
    .ring_start = &rv515_ring_start,
    .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &rs600_irq_set,
//	.irq_process = &rs600_irq_process,
//	.get_vblank_counter = &rs600_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
432
 
433
 
434
/*
435
 * r520,rv530,rv560,rv570,r580
436
 */
1221 serge 437
/* r520-specific entry points; the rest of the r520 table reuses the
 * rv515 hooks declared above. */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
1117 serge 439
/*
 * Dispatch table for r520/rv530/rv560/rv570/r580: identical to the
 * rv515 table except for its own init/resume.  "//" entries are
 * disabled in this port and therefore NULL.
 */
static struct radeon_asic r520_asic = {
	.init = &r520_init,
//	.fini = &rv515_fini,
//	.suspend = &rv515_suspend,
//	.resume = &r520_resume,
//	.vga_set_state = &r100_vga_set_state,
    .gpu_reset = &rv515_gpu_reset,
    .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
    .gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
    .ring_start = &rv515_ring_start,
    .ring_test = &r100_ring_test,
//	.ring_ib_execute = &r100_ring_ib_execute,
//	.irq_set = &rs600_irq_set,
//	.irq_process = &rs600_irq_process,
//	.get_vblank_counter = &rs600_get_vblank_counter,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
475
 
476
/*
1221 serge 477
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
1117 serge 478
 */
1179 serge 479
/* r600-family entry points (also used by the rv770 table below):
 * lifecycle, writeback (wb) buffers, PCIE GART, CS parsing, fences,
 * blit/DMA copies, irq handling, surface registers and HPD. */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
1117 serge 516
 
1233 serge 517
/*
 * Dispatch table for r600-class chips: own PCIE GART flush with the
 * rs600 page setter, AtomBIOS clocks, r600 surface/HPD hooks.  Note
 * .ring_start is absent (NULL) — the r600 CP is brought up differently
 * from the earlier families.  "//" entries (including cp_commit,
 * ring_test and ioctl_wait_idle) are disabled in this port and
 * therefore NULL.
 */
static struct radeon_asic r600_asic = {
	.init = &r600_init,
//	.fini = &r600_fini,
//	.suspend = &r600_suspend,
//	.resume = &r600_resume,
//	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
//	.ring_test = &r600_ring_test,
//	.ring_ib_execute = &r600_ring_ib_execute,
//	.irq_set = &r600_irq_set,
//	.irq_process = &r600_irq_process,
//	.fence_ring_emit = &r600_fence_ring_emit,
//	.cs_parse = &r600_cs_parse,
//	.copy_blit = &r600_copy_blit,
//	.copy_dma = &r600_copy_blit,
//	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
//	.ioctl_wait_idle = r600_ioctl_wait_idle,
};
551
 
552
/*
553
 * rv770,rv730,rv710,rv740
554
 */
555
/* rv770-specific entry points; all remaining hooks in the rv770 table
 * are shared with r600. */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);
560
 
561
/*
 * Dispatch table for rv770/rv730/rv710/rv740: mirrors the r600 table
 * except for its own init and gpu_reset.  "//" entries are disabled in
 * this port and therefore NULL.
 */
static struct radeon_asic rv770_asic = {
	.init = &rv770_init,
//	.fini = &rv770_fini,
//	.suspend = &rv770_suspend,
//	.resume = &rv770_resume,
//	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
//	.ring_test = &r600_ring_test,
//	.ring_ib_execute = &r600_ring_ib_execute,
//	.irq_set = &r600_irq_set,
//	.irq_process = &r600_irq_process,
//	.fence_ring_emit = &r600_fence_ring_emit,
//	.cs_parse = &r600_cs_parse,
//	.copy_blit = &r600_copy_blit,
//	.copy_dma = &r600_copy_blit,
//	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
//	.ioctl_wait_idle = r600_ioctl_wait_idle,
};
595
 
1117 serge 596
#endif