/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__
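
/*
 * This header declares the per-ASIC hardware entry points and gathers them
 * into static "struct radeon_asic" function-pointer tables, one table per
 * chip family (r100, r300, r420, rs400, rs600, rs690, rv515, r520).  Members
 * that are commented out are not hooked up in this revision.
 *
 * A minimal sketch of how such a table is typically consumed, assuming the
 * driver picks one table per device and stores it in rdev->asic (the actual
 * table selection and dispatch code lives elsewhere in the driver, not here):
 *
 *	rdev->asic = &r100_asic;           // pick the table for the detected chip
 *	rdev->asic->init(rdev);            // then reach the hardware through it
 *	rdev->asic->gart_tlb_flush(rdev);
 */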
 
/*
 * common functions
 */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
int r100_init(struct radeon_device *rdev);
int r200_init(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ib_test(struct radeon_device *rdev);
int r100_ring_test(struct radeon_device *rdev);

static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.errata = &r100_errata,
	.vram_info = &r100_vram_info,
	.gpu_reset = &r100_gpu_reset,
	.mc_init = &r100_mc_init,
	.mc_fini = &r100_mc_fini,
//	.wb_init = &r100_wb_init,
//	.wb_fini = &r100_wb_fini,
	.gart_enable = &r100_pci_gart_enable,
	.gart_disable = &r100_pci_gart_disable,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_init = &r100_cp_init,
//	.cp_fini = &r100_cp_fini,
//	.cp_disable = &r100_cp_disable,
	.ring_start = &r100_ring_start,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.fence_ring_emit = &r100_fence_ring_emit,
//	.cs_parse = &r100_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = NULL,
//	.copy = &r100_copy_blit,
//	.set_engine_clock = &radeon_legacy_set_engine_clock,
//	.set_memory_clock = NULL,
//	.set_pcie_lanes = NULL,
//	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
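
/*
 * The tables below reuse helpers across families wherever the hardware block
 * is shared: r300/rs400/rs600/rs690 keep the r100 CP ring handlers, r300
 * keeps the r100 PCI GART, and the PCIE parts (r420, rv515, r520) use the
 * rv370 PCIE GART routines.  As above, commented-out members are not enabled
 * in this revision.
 */
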
/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
int rv370_pcie_gart_init(struct radeon_device *rdev);
void rv370_pcie_gart_fini(struct radeon_device *rdev);
int rv370_pcie_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
int r300_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);

static struct radeon_asic r300_asic = {
	.init = &r300_init,
	.errata = &r300_errata,
	.vram_info = &r300_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &r300_mc_init,
	.mc_fini = &r300_mc_fini,
//	.wb_init = &r100_wb_init,
//	.wb_fini = &r100_wb_fini,
	.gart_enable = &r100_pci_gart_enable,
	.gart_disable = &r100_pci_gart_disable,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_init = &r100_cp_init,
//	.cp_fini = &r100_cp_fini,
//	.cp_disable = &r100_cp_disable,
	.ring_start = &r300_ring_start,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
//	.set_engine_clock = &radeon_legacy_set_engine_clock,
//	.set_memory_clock = NULL,
//	.set_pcie_lanes = &rv370_set_pcie_lanes,
//	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
static struct radeon_asic r420_asic = {
	.init = &r420_init,
	.fini = &r420_fini,
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.errata = NULL,
	.vram_info = NULL,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = NULL,
	.mc_fini = NULL,
	.wb_init = NULL,
	.wb_fini = NULL,
	.gart_enable = NULL,
	.gart_disable = NULL,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_init = NULL,
	.cp_fini = NULL,
	.cp_disable = NULL,
	.ring_start = &r300_ring_start,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
//	.set_engine_clock = &radeon_atom_set_engine_clock,
//	.set_memory_clock = &radeon_atom_set_memory_clock,
//	.set_pcie_lanes = &rv370_set_pcie_lanes,
//	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
int rs400_gart_init(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
	.init = &r300_init,
	.errata = &rs400_errata,
	.vram_info = &rs400_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &rs400_mc_init,
	.mc_fini = &rs400_mc_fini,
//	.wb_init = &r100_wb_init,
//	.wb_fini = &r100_wb_fini,
	.gart_init = &rs400_gart_init,
	.gart_fini = &rs400_gart_fini,
	.gart_enable = &rs400_gart_enable,
	.gart_disable = &rs400_gart_disable,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_init = &r100_cp_init,
//	.cp_fini = &r100_cp_fini,
//	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
//	.set_engine_clock = &radeon_legacy_set_engine_clock,
//	.set_memory_clock = NULL,
//	.set_pcie_lanes = NULL,
//	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs600.
 */
int rs600_init(struct radeon_device *rdev);
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
int rs600_gart_init(struct radeon_device *rdev);
void rs600_gart_fini(struct radeon_device *rdev);
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
	.errata = &rs600_errata,
	.vram_info = &rs600_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &rs600_mc_init,
	.mc_fini = &rs600_mc_fini,
//	.wb_init = &r100_wb_init,
//	.wb_fini = &r100_wb_fini,
	.gart_init = &rs600_gart_init,
	.gart_fini = &rs600_gart_fini,
	.gart_enable = &rs600_gart_enable,
	.gart_disable = &rs600_gart_disable,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_init = &r100_cp_init,
//	.cp_fini = &r100_cp_fini,
//	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
//	.irq_set = &rs600_irq_set,
//	.irq_process = &r100_irq_process,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
//	.set_engine_clock = &radeon_atom_set_engine_clock,
//	.set_memory_clock = &radeon_atom_set_memory_clock,
//	.set_pcie_lanes = NULL,
//	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
};


/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs690_asic = {
	.init = &rs600_init,
	.errata = &rs690_errata,
	.vram_info = &rs690_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &rs690_mc_init,
	.mc_fini = &rs690_mc_fini,
//	.wb_init = &r100_wb_init,
//	.wb_fini = &r100_wb_fini,
	.gart_init = &rs400_gart_init,
	.gart_fini = &rs400_gart_fini,
	.gart_enable = &rs400_gart_enable,
	.gart_disable = &rs400_gart_disable,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_init = &r100_cp_init,
//	.cp_fini = &r100_cp_fini,
//	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
//	.irq_set = &rs600_irq_set,
//	.irq_process = &r100_irq_process,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r300_copy_dma,
//	.set_engine_clock = &radeon_atom_set_engine_clock,
//	.set_memory_clock = &radeon_atom_set_memory_clock,
//	.set_pcie_lanes = NULL,
//	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_errata(struct radeon_device *rdev);
void rv515_vram_info(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
int rv515_mc_init(struct radeon_device *rdev);
void rv515_mc_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
	.errata = &rv515_errata,
	.vram_info = &rv515_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.mc_init = &rv515_mc_init,
	.mc_fini = &rv515_mc_fini,
//	.wb_init = &r100_wb_init,
//	.wb_fini = &r100_wb_fini,
	.gart_init = &rv370_pcie_gart_init,
	.gart_fini = &rv370_pcie_gart_fini,
	.gart_enable = &rv370_pcie_gart_enable,
	.gart_disable = &rv370_pcie_gart_disable,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_init = &r100_cp_init,
//	.cp_fini = &r100_cp_fini,
//	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
//	.set_engine_clock = &radeon_atom_set_engine_clock,
//	.set_memory_clock = &radeon_atom_set_memory_clock,
//	.set_pcie_lanes = &rv370_set_pcie_lanes,
//	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
void r520_errata(struct radeon_device *rdev);
void r520_vram_info(struct radeon_device *rdev);
int r520_mc_init(struct radeon_device *rdev);
void r520_mc_fini(struct radeon_device *rdev);
void r520_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
	.init = &rv515_init,
	.errata = &r520_errata,
	.vram_info = &r520_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.mc_init = &r520_mc_init,
	.mc_fini = &r520_mc_fini,
//	.wb_init = &r100_wb_init,
//	.wb_fini = &r100_wb_fini,
	.gart_init = &rv370_pcie_gart_init,
	.gart_fini = &rv370_pcie_gart_fini,
	.gart_enable = &rv370_pcie_gart_enable,
	.gart_disable = &rv370_pcie_gart_disable,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_init = &r100_cp_init,
//	.cp_fini = &r100_cp_fini,
//	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
//	.irq_set = &r100_irq_set,
//	.irq_process = &r100_irq_process,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
//	.set_engine_clock = &radeon_atom_set_engine_clock,
//	.set_memory_clock = &radeon_atom_set_memory_clock,
//	.set_pcie_lanes = &rv370_set_pcie_lanes,
//	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r520_bandwidth_update,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rv770,rv730,rv710
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);

#endif