Subversion Repositories Kolibri OS

Rev

Rev 1117 | Rev 1120 | Go to most recent revision | Details | Compare with Previous | Last modification | View Log | RSS feed

Rev Author Line No. Line
/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 *
 * Clock-control helpers shared by several ASIC dispatch tables below.
 * The "legacy" variants program clocks directly; the "atom" variants go
 * through the ATOM BIOS (NOTE(review): inferred from naming — confirm
 * against the implementations).
 */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 *
 * Entry points for the r100 family.  Several are reused verbatim by the
 * later ASIC tables (write-back, CP ring, PCI GART, IRQ handling).
 */
int r100_init(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
int r100_gpu_reset(struct radeon_device *rdev);
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
int r100_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
/* Fence/CS/copy entry points not yet enabled in this port: */
//void r100_fence_ring_emit(struct radeon_device *rdev,
//             struct radeon_fence *fence);
//int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
//int r100_copy_blit(struct radeon_device *rdev,
//          uint64_t src_offset,
//          uint64_t dst_offset,
//          unsigned num_pages,
//          struct radeon_fence *fence);


#if 0

/* Dispatch table for r100-class chips.  The whole region down to the
 * matching #endif is compiled out in this port. */
static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.errata = &r100_errata,
	.vram_info = &r100_vram_info,
	.gpu_reset = &r100_gpu_reset,
	.mc_init = &r100_mc_init,
	.mc_fini = &r100_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_enable = &r100_gart_enable,
	.gart_disable = &r100_pci_gart_disable,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.ring_start = &r100_ring_start,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
//	.fence_ring_emit = &r100_fence_ring_emit,
//	.cs_parse = &r100_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = NULL,
//	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
};


/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
//void r300_fence_ring_emit(struct radeon_device *rdev,
//             struct radeon_fence *fence);
//int r300_cs_parse(struct radeon_cs_parser *p);
/* PCIe GART / PCIe-lane helpers introduced with rv370: */
int r300_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
//int r300_copy_dma(struct radeon_device *rdev,
//         uint64_t src_offset,
//         uint64_t dst_offset,
//         unsigned num_pages,
//         struct radeon_fence *fence);


136
static struct radeon_asic r300_asic = {
137
	.init = &r300_init,
138
	.errata = &r300_errata,
139
	.vram_info = &r300_vram_info,
140
	.gpu_reset = &r300_gpu_reset,
141
	.mc_init = &r300_mc_init,
142
	.mc_fini = &r300_mc_fini,
143
	.wb_init = &r100_wb_init,
144
	.wb_fini = &r100_wb_fini,
145
	.gart_enable = &r300_gart_enable,
146
	.gart_disable = &r100_pci_gart_disable,
147
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
148
	.gart_set_page = &r100_pci_gart_set_page,
149
	.cp_init = &r100_cp_init,
150
	.cp_fini = &r100_cp_fini,
151
	.cp_disable = &r100_cp_disable,
152
	.ring_start = &r300_ring_start,
153
	.irq_set = &r100_irq_set,
154
	.irq_process = &r100_irq_process,
155
 //  .fence_ring_emit = &r300_fence_ring_emit,
156
 //  .cs_parse = &r300_cs_parse,
157
 //  .copy_blit = &r100_copy_blit,
158
 //  .copy_dma = &r300_copy_dma,
159
 //  .copy = &r100_copy_blit,
160
	.set_engine_clock = &radeon_legacy_set_engine_clock,
161
	.set_memory_clock = NULL,
162
	.set_pcie_lanes = &rv370_set_pcie_lanes,
163
	.set_clock_gating = &radeon_legacy_set_clock_gating,
164
};

/*
 * r420,r423,rv410
 */
void r420_errata(struct radeon_device *rdev);
void r420_vram_info(struct radeon_device *rdev);
int r420_mc_init(struct radeon_device *rdev);
void r420_mc_fini(struct radeon_device *rdev);
173
static struct radeon_asic r420_asic = {
174
	.init = &r300_init,
175
	.errata = &r420_errata,
176
	.vram_info = &r420_vram_info,
177
	.gpu_reset = &r300_gpu_reset,
178
	.mc_init = &r420_mc_init,
179
	.mc_fini = &r420_mc_fini,
180
	.wb_init = &r100_wb_init,
181
	.wb_fini = &r100_wb_fini,
182
	.gart_enable = &r300_gart_enable,
183
	.gart_disable = &rv370_pcie_gart_disable,
184
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
185
	.gart_set_page = &rv370_pcie_gart_set_page,
186
	.cp_init = &r100_cp_init,
187
	.cp_fini = &r100_cp_fini,
188
	.cp_disable = &r100_cp_disable,
189
	.ring_start = &r300_ring_start,
190
	.irq_set = &r100_irq_set,
191
	.irq_process = &r100_irq_process,
192
//   .fence_ring_emit = &r300_fence_ring_emit,
193
//   .cs_parse = &r300_cs_parse,
194
//   .copy_blit = &r100_copy_blit,
195
//   .copy_dma = &r300_copy_dma,
196
//   .copy = &r100_copy_blit,
197
	.set_engine_clock = &radeon_atom_set_engine_clock,
198
	.set_memory_clock = &radeon_atom_set_memory_clock,
199
	.set_pcie_lanes = &rv370_set_pcie_lanes,
200
	.set_clock_gating = &radeon_atom_set_clock_gating,
201
};


/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
217
static struct radeon_asic rs400_asic = {
218
	.init = &r300_init,
219
	.errata = &rs400_errata,
220
	.vram_info = &rs400_vram_info,
221
	.gpu_reset = &r300_gpu_reset,
222
	.mc_init = &rs400_mc_init,
223
	.mc_fini = &rs400_mc_fini,
224
	.wb_init = &r100_wb_init,
225
	.wb_fini = &r100_wb_fini,
226
	.gart_enable = &rs400_gart_enable,
227
	.gart_disable = &rs400_gart_disable,
228
	.gart_tlb_flush = &rs400_gart_tlb_flush,
229
	.gart_set_page = &rs400_gart_set_page,
230
	.cp_init = &r100_cp_init,
231
	.cp_fini = &r100_cp_fini,
232
	.cp_disable = &r100_cp_disable,
233
	.ring_start = &r300_ring_start,
234
	.irq_set = &r100_irq_set,
235
	.irq_process = &r100_irq_process,
236
//   .fence_ring_emit = &r300_fence_ring_emit,
237
//   .cs_parse = &r300_cs_parse,
238
//   .copy_blit = &r100_copy_blit,
239
//   .copy_dma = &r300_copy_dma,
240
//   .copy = &r100_copy_blit,
241
	.set_engine_clock = &radeon_legacy_set_engine_clock,
242
	.set_memory_clock = NULL,
243
	.set_pcie_lanes = NULL,
244
	.set_clock_gating = &radeon_legacy_set_clock_gating,
245
};


/*
 * rs600.
 */
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
262
static struct radeon_asic rs600_asic = {
263
	.init = &r300_init,
264
	.errata = &rs600_errata,
265
	.vram_info = &rs600_vram_info,
266
	.gpu_reset = &r300_gpu_reset,
267
	.mc_init = &rs600_mc_init,
268
	.mc_fini = &rs600_mc_fini,
269
	.wb_init = &r100_wb_init,
270
	.wb_fini = &r100_wb_fini,
271
	.gart_enable = &rs600_gart_enable,
272
	.gart_disable = &rs600_gart_disable,
273
	.gart_tlb_flush = &rs600_gart_tlb_flush,
274
	.gart_set_page = &rs600_gart_set_page,
275
	.cp_init = &r100_cp_init,
276
	.cp_fini = &r100_cp_fini,
277
	.cp_disable = &r100_cp_disable,
278
	.ring_start = &r300_ring_start,
279
	.irq_set = &rs600_irq_set,
280
	.irq_process = &r100_irq_process,
281
//   .fence_ring_emit = &r300_fence_ring_emit,
282
//   .cs_parse = &r300_cs_parse,
283
//   .copy_blit = &r100_copy_blit,
284
//   .copy_dma = &r300_copy_dma,
285
//   .copy = &r100_copy_blit,
286
	.set_engine_clock = &radeon_atom_set_engine_clock,
287
	.set_memory_clock = &radeon_atom_set_memory_clock,
288
	.set_pcie_lanes = NULL,
289
	.set_clock_gating = &radeon_atom_set_clock_gating,
290
};


/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
302
static struct radeon_asic rs690_asic = {
303
	.init = &r300_init,
304
	.errata = &rs690_errata,
305
	.vram_info = &rs690_vram_info,
306
	.gpu_reset = &r300_gpu_reset,
307
	.mc_init = &rs690_mc_init,
308
	.mc_fini = &rs690_mc_fini,
309
	.wb_init = &r100_wb_init,
310
	.wb_fini = &r100_wb_fini,
311
	.gart_enable = &rs400_gart_enable,
312
	.gart_disable = &rs400_gart_disable,
313
	.gart_tlb_flush = &rs400_gart_tlb_flush,
314
	.gart_set_page = &rs400_gart_set_page,
315
	.cp_init = &r100_cp_init,
316
	.cp_fini = &r100_cp_fini,
317
	.cp_disable = &r100_cp_disable,
318
	.ring_start = &r300_ring_start,
319
	.irq_set = &rs600_irq_set,
320
	.irq_process = &r100_irq_process,
321
 //  .fence_ring_emit = &r300_fence_ring_emit,
322
 //  .cs_parse = &r300_cs_parse,
323
 //  .copy_blit = &r100_copy_blit,
324
 //  .copy_dma = &r300_copy_dma,
325
//   .copy = &r300_copy_dma,
326
	.set_engine_clock = &radeon_atom_set_engine_clock,
327
	.set_memory_clock = &radeon_atom_set_memory_clock,
328
	.set_pcie_lanes = NULL,
329
	.set_clock_gating = &radeon_atom_set_clock_gating,
330
};
331
 
332
#endif
/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_errata(struct radeon_device *rdev);
void rv515_vram_info(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
int rv515_mc_init(struct radeon_device *rdev);
void rv515_mc_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);

/* rv515 dispatch table, kept for reference but disabled in this port:
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
	.errata = &rv515_errata,
	.vram_info = &rv515_vram_info,
	.gpu_reset = &rv515_gpu_reset,
	.mc_init = &rv515_mc_init,
	.mc_fini = &rv515_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_enable = &r300_gart_enable,
	.gart_disable = &rv370_pcie_gart_disable,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.ring_start = &rv515_ring_start,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
//	.fence_ring_emit = &r300_fence_ring_emit,
//	.cs_parse = &r300_cs_parse,
//	.copy_blit = &r100_copy_blit,
//	.copy_dma = &r300_copy_dma,
//	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
};
*/


/* PCIe GART / PCIe-lane helpers.  These repeat declarations that also
 * appear inside the "#if 0" region above; these copies are the ones the
 * live code actually sees. */
int r300_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);


/*
 * r520,rv530,rv560,rv570,r580
 */
void r520_errata(struct radeon_device *rdev);
void r520_vram_info(struct radeon_device *rdev);
int r520_mc_init(struct radeon_device *rdev);
void r520_mc_fini(struct radeon_device *rdev);

399
static struct radeon_asic r520_asic = {
400
	.init = &rv515_init,
401
    .errata = &r520_errata,
402
    .vram_info = &r520_vram_info,
403
    .gpu_reset = &rv515_gpu_reset,
1119 serge 404
    .mc_init = &r520_mc_init,
405
    .mc_fini = &r520_mc_fini,
1117 serge 406
//   .wb_init = &r100_wb_init,
407
//   .wb_fini = &r100_wb_fini,
1119 serge 408
    .gart_enable = &r300_gart_enable,
409
    .gart_disable = &rv370_pcie_gart_disable,
410
    .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
411
    .gart_set_page = &rv370_pcie_gart_set_page,
412
    .cp_init = &r100_cp_init,
413
//    .cp_fini = &r100_cp_fini,
1117 serge 414
//   .cp_disable = &r100_cp_disable,
1119 serge 415
    .ring_start = &rv515_ring_start,
1117 serge 416
//   .irq_set = &r100_irq_set,
417
//   .irq_process = &r100_irq_process,
418
//   .fence_ring_emit = &r300_fence_ring_emit,
419
//   .cs_parse = &r300_cs_parse,
420
//   .copy_blit = &r100_copy_blit,
421
//   .copy_dma = &r300_copy_dma,
422
//   .copy = &r100_copy_blit,
423
//   .set_engine_clock = &radeon_atom_set_engine_clock,
424
//   .set_memory_clock = &radeon_atom_set_memory_clock,
425
//   .set_pcie_lanes = &rv370_set_pcie_lanes,
426
//   .set_clock_gating = &radeon_atom_set_clock_gating,
427
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rv770,rv730,rv710
 */
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);

#endif /* __RADEON_ASIC_H__ */