blob: 8968f78fa1e30e8424b101a8ccf477d4885286d5 [file] [log] [blame]
/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
/* Clock control for legacy-BIOS parts (used by the r100/r300/rs400 vtables below). */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

/* Clock control via AtomBIOS (used by the r420+/rs600+ vtables below). */
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
int r100_init(struct radeon_device *rdev);
int r200_init(struct radeon_device *rdev);
/* MMIO register accessors. */
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
/* Memory-controller setup/teardown. */
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
/* Writeback buffer setup/teardown. */
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
/* PCI GART management. */
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* Command processor (CP) / ring control. */
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
/* PLL register accessors. */
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
/* Blit-engine copy, fenced; num_pages is a page count. */
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
/* Surface (tiling) register management. */
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ib_test(struct radeon_device *rdev);
int r100_ring_test(struct radeon_device *rdev);

/*
 * ASIC vtable for the r100 family: PCI GART, legacy clock control,
 * blit-only copies (.copy_dma left NULL).
 */
static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.errata = &r100_errata,
	.vram_info = &r100_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.mc_init = &r100_mc_init,
	.mc_fini = &r100_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &r100_pci_gart_init,
	.gart_fini = &r100_pci_gart_fini,
	.gart_enable = &r100_pci_gart_enable,
	.gart_disable = &r100_pci_gart_disable,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = NULL,	/* no DMA copy hook wired up for this family */
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
129
130
/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
/*
 * rv370 PCIe GART and lane control: declared here but wired into the
 * PCIe vtables (r420/rv515/r520) further down, not into r300_asic.
 */
int rv370_pcie_gart_init(struct radeon_device *rdev);
void rv370_pcie_gart_fini(struct radeon_device *rdev);
int rv370_pcie_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
/* DMA-engine copy, fenced; num_pages is a page count. */
int r300_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);

/*
 * r300 family vtable: r100 PCI GART/CP/WB helpers, r300 CS/fence/MC,
 * legacy clock control plus rv370 PCIe lane control.
 */
static struct radeon_asic r300_asic = {
	.init = &r300_init,
	.errata = &r300_errata,
	.vram_info = &r300_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &r300_mc_init,
	.mc_fini = &r300_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &r100_pci_gart_init,
	.gart_fini = &r100_pci_gart_fini,
	.gart_enable = &r100_pci_gart_enable,
	.gart_disable = &r100_pci_gart_disable,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,	/* default copy path is the blitter */
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
199
/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);

/*
 * r420 family vtable — first table here with init/fini/suspend/resume hooks.
 * NOTE(review): many per-stage hooks (errata, vram_info, mc, wb, gart
 * enable/disable, cp, ib_test) are NULL; presumably r420_init/r420_resume
 * drive that setup internally — confirm against r420.c before relying on
 * any NULL slot.
 */
static struct radeon_asic r420_asic = {
	.init = &r420_init,
	.fini = &r420_fini,
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.errata = NULL,
	.vram_info = NULL,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = NULL,
	.mc_fini = NULL,
	.wb_init = NULL,
	.wb_fini = NULL,
	.gart_enable = NULL,
	.gart_disable = NULL,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_init = NULL,
	.cp_fini = NULL,
	.cp_disable = NULL,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = NULL,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
248
249
/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
/* rs400 GART management (also reused by the rs690 vtable below). */
int rs400_gart_init(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* MC indirect register accessors. */
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);

/* rs400/rs480 vtable: r300 core (init/reset/CS) with rs400 MC and GART. */
static struct radeon_asic rs400_asic = {
	.init = &r300_init,
	.errata = &rs400_errata,
	.vram_info = &rs400_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &rs400_mc_init,
	.mc_fini = &rs400_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &rs400_gart_init,
	.gart_fini = &rs400_gart_fini,
	.gart_enable = &rs400_gart_enable,
	.gart_disable = &rs400_gart_disable,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
305
306
/*
 * rs600.
 */
int rs600_init(struct radeon_device *rdev);
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
/* rs600 IRQ/vblank handling (also reused by rs690/rv515/r520 vtables). */
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
/* rs600 GART management (gart_set_page is also reused by r600/rv770). */
int rs600_gart_init(struct radeon_device *rdev);
void rs600_gart_fini(struct radeon_device *rdev);
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* MC indirect register accessors. */
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);

/*
 * rs600 vtable: rs600 MC/GART/IRQ/vblank, AtomBIOS clocks.
 * NOTE(review): unlike the sibling tables this one sets no
 * set_surface_reg/clear_surface_reg hooks (they default to NULL) —
 * confirm that is intentional for this part.
 */
static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
	.errata = &rs600_errata,
	.vram_info = &rs600_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &rs600_mc_init,
	.mc_fini = &rs600_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &rs600_gart_init,
	.gart_fini = &rs600_gart_fini,
	.gart_enable = &rs600_gart_enable,
	.gart_disable = &rs600_gart_disable,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
};
365
366
/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
/* MC indirect register accessors. */
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);

/*
 * rs690/rs740 vtable: rs400 GART, rs600 IRQ/vblank, rs690 MC/bandwidth.
 * Note the default .copy is the DMA path (r300_copy_dma) here, unlike the
 * other pre-r600 tables which default to the blit path.
 */
static struct radeon_asic rs690_asic = {
	.init = &rs600_init,
	.errata = &rs690_errata,
	.vram_info = &rs690_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &rs690_mc_init,
	.mc_fini = &rs690_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &rs400_gart_init,
	.gart_fini = &rs400_gart_fini,
	.gart_enable = &rs400_gart_enable,
	.gart_disable = &rs400_gart_disable,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r300_copy_dma,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
};
417
418
/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_errata(struct radeon_device *rdev);
void rv515_vram_info(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
int rv515_mc_init(struct radeon_device *rdev);
void rv515_mc_fini(struct radeon_device *rdev);
/* MC indirect register accessors. */
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
/* PCIe indirect register accessors. */
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);

/* rv515 vtable: rv370 PCIe GART, rs600 IRQ/vblank, AtomBIOS clocks. */
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
	.errata = &rv515_errata,
	.vram_info = &rv515_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.mc_init = &rv515_mc_init,
	.mc_fini = &rv515_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &rv370_pcie_gart_init,
	.gart_fini = &rv370_pcie_gart_fini,
	.gart_enable = &rv370_pcie_gart_enable,
	.gart_disable = &rv370_pcie_gart_disable,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};
474
475
/*
 * r520,rv530,rv560,rv570,r580
 */
void r520_errata(struct radeon_device *rdev);
void r520_vram_info(struct radeon_device *rdev);
int r520_mc_init(struct radeon_device *rdev);
void r520_mc_fini(struct radeon_device *rdev);
void r520_bandwidth_update(struct radeon_device *rdev);

/* r520 family vtable: rv515 init/reset/ring with r520 MC and bandwidth code. */
static struct radeon_asic r520_asic = {
	.init = &rv515_init,
	.errata = &r520_errata,
	.vram_info = &r520_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.mc_init = &r520_mc_init,
	.mc_fini = &r520_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &rv370_pcie_gart_init,
	.gart_fini = &rv370_pcie_gart_fini,
	.gart_enable = &rv370_pcie_gart_enable,
	.gart_disable = &rv370_pcie_gart_disable,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r520_bandwidth_update,
};
524
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
/* PCIe port indirect register accessors. */
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
/* Surface (tiling) register management. */
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ib_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);

/*
 * r600 family vtable: full init/fini/suspend/resume model, r600-native
 * CP/IRQ/CS/fence, rs600 gart_set_page.
 * NOTE(review): .copy_dma is wired to r600_copy_blit although r600_copy_dma
 * is declared above — confirm the DMA path is intentionally unused.  No
 * .get_vblank_counter hook is set (defaults to NULL).
 */
static struct radeon_asic r600_asic = {
	.errata = NULL,
	.init = &r600_init,
	.fini = &r600_fini,
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vram_info = NULL,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.mc_init = NULL,
	.mc_fini = NULL,
	.wb_init = &r600_wb_init,
	.wb_fini = &r600_wb_fini,
	.gart_enable = NULL,
	.gart_disable = NULL,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_init = NULL,
	.cp_fini = NULL,
	.cp_disable = NULL,
	.ring_start = NULL,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.ib_test = &r600_ib_test,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &r520_bandwidth_update,
};
601
/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

/*
 * rv770 family vtable: identical layout to r600_asic except for its own
 * init/fini/suspend/resume and gpu_reset entry points.
 */
static struct radeon_asic rv770_asic = {
	.errata = NULL,
	.init = &rv770_init,
	.fini = &rv770_fini,
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.vram_info = NULL,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.mc_init = NULL,
	.mc_fini = NULL,
	.wb_init = &r600_wb_init,
	.wb_fini = &r600_wb_fini,
	.gart_enable = NULL,
	.gart_disable = NULL,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_init = NULL,
	.cp_fini = NULL,
	.cp_disable = NULL,
	.ring_start = NULL,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.ib_test = &r600_ib_test,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,	/* blit path used for DMA slot too */
	.copy = &r600_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &r520_bandwidth_update,
};

#endif