/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
28#ifndef __RADEON_ASIC_H__
29#define __RADEON_ASIC_H__
30
31/*
32 * common functions
33 */
34void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
35void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
36
37void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
38void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
39void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
40
41/*
42 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
43 */
Jerome Glisse068a1172009-06-17 13:28:30 +020044int r100_init(struct radeon_device *rdev);
Dave Airlie551ebd82009-09-01 15:25:57 +100045int r200_init(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020046uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
47void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
48void r100_errata(struct radeon_device *rdev);
49void r100_vram_info(struct radeon_device *rdev);
50int r100_gpu_reset(struct radeon_device *rdev);
51int r100_mc_init(struct radeon_device *rdev);
52void r100_mc_fini(struct radeon_device *rdev);
Michel Dänzer7ed220d2009-08-13 11:10:51 +020053u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020054int r100_wb_init(struct radeon_device *rdev);
55void r100_wb_fini(struct radeon_device *rdev);
56int r100_gart_enable(struct radeon_device *rdev);
57void r100_pci_gart_disable(struct radeon_device *rdev);
58void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
59int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
60int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
61void r100_cp_fini(struct radeon_device *rdev);
62void r100_cp_disable(struct radeon_device *rdev);
Jerome Glisse3ce0a232009-09-08 10:10:24 +100063void r100_cp_commit(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020064void r100_ring_start(struct radeon_device *rdev);
65int r100_irq_set(struct radeon_device *rdev);
66int r100_irq_process(struct radeon_device *rdev);
67void r100_fence_ring_emit(struct radeon_device *rdev,
68 struct radeon_fence *fence);
69int r100_cs_parse(struct radeon_cs_parser *p);
70void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
71uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
72int r100_copy_blit(struct radeon_device *rdev,
73 uint64_t src_offset,
74 uint64_t dst_offset,
75 unsigned num_pages,
76 struct radeon_fence *fence);
Dave Airliee024e112009-06-24 09:48:08 +100077int r100_set_surface_reg(struct radeon_device *rdev, int reg,
78 uint32_t tiling_flags, uint32_t pitch,
79 uint32_t offset, uint32_t obj_size);
80int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
Jerome Glissec93bb852009-07-13 21:04:08 +020081void r100_bandwidth_update(struct radeon_device *rdev);
Jerome Glisse3ce0a232009-09-08 10:10:24 +100082void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
83int r100_ib_test(struct radeon_device *rdev);
84int r100_ring_test(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020085
86static struct radeon_asic r100_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +020087 .init = &r100_init,
Jerome Glisse771fe6b2009-06-05 14:42:42 +020088 .errata = &r100_errata,
89 .vram_info = &r100_vram_info,
90 .gpu_reset = &r100_gpu_reset,
91 .mc_init = &r100_mc_init,
92 .mc_fini = &r100_mc_fini,
93 .wb_init = &r100_wb_init,
94 .wb_fini = &r100_wb_fini,
95 .gart_enable = &r100_gart_enable,
96 .gart_disable = &r100_pci_gart_disable,
97 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
98 .gart_set_page = &r100_pci_gart_set_page,
99 .cp_init = &r100_cp_init,
100 .cp_fini = &r100_cp_fini,
101 .cp_disable = &r100_cp_disable,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000102 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200103 .ring_start = &r100_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000104 .ring_test = &r100_ring_test,
105 .ring_ib_execute = &r100_ring_ib_execute,
106 .ib_test = &r100_ib_test,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200107 .irq_set = &r100_irq_set,
108 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200109 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200110 .fence_ring_emit = &r100_fence_ring_emit,
111 .cs_parse = &r100_cs_parse,
112 .copy_blit = &r100_copy_blit,
113 .copy_dma = NULL,
114 .copy = &r100_copy_blit,
115 .set_engine_clock = &radeon_legacy_set_engine_clock,
116 .set_memory_clock = NULL,
117 .set_pcie_lanes = NULL,
118 .set_clock_gating = &radeon_legacy_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000119 .set_surface_reg = r100_set_surface_reg,
120 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200121 .bandwidth_update = &r100_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200122};
123
124
/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
int r300_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
int r300_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200151static struct radeon_asic r300_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +0200152 .init = &r300_init,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200153 .errata = &r300_errata,
154 .vram_info = &r300_vram_info,
155 .gpu_reset = &r300_gpu_reset,
156 .mc_init = &r300_mc_init,
157 .mc_fini = &r300_mc_fini,
158 .wb_init = &r100_wb_init,
159 .wb_fini = &r100_wb_fini,
160 .gart_enable = &r300_gart_enable,
161 .gart_disable = &r100_pci_gart_disable,
162 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
163 .gart_set_page = &r100_pci_gart_set_page,
164 .cp_init = &r100_cp_init,
165 .cp_fini = &r100_cp_fini,
166 .cp_disable = &r100_cp_disable,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000167 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200168 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000169 .ring_test = &r100_ring_test,
170 .ring_ib_execute = &r100_ring_ib_execute,
171 .ib_test = &r100_ib_test,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200172 .irq_set = &r100_irq_set,
173 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200174 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200175 .fence_ring_emit = &r300_fence_ring_emit,
176 .cs_parse = &r300_cs_parse,
177 .copy_blit = &r100_copy_blit,
178 .copy_dma = &r300_copy_dma,
179 .copy = &r100_copy_blit,
180 .set_engine_clock = &radeon_legacy_set_engine_clock,
181 .set_memory_clock = NULL,
182 .set_pcie_lanes = &rv370_set_pcie_lanes,
183 .set_clock_gating = &radeon_legacy_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000184 .set_surface_reg = r100_set_surface_reg,
185 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200186 .bandwidth_update = &r100_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200187};
188
/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200196static struct radeon_asic r420_asic = {
Jerome Glisse9f022dd2009-09-11 15:35:22 +0200197 .init = &r420_init,
198 .fini = &r420_fini,
199 .suspend = &r420_suspend,
200 .resume = &r420_resume,
201 .errata = NULL,
202 .vram_info = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200203 .gpu_reset = &r300_gpu_reset,
Jerome Glisse9f022dd2009-09-11 15:35:22 +0200204 .mc_init = NULL,
205 .mc_fini = NULL,
206 .wb_init = NULL,
207 .wb_fini = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200208 .gart_enable = &r300_gart_enable,
209 .gart_disable = &rv370_pcie_gart_disable,
210 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
211 .gart_set_page = &rv370_pcie_gart_set_page,
Jerome Glisse9f022dd2009-09-11 15:35:22 +0200212 .cp_init = NULL,
213 .cp_fini = NULL,
214 .cp_disable = NULL,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000215 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200216 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000217 .ring_test = &r100_ring_test,
218 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse9f022dd2009-09-11 15:35:22 +0200219 .ib_test = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200220 .irq_set = &r100_irq_set,
221 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200222 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200223 .fence_ring_emit = &r300_fence_ring_emit,
224 .cs_parse = &r300_cs_parse,
225 .copy_blit = &r100_copy_blit,
226 .copy_dma = &r300_copy_dma,
227 .copy = &r100_copy_blit,
228 .set_engine_clock = &radeon_atom_set_engine_clock,
229 .set_memory_clock = &radeon_atom_set_memory_clock,
230 .set_pcie_lanes = &rv370_set_pcie_lanes,
231 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000232 .set_surface_reg = r100_set_surface_reg,
233 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200234 .bandwidth_update = &r100_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200235};
236
237
/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
251static struct radeon_asic rs400_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +0200252 .init = &r300_init,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200253 .errata = &rs400_errata,
254 .vram_info = &rs400_vram_info,
255 .gpu_reset = &r300_gpu_reset,
256 .mc_init = &rs400_mc_init,
257 .mc_fini = &rs400_mc_fini,
258 .wb_init = &r100_wb_init,
259 .wb_fini = &r100_wb_fini,
260 .gart_enable = &rs400_gart_enable,
261 .gart_disable = &rs400_gart_disable,
262 .gart_tlb_flush = &rs400_gart_tlb_flush,
263 .gart_set_page = &rs400_gart_set_page,
264 .cp_init = &r100_cp_init,
265 .cp_fini = &r100_cp_fini,
266 .cp_disable = &r100_cp_disable,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000267 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200268 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000269 .ring_test = &r100_ring_test,
270 .ring_ib_execute = &r100_ring_ib_execute,
271 .ib_test = &r100_ib_test,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200272 .irq_set = &r100_irq_set,
273 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200274 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200275 .fence_ring_emit = &r300_fence_ring_emit,
276 .cs_parse = &r300_cs_parse,
277 .copy_blit = &r100_copy_blit,
278 .copy_dma = &r300_copy_dma,
279 .copy = &r100_copy_blit,
280 .set_engine_clock = &radeon_legacy_set_engine_clock,
281 .set_memory_clock = NULL,
282 .set_pcie_lanes = NULL,
283 .set_clock_gating = &radeon_legacy_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000284 .set_surface_reg = r100_set_surface_reg,
285 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200286 .bandwidth_update = &r100_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200287};
288
289
290/*
291 * rs600.
292 */
Dave Airlie3f7dc91a2009-08-27 11:10:15 +1000293int rs600_init(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200294void rs600_errata(struct radeon_device *rdev);
295void rs600_vram_info(struct radeon_device *rdev);
296int rs600_mc_init(struct radeon_device *rdev);
297void rs600_mc_fini(struct radeon_device *rdev);
298int rs600_irq_set(struct radeon_device *rdev);
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200299int rs600_irq_process(struct radeon_device *rdev);
300u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200301int rs600_gart_enable(struct radeon_device *rdev);
302void rs600_gart_disable(struct radeon_device *rdev);
303void rs600_gart_tlb_flush(struct radeon_device *rdev);
304int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
305uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
306void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
Jerome Glissec93bb852009-07-13 21:04:08 +0200307void rs600_bandwidth_update(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200308static struct radeon_asic rs600_asic = {
Dave Airlie3f7dc91a2009-08-27 11:10:15 +1000309 .init = &rs600_init,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200310 .errata = &rs600_errata,
311 .vram_info = &rs600_vram_info,
312 .gpu_reset = &r300_gpu_reset,
313 .mc_init = &rs600_mc_init,
314 .mc_fini = &rs600_mc_fini,
315 .wb_init = &r100_wb_init,
316 .wb_fini = &r100_wb_fini,
317 .gart_enable = &rs600_gart_enable,
318 .gart_disable = &rs600_gart_disable,
319 .gart_tlb_flush = &rs600_gart_tlb_flush,
320 .gart_set_page = &rs600_gart_set_page,
321 .cp_init = &r100_cp_init,
322 .cp_fini = &r100_cp_fini,
323 .cp_disable = &r100_cp_disable,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000324 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200325 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000326 .ring_test = &r100_ring_test,
327 .ring_ib_execute = &r100_ring_ib_execute,
328 .ib_test = &r100_ib_test,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200329 .irq_set = &rs600_irq_set,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200330 .irq_process = &rs600_irq_process,
331 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200332 .fence_ring_emit = &r300_fence_ring_emit,
333 .cs_parse = &r300_cs_parse,
334 .copy_blit = &r100_copy_blit,
335 .copy_dma = &r300_copy_dma,
336 .copy = &r100_copy_blit,
337 .set_engine_clock = &radeon_atom_set_engine_clock,
338 .set_memory_clock = &radeon_atom_set_memory_clock,
339 .set_pcie_lanes = NULL,
340 .set_clock_gating = &radeon_atom_set_clock_gating,
Jerome Glissec93bb852009-07-13 21:04:08 +0200341 .bandwidth_update = &rs600_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200342};
343
344
/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200355static struct radeon_asic rs690_asic = {
Dave Airlie3f7dc91a2009-08-27 11:10:15 +1000356 .init = &rs600_init,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200357 .errata = &rs690_errata,
358 .vram_info = &rs690_vram_info,
359 .gpu_reset = &r300_gpu_reset,
360 .mc_init = &rs690_mc_init,
361 .mc_fini = &rs690_mc_fini,
362 .wb_init = &r100_wb_init,
363 .wb_fini = &r100_wb_fini,
364 .gart_enable = &rs400_gart_enable,
365 .gart_disable = &rs400_gart_disable,
366 .gart_tlb_flush = &rs400_gart_tlb_flush,
367 .gart_set_page = &rs400_gart_set_page,
368 .cp_init = &r100_cp_init,
369 .cp_fini = &r100_cp_fini,
370 .cp_disable = &r100_cp_disable,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000371 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200372 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000373 .ring_test = &r100_ring_test,
374 .ring_ib_execute = &r100_ring_ib_execute,
375 .ib_test = &r100_ib_test,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200376 .irq_set = &rs600_irq_set,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200377 .irq_process = &rs600_irq_process,
378 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200379 .fence_ring_emit = &r300_fence_ring_emit,
380 .cs_parse = &r300_cs_parse,
381 .copy_blit = &r100_copy_blit,
382 .copy_dma = &r300_copy_dma,
383 .copy = &r300_copy_dma,
384 .set_engine_clock = &radeon_atom_set_engine_clock,
385 .set_memory_clock = &radeon_atom_set_memory_clock,
386 .set_pcie_lanes = NULL,
387 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000388 .set_surface_reg = r100_set_surface_reg,
389 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200390 .bandwidth_update = &rs690_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200391};
392
393
/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_errata(struct radeon_device *rdev);
void rv515_vram_info(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
int rv515_mc_init(struct radeon_device *rdev);
void rv515_mc_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200409static struct radeon_asic rv515_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +0200410 .init = &rv515_init,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200411 .errata = &rv515_errata,
412 .vram_info = &rv515_vram_info,
413 .gpu_reset = &rv515_gpu_reset,
414 .mc_init = &rv515_mc_init,
415 .mc_fini = &rv515_mc_fini,
416 .wb_init = &r100_wb_init,
417 .wb_fini = &r100_wb_fini,
418 .gart_enable = &r300_gart_enable,
419 .gart_disable = &rv370_pcie_gart_disable,
420 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
421 .gart_set_page = &rv370_pcie_gart_set_page,
422 .cp_init = &r100_cp_init,
423 .cp_fini = &r100_cp_fini,
424 .cp_disable = &r100_cp_disable,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000425 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200426 .ring_start = &rv515_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000427 .ring_test = &r100_ring_test,
428 .ring_ib_execute = &r100_ring_ib_execute,
429 .ib_test = &r100_ib_test,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200430 .irq_set = &rs600_irq_set,
431 .irq_process = &rs600_irq_process,
432 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200433 .fence_ring_emit = &r300_fence_ring_emit,
Jerome Glisse068a1172009-06-17 13:28:30 +0200434 .cs_parse = &r300_cs_parse,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200435 .copy_blit = &r100_copy_blit,
436 .copy_dma = &r300_copy_dma,
437 .copy = &r100_copy_blit,
438 .set_engine_clock = &radeon_atom_set_engine_clock,
439 .set_memory_clock = &radeon_atom_set_memory_clock,
440 .set_pcie_lanes = &rv370_set_pcie_lanes,
441 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000442 .set_surface_reg = r100_set_surface_reg,
443 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200444 .bandwidth_update = &rv515_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200445};
446
447
/*
 * r520,rv530,rv560,rv570,r580
 */
void r520_errata(struct radeon_device *rdev);
void r520_vram_info(struct radeon_device *rdev);
int r520_mc_init(struct radeon_device *rdev);
void r520_mc_fini(struct radeon_device *rdev);
void r520_bandwidth_update(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200456static struct radeon_asic r520_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +0200457 .init = &rv515_init,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200458 .errata = &r520_errata,
459 .vram_info = &r520_vram_info,
460 .gpu_reset = &rv515_gpu_reset,
461 .mc_init = &r520_mc_init,
462 .mc_fini = &r520_mc_fini,
463 .wb_init = &r100_wb_init,
464 .wb_fini = &r100_wb_fini,
465 .gart_enable = &r300_gart_enable,
466 .gart_disable = &rv370_pcie_gart_disable,
467 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
468 .gart_set_page = &rv370_pcie_gart_set_page,
469 .cp_init = &r100_cp_init,
470 .cp_fini = &r100_cp_fini,
471 .cp_disable = &r100_cp_disable,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000472 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200473 .ring_start = &rv515_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000474 .ring_test = &r100_ring_test,
475 .ring_ib_execute = &r100_ring_ib_execute,
476 .ib_test = &r100_ib_test,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200477 .irq_set = &rs600_irq_set,
478 .irq_process = &rs600_irq_process,
479 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200480 .fence_ring_emit = &r300_fence_ring_emit,
Jerome Glisse068a1172009-06-17 13:28:30 +0200481 .cs_parse = &r300_cs_parse,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200482 .copy_blit = &r100_copy_blit,
483 .copy_dma = &r300_copy_dma,
484 .copy = &r100_copy_blit,
485 .set_engine_clock = &radeon_atom_set_engine_clock,
486 .set_memory_clock = &radeon_atom_set_memory_clock,
487 .set_pcie_lanes = &rv370_set_pcie_lanes,
488 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000489 .set_surface_reg = r100_set_surface_reg,
490 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200491 .bandwidth_update = &r520_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200492};
493
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ib_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
529static struct radeon_asic r600_asic = {
530 .errata = NULL,
531 .init = &r600_init,
532 .fini = &r600_fini,
533 .suspend = &r600_suspend,
534 .resume = &r600_resume,
535 .cp_commit = &r600_cp_commit,
536 .vram_info = NULL,
537 .gpu_reset = &r600_gpu_reset,
538 .mc_init = NULL,
539 .mc_fini = NULL,
540 .wb_init = &r600_wb_init,
541 .wb_fini = &r600_wb_fini,
542 .gart_enable = NULL,
543 .gart_disable = NULL,
544 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
545 .gart_set_page = &rs600_gart_set_page,
546 .cp_init = NULL,
547 .cp_fini = NULL,
548 .cp_disable = NULL,
549 .ring_start = NULL,
550 .ring_test = &r600_ring_test,
551 .ring_ib_execute = &r600_ring_ib_execute,
552 .ib_test = &r600_ib_test,
553 .irq_set = &r600_irq_set,
554 .irq_process = &r600_irq_process,
555 .fence_ring_emit = &r600_fence_ring_emit,
556 .cs_parse = &r600_cs_parse,
557 .copy_blit = &r600_copy_blit,
558 .copy_dma = &r600_copy_blit,
Alex Deuchera3812872009-09-10 15:54:35 -0400559 .copy = &r600_copy_blit,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000560 .set_engine_clock = &radeon_atom_set_engine_clock,
561 .set_memory_clock = &radeon_atom_set_memory_clock,
562 .set_pcie_lanes = NULL,
563 .set_clock_gating = &radeon_atom_set_clock_gating,
564 .set_surface_reg = r600_set_surface_reg,
565 .clear_surface_reg = r600_clear_surface_reg,
566 .bandwidth_update = &r520_bandwidth_update,
567};
568
/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);
577
578static struct radeon_asic rv770_asic = {
579 .errata = NULL,
580 .init = &rv770_init,
581 .fini = &rv770_fini,
582 .suspend = &rv770_suspend,
583 .resume = &rv770_resume,
584 .cp_commit = &r600_cp_commit,
585 .vram_info = NULL,
586 .gpu_reset = &rv770_gpu_reset,
587 .mc_init = NULL,
588 .mc_fini = NULL,
589 .wb_init = &r600_wb_init,
590 .wb_fini = &r600_wb_fini,
591 .gart_enable = NULL,
592 .gart_disable = NULL,
593 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
594 .gart_set_page = &rs600_gart_set_page,
595 .cp_init = NULL,
596 .cp_fini = NULL,
597 .cp_disable = NULL,
598 .ring_start = NULL,
599 .ring_test = &r600_ring_test,
600 .ring_ib_execute = &r600_ring_ib_execute,
601 .ib_test = &r600_ib_test,
602 .irq_set = &r600_irq_set,
603 .irq_process = &r600_irq_process,
604 .fence_ring_emit = &r600_fence_ring_emit,
605 .cs_parse = &r600_cs_parse,
606 .copy_blit = &r600_copy_blit,
607 .copy_dma = &r600_copy_blit,
Alex Deuchera3812872009-09-10 15:54:35 -0400608 .copy = &r600_copy_blit,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000609 .set_engine_clock = &radeon_atom_set_engine_clock,
610 .set_memory_clock = &radeon_atom_set_memory_clock,
611 .set_pcie_lanes = NULL,
612 .set_clock_gating = &radeon_atom_set_clock_gating,
613 .set_surface_reg = r600_set_surface_reg,
614 .clear_surface_reg = r600_clear_surface_reg,
615 .bandwidth_update = &r520_bandwidth_update,
616};
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200617
618#endif