blob: 94991edc839f274379f8717080b683bc571ea871 [file] [log] [blame]
Jerome Glisse771fe6b2009-06-05 14:42:42 +02001/*
2 * Copyright 2008 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 * Copyright 2009 Jerome Glisse.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 * OTHER DEALINGS IN THE SOFTWARE.
23 *
24 * Authors: Dave Airlie
25 * Alex Deucher
26 * Jerome Glisse
27 */
28#ifndef __RADEON_ASIC_H__
29#define __RADEON_ASIC_H__
30
/*
 * common functions
 */
/* Engine-clock accessors used by the non-ATOM ("legacy" BIOS) tables below. */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

/* Engine/memory-clock accessors used by the ATOM-BIOS tables below. */
uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
/* Full-driver entry points for the r100 family. */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
/* MMIO / PLL register accessors. */
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
/* PCI GART (reused by the r300 table as well). */
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* CP ring / IRQ / fence / CS helpers, shared by most pre-r600 tables below. */
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
/* Surface (tiling) register management, shared by all pre-r600 tables. */
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);

79static struct radeon_asic r100_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +020080 .init = &r100_init,
Jerome Glissed4550902009-10-01 10:12:06 +020081 .fini = &r100_fini,
82 .suspend = &r100_suspend,
83 .resume = &r100_resume,
Jerome Glisse771fe6b2009-06-05 14:42:42 +020084 .gpu_reset = &r100_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +020085 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
86 .gart_set_page = &r100_pci_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +100087 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +020088 .ring_start = &r100_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +100089 .ring_test = &r100_ring_test,
90 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +020091 .irq_set = &r100_irq_set,
92 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +020093 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +020094 .fence_ring_emit = &r100_fence_ring_emit,
95 .cs_parse = &r100_cs_parse,
96 .copy_blit = &r100_copy_blit,
97 .copy_dma = NULL,
98 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +010099 .get_engine_clock = &radeon_legacy_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200100 .set_engine_clock = &radeon_legacy_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100101 .get_memory_clock = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200102 .set_memory_clock = NULL,
103 .set_pcie_lanes = NULL,
104 .set_clock_gating = &radeon_legacy_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000105 .set_surface_reg = r100_set_surface_reg,
106 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200107 .bandwidth_update = &r100_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200108};
109
110
/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
/* rv370 PCIE GART / register helpers, also reused by the r420+ tables. */
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200133static struct radeon_asic r300_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +0200134 .init = &r300_init,
Jerome Glisse207bf9e2009-09-30 15:35:32 +0200135 .fini = &r300_fini,
136 .suspend = &r300_suspend,
137 .resume = &r300_resume,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200138 .gpu_reset = &r300_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200139 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
140 .gart_set_page = &r100_pci_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000141 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200142 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000143 .ring_test = &r100_ring_test,
144 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200145 .irq_set = &r100_irq_set,
146 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200147 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200148 .fence_ring_emit = &r300_fence_ring_emit,
149 .cs_parse = &r300_cs_parse,
150 .copy_blit = &r100_copy_blit,
151 .copy_dma = &r300_copy_dma,
152 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100153 .get_engine_clock = &radeon_legacy_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200154 .set_engine_clock = &radeon_legacy_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100155 .get_memory_clock = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200156 .set_memory_clock = NULL,
157 .set_pcie_lanes = &rv370_set_pcie_lanes,
158 .set_clock_gating = &radeon_legacy_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000159 .set_surface_reg = r100_set_surface_reg,
160 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200161 .bandwidth_update = &r100_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200162};
163
/*
 * r420,r423,rv410
 */
/* r420 only needs its own driver entry points; everything else in its
 * table below is shared r100/r300/rv370 code. */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200171static struct radeon_asic r420_asic = {
Jerome Glisse9f022dd2009-09-11 15:35:22 +0200172 .init = &r420_init,
173 .fini = &r420_fini,
174 .suspend = &r420_suspend,
175 .resume = &r420_resume,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200176 .gpu_reset = &r300_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200177 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
178 .gart_set_page = &rv370_pcie_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000179 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200180 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000181 .ring_test = &r100_ring_test,
182 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200183 .irq_set = &r100_irq_set,
184 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200185 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200186 .fence_ring_emit = &r300_fence_ring_emit,
187 .cs_parse = &r300_cs_parse,
188 .copy_blit = &r100_copy_blit,
189 .copy_dma = &r300_copy_dma,
190 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100191 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200192 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100193 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200194 .set_memory_clock = &radeon_atom_set_memory_clock,
195 .set_pcie_lanes = &rv370_set_pcie_lanes,
196 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000197 .set_surface_reg = r100_set_surface_reg,
198 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200199 .bandwidth_update = &r100_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200200};
201
202
/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
/* IGP GART and MC register accessors (GART hooks also reused by rs690). */
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
214static struct radeon_asic rs400_asic = {
Jerome Glisseca6ffc62009-10-01 10:20:52 +0200215 .init = &rs400_init,
216 .fini = &rs400_fini,
217 .suspend = &rs400_suspend,
218 .resume = &rs400_resume,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200219 .gpu_reset = &r300_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200220 .gart_tlb_flush = &rs400_gart_tlb_flush,
221 .gart_set_page = &rs400_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000222 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200223 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000224 .ring_test = &r100_ring_test,
225 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200226 .irq_set = &r100_irq_set,
227 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200228 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200229 .fence_ring_emit = &r300_fence_ring_emit,
230 .cs_parse = &r300_cs_parse,
231 .copy_blit = &r100_copy_blit,
232 .copy_dma = &r300_copy_dma,
233 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100234 .get_engine_clock = &radeon_legacy_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200235 .set_engine_clock = &radeon_legacy_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100236 .get_memory_clock = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200237 .set_memory_clock = NULL,
238 .set_pcie_lanes = NULL,
239 .set_clock_gating = &radeon_legacy_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000240 .set_surface_reg = r100_set_surface_reg,
241 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200242 .bandwidth_update = &r100_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200243};
244
245
/*
 * rs600.
 */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
/* rs600 has its own IRQ/vblank handling, reused by rs690/rv515/r520. */
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
/* rs600 GART (gart_set_page is also reused by the r600/rv770 tables). */
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200261static struct radeon_asic rs600_asic = {
Dave Airlie3f7dc91a2009-08-27 11:10:15 +1000262 .init = &rs600_init,
Jerome Glissec010f802009-09-30 22:09:06 +0200263 .fini = &rs600_fini,
264 .suspend = &rs600_suspend,
265 .resume = &rs600_resume,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200266 .gpu_reset = &r300_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200267 .gart_tlb_flush = &rs600_gart_tlb_flush,
268 .gart_set_page = &rs600_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000269 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200270 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000271 .ring_test = &r100_ring_test,
272 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200273 .irq_set = &rs600_irq_set,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200274 .irq_process = &rs600_irq_process,
275 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200276 .fence_ring_emit = &r300_fence_ring_emit,
277 .cs_parse = &r300_cs_parse,
278 .copy_blit = &r100_copy_blit,
279 .copy_dma = &r300_copy_dma,
280 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100281 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200282 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100283 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200284 .set_memory_clock = &radeon_atom_set_memory_clock,
285 .set_pcie_lanes = NULL,
286 .set_clock_gating = &radeon_atom_set_clock_gating,
Jerome Glissec93bb852009-07-13 21:04:08 +0200287 .bandwidth_update = &rs600_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200288};
289
290
/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
/* MC register accessors and display-bandwidth tuning for the IGP. */
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200301static struct radeon_asic rs690_asic = {
Jerome Glisse3bc68532009-10-01 09:39:24 +0200302 .init = &rs690_init,
303 .fini = &rs690_fini,
304 .suspend = &rs690_suspend,
305 .resume = &rs690_resume,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200306 .gpu_reset = &r300_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200307 .gart_tlb_flush = &rs400_gart_tlb_flush,
308 .gart_set_page = &rs400_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000309 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200310 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000311 .ring_test = &r100_ring_test,
312 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200313 .irq_set = &rs600_irq_set,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200314 .irq_process = &rs600_irq_process,
315 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200316 .fence_ring_emit = &r300_fence_ring_emit,
317 .cs_parse = &r300_cs_parse,
318 .copy_blit = &r100_copy_blit,
319 .copy_dma = &r300_copy_dma,
320 .copy = &r300_copy_dma,
Rafał Miłecki74338742009-11-03 00:53:02 +0100321 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200322 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100323 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200324 .set_memory_clock = &radeon_atom_set_memory_clock,
325 .set_pcie_lanes = NULL,
326 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000327 .set_surface_reg = r100_set_surface_reg,
328 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200329 .bandwidth_update = &rs690_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200330};
331
332
/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
/* MC / PCIE register accessors. */
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
/* Bandwidth update is also reused by the r520/r600/rv770 tables below. */
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200347static struct radeon_asic rv515_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +0200348 .init = &rv515_init,
Jerome Glissed39c3b82009-09-28 18:34:43 +0200349 .fini = &rv515_fini,
350 .suspend = &rv515_suspend,
351 .resume = &rv515_resume,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200352 .gpu_reset = &rv515_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200353 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
354 .gart_set_page = &rv370_pcie_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000355 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200356 .ring_start = &rv515_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000357 .ring_test = &r100_ring_test,
358 .ring_ib_execute = &r100_ring_ib_execute,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200359 .irq_set = &rs600_irq_set,
360 .irq_process = &rs600_irq_process,
361 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200362 .fence_ring_emit = &r300_fence_ring_emit,
Jerome Glisse068a1172009-06-17 13:28:30 +0200363 .cs_parse = &r300_cs_parse,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200364 .copy_blit = &r100_copy_blit,
365 .copy_dma = &r300_copy_dma,
366 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100367 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200368 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100369 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200370 .set_memory_clock = &radeon_atom_set_memory_clock,
371 .set_pcie_lanes = &rv370_set_pcie_lanes,
372 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000373 .set_surface_reg = r100_set_surface_reg,
374 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200375 .bandwidth_update = &rv515_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200376};
377
378
/*
 * r520,rv530,rv560,rv570,r580
 */
/* r520 only needs its own init/resume; the rest of its table reuses
 * rv515 (and shared r100/r300/rs600/rv370) code. */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200384static struct radeon_asic r520_asic = {
Jerome Glissed39c3b82009-09-28 18:34:43 +0200385 .init = &r520_init,
Jerome Glissef0ed1f62009-09-28 20:39:19 +0200386 .fini = &rv515_fini,
387 .suspend = &rv515_suspend,
388 .resume = &r520_resume,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200389 .gpu_reset = &rv515_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200390 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
391 .gart_set_page = &rv370_pcie_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000392 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200393 .ring_start = &rv515_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000394 .ring_test = &r100_ring_test,
395 .ring_ib_execute = &r100_ring_ib_execute,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200396 .irq_set = &rs600_irq_set,
397 .irq_process = &rs600_irq_process,
398 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200399 .fence_ring_emit = &r300_fence_ring_emit,
Jerome Glisse068a1172009-06-17 13:28:30 +0200400 .cs_parse = &r300_cs_parse,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200401 .copy_blit = &r100_copy_blit,
402 .copy_dma = &r300_copy_dma,
403 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100404 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200405 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100406 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200407 .set_memory_clock = &radeon_atom_set_memory_clock,
408 .set_pcie_lanes = &rv370_set_pcie_lanes,
409 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000410 .set_surface_reg = r100_set_surface_reg,
411 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissef0ed1f62009-09-28 20:39:19 +0200412 .bandwidth_update = &rv515_bandwidth_update,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200413};
414
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
/* Writeback buffer setup/teardown. */
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
/* PCIE port register accessors. */
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
/* Declared but not wired into the tables below, which use the blitter
 * for both copy hooks. */
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);

449static struct radeon_asic r600_asic = {
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000450 .init = &r600_init,
451 .fini = &r600_fini,
452 .suspend = &r600_suspend,
453 .resume = &r600_resume,
454 .cp_commit = &r600_cp_commit,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000455 .gpu_reset = &r600_gpu_reset,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000456 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
457 .gart_set_page = &rs600_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000458 .ring_test = &r600_ring_test,
459 .ring_ib_execute = &r600_ring_ib_execute,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000460 .irq_set = &r600_irq_set,
461 .irq_process = &r600_irq_process,
462 .fence_ring_emit = &r600_fence_ring_emit,
463 .cs_parse = &r600_cs_parse,
464 .copy_blit = &r600_copy_blit,
465 .copy_dma = &r600_copy_blit,
Alex Deuchera3812872009-09-10 15:54:35 -0400466 .copy = &r600_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100467 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000468 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100469 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000470 .set_memory_clock = &radeon_atom_set_memory_clock,
471 .set_pcie_lanes = NULL,
472 .set_clock_gating = &radeon_atom_set_clock_gating,
473 .set_surface_reg = r600_set_surface_reg,
474 .clear_surface_reg = r600_clear_surface_reg,
Jerome Glissef0ed1f62009-09-28 20:39:19 +0200475 .bandwidth_update = &rv515_bandwidth_update,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000476};
477
/*
 * rv770,rv730,rv710,rv740
 */
/* rv770 only needs its own driver entry points and GPU reset; the rest
 * of its table below reuses the r600 (and rs600/rv515) code. */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

487static struct radeon_asic rv770_asic = {
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000488 .init = &rv770_init,
489 .fini = &rv770_fini,
490 .suspend = &rv770_suspend,
491 .resume = &rv770_resume,
492 .cp_commit = &r600_cp_commit,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000493 .gpu_reset = &rv770_gpu_reset,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000494 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
495 .gart_set_page = &rs600_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000496 .ring_test = &r600_ring_test,
497 .ring_ib_execute = &r600_ring_ib_execute,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000498 .irq_set = &r600_irq_set,
499 .irq_process = &r600_irq_process,
500 .fence_ring_emit = &r600_fence_ring_emit,
501 .cs_parse = &r600_cs_parse,
502 .copy_blit = &r600_copy_blit,
503 .copy_dma = &r600_copy_blit,
Alex Deuchera3812872009-09-10 15:54:35 -0400504 .copy = &r600_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100505 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000506 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100507 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000508 .set_memory_clock = &radeon_atom_set_memory_clock,
509 .set_pcie_lanes = NULL,
510 .set_clock_gating = &radeon_atom_set_clock_gating,
511 .set_surface_reg = r600_set_surface_reg,
512 .clear_surface_reg = r600_clear_surface_reg,
Jerome Glissef0ed1f62009-09-28 20:39:19 +0200513 .bandwidth_update = &rv515_bandwidth_update,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000514};
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200515
516#endif