blob: b7030d7c03965de49835c689758cdb451f8f4b31 [file] [log] [blame]
/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
28#ifndef __RADEON_ASIC_H__
29#define __RADEON_ASIC_H__
30
31/*
32 * common functions
33 */
Rafał Miłecki74338742009-11-03 00:53:02 +010034uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020035void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
Rafał Miłecki5ea597f2009-12-17 13:50:09 +010036uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020037void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
38
Rafał Miłecki74338742009-11-03 00:53:02 +010039uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020040void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
Rafał Miłecki74338742009-11-03 00:53:02 +010041uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020042void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
43void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
44
45/*
Pauli Nieminen44ca7472010-02-11 17:25:47 +000046 * r100,rv100,rs100,rv200,rs200
Jerome Glisse771fe6b2009-06-05 14:42:42 +020047 */
Jerome Glissed4550902009-10-01 10:12:06 +020048extern int r100_init(struct radeon_device *rdev);
49extern void r100_fini(struct radeon_device *rdev);
50extern int r100_suspend(struct radeon_device *rdev);
51extern int r100_resume(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020052uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
53void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
Dave Airlie28d52042009-09-21 14:33:58 +100054void r100_vga_set_state(struct radeon_device *rdev, bool state);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020055int r100_gpu_reset(struct radeon_device *rdev);
Michel Dänzer7ed220d2009-08-13 11:10:51 +020056u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020057void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
58int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
Jerome Glisse3ce0a232009-09-08 10:10:24 +100059void r100_cp_commit(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020060void r100_ring_start(struct radeon_device *rdev);
61int r100_irq_set(struct radeon_device *rdev);
62int r100_irq_process(struct radeon_device *rdev);
63void r100_fence_ring_emit(struct radeon_device *rdev,
64 struct radeon_fence *fence);
65int r100_cs_parse(struct radeon_cs_parser *p);
66void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
67uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
68int r100_copy_blit(struct radeon_device *rdev,
69 uint64_t src_offset,
70 uint64_t dst_offset,
71 unsigned num_pages,
72 struct radeon_fence *fence);
Dave Airliee024e112009-06-24 09:48:08 +100073int r100_set_surface_reg(struct radeon_device *rdev, int reg,
74 uint32_t tiling_flags, uint32_t pitch,
75 uint32_t offset, uint32_t obj_size);
76int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
Jerome Glissec93bb852009-07-13 21:04:08 +020077void r100_bandwidth_update(struct radeon_device *rdev);
Jerome Glisse3ce0a232009-09-08 10:10:24 +100078void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
Jerome Glisse3ce0a232009-09-08 10:10:24 +100079int r100_ring_test(struct radeon_device *rdev);
Alex Deucher429770b2009-12-04 15:26:55 -050080void r100_hpd_init(struct radeon_device *rdev);
81void r100_hpd_fini(struct radeon_device *rdev);
82bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
83void r100_hpd_set_polarity(struct radeon_device *rdev,
84 enum radeon_hpd_id hpd);
Jerome Glisse771fe6b2009-06-05 14:42:42 +020085
86static struct radeon_asic r100_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +020087 .init = &r100_init,
Jerome Glissed4550902009-10-01 10:12:06 +020088 .fini = &r100_fini,
89 .suspend = &r100_suspend,
90 .resume = &r100_resume,
Dave Airlie28d52042009-09-21 14:33:58 +100091 .vga_set_state = &r100_vga_set_state,
Jerome Glisse771fe6b2009-06-05 14:42:42 +020092 .gpu_reset = &r100_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +020093 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
94 .gart_set_page = &r100_pci_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +100095 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +020096 .ring_start = &r100_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +100097 .ring_test = &r100_ring_test,
98 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +020099 .irq_set = &r100_irq_set,
100 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200101 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200102 .fence_ring_emit = &r100_fence_ring_emit,
103 .cs_parse = &r100_cs_parse,
104 .copy_blit = &r100_copy_blit,
105 .copy_dma = NULL,
106 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100107 .get_engine_clock = &radeon_legacy_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200108 .set_engine_clock = &radeon_legacy_set_engine_clock,
Rafał Miłecki5ea597f2009-12-17 13:50:09 +0100109 .get_memory_clock = &radeon_legacy_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200110 .set_memory_clock = NULL,
Alex Deucherc836a412009-12-23 10:07:50 -0500111 .get_pcie_lanes = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200112 .set_pcie_lanes = NULL,
113 .set_clock_gating = &radeon_legacy_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000114 .set_surface_reg = r100_set_surface_reg,
115 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200116 .bandwidth_update = &r100_bandwidth_update,
Alex Deucher429770b2009-12-04 15:26:55 -0500117 .hpd_init = &r100_hpd_init,
118 .hpd_fini = &r100_hpd_fini,
119 .hpd_sense = &r100_hpd_sense,
120 .hpd_set_polarity = &r100_hpd_set_polarity,
Jerome Glisse062b3892010-02-04 20:36:39 +0100121 .ioctl_wait_idle = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200122};
123
/*
 * r200,rv250,rs300,rv280
 */
extern int r200_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
132static struct radeon_asic r200_asic = {
133 .init = &r100_init,
134 .fini = &r100_fini,
135 .suspend = &r100_suspend,
136 .resume = &r100_resume,
137 .vga_set_state = &r100_vga_set_state,
138 .gpu_reset = &r100_gpu_reset,
139 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
140 .gart_set_page = &r100_pci_gart_set_page,
141 .cp_commit = &r100_cp_commit,
142 .ring_start = &r100_ring_start,
143 .ring_test = &r100_ring_test,
144 .ring_ib_execute = &r100_ring_ib_execute,
145 .irq_set = &r100_irq_set,
146 .irq_process = &r100_irq_process,
147 .get_vblank_counter = &r100_get_vblank_counter,
148 .fence_ring_emit = &r100_fence_ring_emit,
149 .cs_parse = &r100_cs_parse,
150 .copy_blit = &r100_copy_blit,
151 .copy_dma = &r200_copy_dma,
152 .copy = &r100_copy_blit,
153 .get_engine_clock = &radeon_legacy_get_engine_clock,
154 .set_engine_clock = &radeon_legacy_set_engine_clock,
155 .get_memory_clock = &radeon_legacy_get_memory_clock,
156 .set_memory_clock = NULL,
157 .set_pcie_lanes = NULL,
158 .set_clock_gating = &radeon_legacy_set_clock_gating,
159 .set_surface_reg = r100_set_surface_reg,
160 .clear_surface_reg = r100_clear_surface_reg,
161 .bandwidth_update = &r100_bandwidth_update,
162 .hpd_init = &r100_hpd_init,
163 .hpd_fini = &r100_hpd_fini,
164 .hpd_sense = &r100_hpd_sense,
165 .hpd_set_polarity = &r100_hpd_set_polarity,
166 .ioctl_wait_idle = NULL,
167};
168
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200169
/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200189static struct radeon_asic r300_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +0200190 .init = &r300_init,
Jerome Glisse207bf9e2009-09-30 15:35:32 +0200191 .fini = &r300_fini,
192 .suspend = &r300_suspend,
193 .resume = &r300_resume,
Dave Airlie28d52042009-09-21 14:33:58 +1000194 .vga_set_state = &r100_vga_set_state,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200195 .gpu_reset = &r300_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200196 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
197 .gart_set_page = &r100_pci_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000198 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200199 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000200 .ring_test = &r100_ring_test,
201 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200202 .irq_set = &r100_irq_set,
203 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200204 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200205 .fence_ring_emit = &r300_fence_ring_emit,
206 .cs_parse = &r300_cs_parse,
207 .copy_blit = &r100_copy_blit,
Pauli Nieminen44ca7472010-02-11 17:25:47 +0000208 .copy_dma = &r200_copy_dma,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200209 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100210 .get_engine_clock = &radeon_legacy_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200211 .set_engine_clock = &radeon_legacy_set_engine_clock,
Rafał Miłecki5ea597f2009-12-17 13:50:09 +0100212 .get_memory_clock = &radeon_legacy_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200213 .set_memory_clock = NULL,
Alex Deucherc836a412009-12-23 10:07:50 -0500214 .get_pcie_lanes = &rv370_get_pcie_lanes,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200215 .set_pcie_lanes = &rv370_set_pcie_lanes,
216 .set_clock_gating = &radeon_legacy_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000217 .set_surface_reg = r100_set_surface_reg,
218 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200219 .bandwidth_update = &r100_bandwidth_update,
Alex Deucher429770b2009-12-04 15:26:55 -0500220 .hpd_init = &r100_hpd_init,
221 .hpd_fini = &r100_hpd_fini,
222 .hpd_sense = &r100_hpd_sense,
223 .hpd_set_polarity = &r100_hpd_set_polarity,
Jerome Glisse062b3892010-02-04 20:36:39 +0100224 .ioctl_wait_idle = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200225};
226
Pauli Nieminend80eeb02010-02-11 17:55:35 +0000227
228static struct radeon_asic r300_asic_pcie = {
229 .init = &r300_init,
230 .fini = &r300_fini,
231 .suspend = &r300_suspend,
232 .resume = &r300_resume,
233 .vga_set_state = &r100_vga_set_state,
234 .gpu_reset = &r300_gpu_reset,
235 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
236 .gart_set_page = &rv370_pcie_gart_set_page,
237 .cp_commit = &r100_cp_commit,
238 .ring_start = &r300_ring_start,
239 .ring_test = &r100_ring_test,
240 .ring_ib_execute = &r100_ring_ib_execute,
241 .irq_set = &r100_irq_set,
242 .irq_process = &r100_irq_process,
243 .get_vblank_counter = &r100_get_vblank_counter,
244 .fence_ring_emit = &r300_fence_ring_emit,
245 .cs_parse = &r300_cs_parse,
246 .copy_blit = &r100_copy_blit,
247 .copy_dma = &r200_copy_dma,
248 .copy = &r100_copy_blit,
249 .get_engine_clock = &radeon_legacy_get_engine_clock,
250 .set_engine_clock = &radeon_legacy_set_engine_clock,
251 .get_memory_clock = &radeon_legacy_get_memory_clock,
252 .set_memory_clock = NULL,
253 .set_pcie_lanes = &rv370_set_pcie_lanes,
254 .set_clock_gating = &radeon_legacy_set_clock_gating,
255 .set_surface_reg = r100_set_surface_reg,
256 .clear_surface_reg = r100_clear_surface_reg,
257 .bandwidth_update = &r100_bandwidth_update,
258 .hpd_init = &r100_hpd_init,
259 .hpd_fini = &r100_hpd_fini,
260 .hpd_sense = &r100_hpd_sense,
261 .hpd_set_polarity = &r100_hpd_set_polarity,
262 .ioctl_wait_idle = NULL,
263};
264
/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200272static struct radeon_asic r420_asic = {
Jerome Glisse9f022dd2009-09-11 15:35:22 +0200273 .init = &r420_init,
274 .fini = &r420_fini,
275 .suspend = &r420_suspend,
276 .resume = &r420_resume,
Dave Airlie28d52042009-09-21 14:33:58 +1000277 .vga_set_state = &r100_vga_set_state,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200278 .gpu_reset = &r300_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200279 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
280 .gart_set_page = &rv370_pcie_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000281 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200282 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000283 .ring_test = &r100_ring_test,
284 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200285 .irq_set = &r100_irq_set,
286 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200287 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200288 .fence_ring_emit = &r300_fence_ring_emit,
289 .cs_parse = &r300_cs_parse,
290 .copy_blit = &r100_copy_blit,
Pauli Nieminen44ca7472010-02-11 17:25:47 +0000291 .copy_dma = &r200_copy_dma,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200292 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100293 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200294 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100295 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200296 .set_memory_clock = &radeon_atom_set_memory_clock,
Alex Deucherc836a412009-12-23 10:07:50 -0500297 .get_pcie_lanes = &rv370_get_pcie_lanes,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200298 .set_pcie_lanes = &rv370_set_pcie_lanes,
299 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000300 .set_surface_reg = r100_set_surface_reg,
301 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200302 .bandwidth_update = &r100_bandwidth_update,
Alex Deucher429770b2009-12-04 15:26:55 -0500303 .hpd_init = &r100_hpd_init,
304 .hpd_fini = &r100_hpd_fini,
305 .hpd_sense = &r100_hpd_sense,
306 .hpd_set_polarity = &r100_hpd_set_polarity,
Jerome Glisse062b3892010-02-04 20:36:39 +0100307 .ioctl_wait_idle = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200308};
309
310
/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
322static struct radeon_asic rs400_asic = {
Jerome Glisseca6ffc62009-10-01 10:20:52 +0200323 .init = &rs400_init,
324 .fini = &rs400_fini,
325 .suspend = &rs400_suspend,
326 .resume = &rs400_resume,
Dave Airlie28d52042009-09-21 14:33:58 +1000327 .vga_set_state = &r100_vga_set_state,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200328 .gpu_reset = &r300_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200329 .gart_tlb_flush = &rs400_gart_tlb_flush,
330 .gart_set_page = &rs400_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000331 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200332 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000333 .ring_test = &r100_ring_test,
334 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200335 .irq_set = &r100_irq_set,
336 .irq_process = &r100_irq_process,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200337 .get_vblank_counter = &r100_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200338 .fence_ring_emit = &r300_fence_ring_emit,
339 .cs_parse = &r300_cs_parse,
340 .copy_blit = &r100_copy_blit,
Pauli Nieminen44ca7472010-02-11 17:25:47 +0000341 .copy_dma = &r200_copy_dma,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200342 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100343 .get_engine_clock = &radeon_legacy_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200344 .set_engine_clock = &radeon_legacy_set_engine_clock,
Rafał Miłecki5ea597f2009-12-17 13:50:09 +0100345 .get_memory_clock = &radeon_legacy_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200346 .set_memory_clock = NULL,
Alex Deucherc836a412009-12-23 10:07:50 -0500347 .get_pcie_lanes = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200348 .set_pcie_lanes = NULL,
349 .set_clock_gating = &radeon_legacy_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000350 .set_surface_reg = r100_set_surface_reg,
351 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200352 .bandwidth_update = &r100_bandwidth_update,
Alex Deucher429770b2009-12-04 15:26:55 -0500353 .hpd_init = &r100_hpd_init,
354 .hpd_fini = &r100_hpd_fini,
355 .hpd_sense = &r100_hpd_sense,
356 .hpd_set_polarity = &r100_hpd_set_polarity,
Jerome Glisse062b3892010-02-04 20:36:39 +0100357 .ioctl_wait_idle = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200358};
359
360
361/*
362 * rs600.
363 */
Jerome Glissec010f802009-09-30 22:09:06 +0200364extern int rs600_init(struct radeon_device *rdev);
365extern void rs600_fini(struct radeon_device *rdev);
366extern int rs600_suspend(struct radeon_device *rdev);
367extern int rs600_resume(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200368int rs600_irq_set(struct radeon_device *rdev);
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200369int rs600_irq_process(struct radeon_device *rdev);
370u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200371void rs600_gart_tlb_flush(struct radeon_device *rdev);
372int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
373uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
374void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
Jerome Glissec93bb852009-07-13 21:04:08 +0200375void rs600_bandwidth_update(struct radeon_device *rdev);
Alex Deucher429770b2009-12-04 15:26:55 -0500376void rs600_hpd_init(struct radeon_device *rdev);
377void rs600_hpd_fini(struct radeon_device *rdev);
378bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
379void rs600_hpd_set_polarity(struct radeon_device *rdev,
380 enum radeon_hpd_id hpd);
381
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200382static struct radeon_asic rs600_asic = {
Dave Airlie3f7dc91a2009-08-27 11:10:15 +1000383 .init = &rs600_init,
Jerome Glissec010f802009-09-30 22:09:06 +0200384 .fini = &rs600_fini,
385 .suspend = &rs600_suspend,
386 .resume = &rs600_resume,
Dave Airlie28d52042009-09-21 14:33:58 +1000387 .vga_set_state = &r100_vga_set_state,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200388 .gpu_reset = &r300_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200389 .gart_tlb_flush = &rs600_gart_tlb_flush,
390 .gart_set_page = &rs600_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000391 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200392 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000393 .ring_test = &r100_ring_test,
394 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200395 .irq_set = &rs600_irq_set,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200396 .irq_process = &rs600_irq_process,
397 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200398 .fence_ring_emit = &r300_fence_ring_emit,
399 .cs_parse = &r300_cs_parse,
400 .copy_blit = &r100_copy_blit,
Pauli Nieminen44ca7472010-02-11 17:25:47 +0000401 .copy_dma = &r200_copy_dma,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200402 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100403 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200404 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100405 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200406 .set_memory_clock = &radeon_atom_set_memory_clock,
Alex Deucherc836a412009-12-23 10:07:50 -0500407 .get_pcie_lanes = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200408 .set_pcie_lanes = NULL,
409 .set_clock_gating = &radeon_atom_set_clock_gating,
Jerome Glissec93bb852009-07-13 21:04:08 +0200410 .bandwidth_update = &rs600_bandwidth_update,
Alex Deucher429770b2009-12-04 15:26:55 -0500411 .hpd_init = &rs600_hpd_init,
412 .hpd_fini = &rs600_hpd_fini,
413 .hpd_sense = &rs600_hpd_sense,
414 .hpd_set_polarity = &rs600_hpd_set_polarity,
Jerome Glisse062b3892010-02-04 20:36:39 +0100415 .ioctl_wait_idle = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200416};
417
418
/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200429static struct radeon_asic rs690_asic = {
Jerome Glisse3bc68532009-10-01 09:39:24 +0200430 .init = &rs690_init,
431 .fini = &rs690_fini,
432 .suspend = &rs690_suspend,
433 .resume = &rs690_resume,
Dave Airlie28d52042009-09-21 14:33:58 +1000434 .vga_set_state = &r100_vga_set_state,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200435 .gpu_reset = &r300_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200436 .gart_tlb_flush = &rs400_gart_tlb_flush,
437 .gart_set_page = &rs400_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000438 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200439 .ring_start = &r300_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000440 .ring_test = &r100_ring_test,
441 .ring_ib_execute = &r100_ring_ib_execute,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200442 .irq_set = &rs600_irq_set,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200443 .irq_process = &rs600_irq_process,
444 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200445 .fence_ring_emit = &r300_fence_ring_emit,
446 .cs_parse = &r300_cs_parse,
447 .copy_blit = &r100_copy_blit,
Pauli Nieminen44ca7472010-02-11 17:25:47 +0000448 .copy_dma = &r200_copy_dma,
449 .copy = &r200_copy_dma,
Rafał Miłecki74338742009-11-03 00:53:02 +0100450 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200451 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100452 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200453 .set_memory_clock = &radeon_atom_set_memory_clock,
Alex Deucherc836a412009-12-23 10:07:50 -0500454 .get_pcie_lanes = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200455 .set_pcie_lanes = NULL,
456 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000457 .set_surface_reg = r100_set_surface_reg,
458 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200459 .bandwidth_update = &rs690_bandwidth_update,
Alex Deucher429770b2009-12-04 15:26:55 -0500460 .hpd_init = &rs600_hpd_init,
461 .hpd_fini = &rs600_hpd_fini,
462 .hpd_sense = &rs600_hpd_sense,
463 .hpd_set_polarity = &rs600_hpd_set_polarity,
Jerome Glisse062b3892010-02-04 20:36:39 +0100464 .ioctl_wait_idle = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200465};
466
467
/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200482static struct radeon_asic rv515_asic = {
Jerome Glisse068a1172009-06-17 13:28:30 +0200483 .init = &rv515_init,
Jerome Glissed39c3b82009-09-28 18:34:43 +0200484 .fini = &rv515_fini,
485 .suspend = &rv515_suspend,
486 .resume = &rv515_resume,
Dave Airlie28d52042009-09-21 14:33:58 +1000487 .vga_set_state = &r100_vga_set_state,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200488 .gpu_reset = &rv515_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200489 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
490 .gart_set_page = &rv370_pcie_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000491 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200492 .ring_start = &rv515_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000493 .ring_test = &r100_ring_test,
494 .ring_ib_execute = &r100_ring_ib_execute,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200495 .irq_set = &rs600_irq_set,
496 .irq_process = &rs600_irq_process,
497 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200498 .fence_ring_emit = &r300_fence_ring_emit,
Jerome Glisse068a1172009-06-17 13:28:30 +0200499 .cs_parse = &r300_cs_parse,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200500 .copy_blit = &r100_copy_blit,
Pauli Nieminen44ca7472010-02-11 17:25:47 +0000501 .copy_dma = &r200_copy_dma,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200502 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100503 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200504 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100505 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200506 .set_memory_clock = &radeon_atom_set_memory_clock,
Alex Deucherc836a412009-12-23 10:07:50 -0500507 .get_pcie_lanes = &rv370_get_pcie_lanes,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200508 .set_pcie_lanes = &rv370_set_pcie_lanes,
509 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000510 .set_surface_reg = r100_set_surface_reg,
511 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissec93bb852009-07-13 21:04:08 +0200512 .bandwidth_update = &rv515_bandwidth_update,
Alex Deucher429770b2009-12-04 15:26:55 -0500513 .hpd_init = &rs600_hpd_init,
514 .hpd_fini = &rs600_hpd_fini,
515 .hpd_sense = &rs600_hpd_sense,
516 .hpd_set_polarity = &rs600_hpd_set_polarity,
Jerome Glisse062b3892010-02-04 20:36:39 +0100517 .ioctl_wait_idle = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200518};
519
520
/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200526static struct radeon_asic r520_asic = {
Jerome Glissed39c3b82009-09-28 18:34:43 +0200527 .init = &r520_init,
Jerome Glissef0ed1f62009-09-28 20:39:19 +0200528 .fini = &rv515_fini,
529 .suspend = &rv515_suspend,
530 .resume = &r520_resume,
Dave Airlie28d52042009-09-21 14:33:58 +1000531 .vga_set_state = &r100_vga_set_state,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200532 .gpu_reset = &rv515_gpu_reset,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200533 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
534 .gart_set_page = &rv370_pcie_gart_set_page,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000535 .cp_commit = &r100_cp_commit,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200536 .ring_start = &rv515_ring_start,
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000537 .ring_test = &r100_ring_test,
538 .ring_ib_execute = &r100_ring_ib_execute,
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200539 .irq_set = &rs600_irq_set,
540 .irq_process = &rs600_irq_process,
541 .get_vblank_counter = &rs600_get_vblank_counter,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200542 .fence_ring_emit = &r300_fence_ring_emit,
Jerome Glisse068a1172009-06-17 13:28:30 +0200543 .cs_parse = &r300_cs_parse,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200544 .copy_blit = &r100_copy_blit,
Pauli Nieminen44ca7472010-02-11 17:25:47 +0000545 .copy_dma = &r200_copy_dma,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200546 .copy = &r100_copy_blit,
Rafał Miłecki74338742009-11-03 00:53:02 +0100547 .get_engine_clock = &radeon_atom_get_engine_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200548 .set_engine_clock = &radeon_atom_set_engine_clock,
Rafał Miłecki74338742009-11-03 00:53:02 +0100549 .get_memory_clock = &radeon_atom_get_memory_clock,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200550 .set_memory_clock = &radeon_atom_set_memory_clock,
Alex Deucherc836a412009-12-23 10:07:50 -0500551 .get_pcie_lanes = &rv370_get_pcie_lanes,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200552 .set_pcie_lanes = &rv370_set_pcie_lanes,
553 .set_clock_gating = &radeon_atom_set_clock_gating,
Dave Airliee024e112009-06-24 09:48:08 +1000554 .set_surface_reg = r100_set_surface_reg,
555 .clear_surface_reg = r100_clear_surface_reg,
Jerome Glissef0ed1f62009-09-28 20:39:19 +0200556 .bandwidth_update = &rv515_bandwidth_update,
Alex Deucher429770b2009-12-04 15:26:55 -0500557 .hpd_init = &rs600_hpd_init,
558 .hpd_fini = &rs600_hpd_fini,
559 .hpd_sense = &rs600_hpd_sense,
560 .hpd_set_polarity = &rs600_hpd_set_polarity,
Jerome Glisse062b3892010-02-04 20:36:39 +0100561 .ioctl_wait_idle = NULL,
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200562};
563
564/*
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000565 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200566 */
/* r600-family entry points, implemented elsewhere (presumably r600.c /
 * r600_cs.c — confirm against the driver sources). Shared by the r600,
 * rv770 and (partially) evergreen dispatch tables below. */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
/* writeback ring setup/teardown */
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
/* PCIE port register accessors */
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
/* tiled-surface register management */
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
/* hotplug-detect pin handling */
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
Jerome Glisse3ce0a232009-09-08 10:10:24 +1000604
/*
 * ASIC dispatch table for r600/rv610/rv630/rv620/rv635/rv670/rs780/rs880.
 * Uses the r600 CP/IRQ/GART paths with rs600 gart_set_page and vblank
 * counter, and AtomBIOS clock control.
 */
static struct radeon_asic r600_asic = {
	.init = &r600_init,
	.fini = &r600_fini,
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	/* NOTE: copy_dma intentionally routed through the blitter here,
	 * even though r600_copy_dma is declared above — presumably the DMA
	 * path is not used on this family; confirm before changing. */
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = NULL,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
	.ioctl_wait_idle = r600_ioctl_wait_idle,
};
641
642/*
643 * rv770,rv730,rv710,rv740
644 */
/* rv770-family entry points (rv770/rv730/rv710/rv740); everything not
 * listed here is shared with the r600 paths above. */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

/*
 * ASIC dispatch table for rv770/rv730/rv710/rv740: rv770-specific
 * init/fini/suspend/resume/reset, otherwise the shared r600 paths.
 */
static struct radeon_asic rv770_asic = {
	.init = &rv770_init,
	.fini = &rv770_fini,
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,	/* DMA copies also go through the blitter */
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	/* unlike r600_asic, clock gating is enabled here — presumably safe
	 * on rv770; confirm before unifying with the r600 table */
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
	.ioctl_wait_idle = r600_ioctl_wait_idle,
};
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200687
Alex Deucherbcc1c2a2010-01-12 17:54:34 -0500688/*
689 * evergreen
690 */
/* evergreen entry points implemented so far (presumably in evergreen.c —
 * confirm against the driver sources); remaining hooks are still NULL in
 * the table below. */
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
int evergreen_gpu_reset(struct radeon_device *rdev);
void evergreen_bandwidth_update(struct radeon_device *rdev);
/* hotplug-detect pin handling */
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd);
702
703static struct radeon_asic evergreen_asic = {
704 .init = &evergreen_init,
705 .fini = &evergreen_fini,
706 .suspend = &evergreen_suspend,
707 .resume = &evergreen_resume,
708 .cp_commit = NULL,
709 .gpu_reset = &evergreen_gpu_reset,
710 .vga_set_state = &r600_vga_set_state,
711 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
712 .gart_set_page = &rs600_gart_set_page,
713 .ring_test = NULL,
714 .ring_ib_execute = NULL,
715 .irq_set = NULL,
716 .irq_process = NULL,
717 .get_vblank_counter = NULL,
718 .fence_ring_emit = NULL,
719 .cs_parse = NULL,
720 .copy_blit = NULL,
721 .copy_dma = NULL,
722 .copy = NULL,
723 .get_engine_clock = &radeon_atom_get_engine_clock,
724 .set_engine_clock = &radeon_atom_set_engine_clock,
725 .get_memory_clock = &radeon_atom_get_memory_clock,
726 .set_memory_clock = &radeon_atom_set_memory_clock,
727 .set_pcie_lanes = NULL,
728 .set_clock_gating = NULL,
729 .set_surface_reg = r600_set_surface_reg,
730 .clear_surface_reg = r600_clear_surface_reg,
731 .bandwidth_update = &evergreen_bandwidth_update,
732 .hpd_init = &evergreen_hpd_init,
733 .hpd_fini = &evergreen_hpd_fini,
734 .hpd_sense = &evergreen_hpd_sense,
735 .hpd_set_polarity = &evergreen_hpd_set_polarity,
736};
737
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200738#endif