/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
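
/*
 * Two flavours of shared clock helpers are declared above: the
 * radeon_legacy_* functions are used for the older, pre-AtomBIOS parts,
 * while the radeon_atom_* ones go through AtomBIOS command tables.  The
 * per-ASIC tables below plug in whichever flavour matches the family.
 */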

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);

static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.fini = &r100_fini,
	.suspend = &r100_suspend,
	.resume = &r100_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = NULL,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
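
/*
 * Each family below fills one of these static radeon_asic tables.  The
 * device setup code is expected to point rdev->asic at the table that
 * matches the detected family and call hardware only through it, e.g.
 * (sketch only; the actual selection and wrapper macros live elsewhere
 * in the driver):
 *
 *	rdev->asic = &r100_asic;
 *	if (rdev->asic->init)
 *		rdev->asic->init(rdev);
 *
 * Entries left NULL mean the operation is unsupported, or not yet
 * implemented, for that family.
 */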


/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
extern int r300_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
static struct radeon_asic r300_asic = {
	.init = &r300_init,
	.fini = &r300_fini,
	.suspend = &r300_suspend,
	.resume = &r300_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};


static struct radeon_asic r300_asic_pcie = {
	.init = &r300_init,
	.fini = &r300_fini,
	.suspend = &r300_suspend,
	.resume = &r300_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r200_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
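
/*
 * r300_asic_pcie differs from r300_asic chiefly in its GART hooks: PCIE
 * parts flush and map through the rv370_pcie_gart_* paths rather than
 * the PCI GART ones.  Fields not initialized here (such as
 * .get_pcie_lanes) are implicitly NULL.
 */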

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
static struct radeon_asic r420_asic = {
	.init = &r420_init,
	.fini = &r420_fini,
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
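
/*
 * From r420 on, the tables use the radeon_atom_* clock and clock-gating
 * helpers instead of the radeon_legacy_* ones used above; the atom
 * helpers drive the chip through AtomBIOS command tables.
 */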


/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
	.init = &rs400_init,
	.fini = &rs400_fini,
	.suspend = &rs400_suspend,
	.resume = &rs400_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
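
/*
 * rs400/rs480 are integrated (IGP) chipsets: they reuse the r300 ring
 * and CS paths but provide their own GART hooks (rs400_gart_*) and keep
 * the legacy clock helpers.
 */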


/*
 * rs600.
 */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);

static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
	.fini = &rs600_fini,
	.suspend = &rs600_suspend,
	.resume = &rs600_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
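
/*
 * rs600 brings its own IRQ, vblank-counter, GART and hotplug-detect
 * handlers; the later tables below (rs690, rv515, r520 and the r600
 * family) reuse several of them.
 */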


/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs690_asic = {
	.init = &rs690_init,
	.fini = &rs690_fini,
	.suspend = &rs690_suspend,
	.resume = &rs690_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r300_copy_dma,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &rv515_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
	.init = &r520_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &r520_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
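
/*
 * The r600 family gets a largely new set of hooks: its own CS parser,
 * PCIE GART flush, write-back (r600_wb_*) setup and an ioctl_wait_idle
 * implementation.  Note that both .copy_dma and .copy in the table
 * below point at r600_copy_blit, so all copies go through the blit path.
 */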

static struct radeon_asic r600_asic = {
	.init = &r600_init,
	.fini = &r600_fini,
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = NULL,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
	.ioctl_wait_idle = r600_ioctl_wait_idle,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

static struct radeon_asic rv770_asic = {
	.init = &rv770_init,
	.fini = &rv770_fini,
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
	.ioctl_wait_idle = r600_ioctl_wait_idle,
};

/*
 * evergreen
 */
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
int evergreen_gpu_reset(struct radeon_device *rdev);
void evergreen_bandwidth_update(struct radeon_device *rdev);
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd);

static struct radeon_asic evergreen_asic = {
	.init = &evergreen_init,
	.fini = &evergreen_fini,
	.suspend = &evergreen_suspend,
	.resume = &evergreen_resume,
	.cp_commit = NULL,
	.gpu_reset = &evergreen_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = NULL,
	.ring_ib_execute = NULL,
	.irq_set = NULL,
	.irq_process = NULL,
	.get_vblank_counter = NULL,
	.fence_ring_emit = NULL,
	.cs_parse = NULL,
	.copy_blit = NULL,
	.copy_dma = NULL,
	.copy = NULL,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = NULL,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &evergreen_bandwidth_update,
	.hpd_init = &evergreen_hpd_init,
	.hpd_fini = &evergreen_hpd_fini,
	.hpd_sense = &evergreen_hpd_sense,
	.hpd_set_polarity = &evergreen_hpd_set_polarity,
};
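
/*
 * Only init/suspend/resume, GART, clock, bandwidth, hotplug and
 * surface-register hooks are wired up for evergreen here; the
 * acceleration entries (CP, ring, IRQ, CS and copies) are left NULL.
 */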

#endif