/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

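/*
 * This header declares the per-family hardware callbacks and gathers them
 * into one static struct radeon_asic table per ASIC generation (r100_asic,
 * r300_asic, ... rv770_asic); struct radeon_asic itself is defined elsewhere
 * in the driver core.
 *
 * Illustrative sketch only (assumed usage, not defined in this header): the
 * driver core is expected to pick one of these tables at probe time based on
 * the detected chip family and then dispatch through it, roughly:
 *
 *	rdev->asic = &r600_asic;
 *	...
 *	r = rdev->asic->init(rdev);
 */
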
/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

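/*
 * Note on the helpers above: the radeon_legacy_* variants are used by the
 * r100, r300 and rs400 tables below, while the radeon_atom_* variants (which
 * go through the AtomBIOS command tables) are used by the remaining tables.
 */
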
/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);
void r100_hdp_flush(struct radeon_device *rdev);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);

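/*
 * r100-class table: PCI GART, legacy clock control, and no DMA copy engine
 * (.copy_dma is NULL), so .copy goes through the blitter.
 */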
static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.fini = &r100_fini,
	.suspend = &r100_suspend,
	.resume = &r100_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = NULL,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};


/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
static struct radeon_asic r300_asic = {
	.init = &r300_init,
	.fini = &r300_fini,
	.suspend = &r300_suspend,
	.resume = &r300_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
static struct radeon_asic r420_asic = {
	.init = &r420_init,
	.fini = &r420_fini,
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};


/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
	.init = &rs400_init,
	.fini = &rs400_fini,
	.suspend = &rs400_suspend,
	.resume = &rs400_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};


/*
 * rs600.
 */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);

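/*
 * rs600 table: IGP GART plus the rs600-specific interrupt, vblank, HPD and
 * bandwidth handlers and AtomBIOS clock calls; the rs690, rv515 and r520
 * tables below reuse the rs600 interrupt, vblank and HPD handlers.
 */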
static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
	.fini = &rs600_fini,
	.suspend = &rs600_suspend,
	.resume = &rs600_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};


/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs690_asic = {
	.init = &rs690_init,
	.fini = &rs690_fini,
	.suspend = &rs690_suspend,
	.resume = &rs690_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r300_copy_dma,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &rv515_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
	.init = &r520_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &r520_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
void r600_hdp_flush(struct radeon_device *rdev);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);

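/*
 * r600-class table: r600 CP and PCIE GART flush (page setup shared with
 * rs600), r600 CS parser and surface registers; .copy_blit, .copy_dma and
 * .copy all point at r600_copy_blit, so every copy goes through the blitter.
 */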
static struct radeon_asic r600_asic = {
	.init = &r600_init,
	.fini = &r600_fini,
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hdp_flush = &r600_hdp_flush,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

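/*
 * rv770 table: identical to r600_asic except for the rv770-specific
 * init/fini/suspend/resume and gpu_reset callbacks.
 */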
static struct radeon_asic rv770_asic = {
	.init = &rv770_init,
	.fini = &rv770_fini,
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hdp_flush = &r600_hdp_flush,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
};

#endif