/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

void atombios_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 atombios_get_backlight_level(struct radeon_encoder *radeon_encoder);
void radeon_legacy_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 radeon_legacy_get_backlight_level(struct radeon_encoder *radeon_encoder);

/*
 * r100,rv100,rs100,rv200,rs200
 */
struct r100_mc_save {
	u32	GENMO_WT;
	u32	CRTC_EXT_CNTL;
	u32	CRTC_GEN_CNTL;
	u32	CRTC2_GEN_CNTL;
	u32	CUR_OFFSET;
	u32	CUR2_OFFSET;
};
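
/*
 * Editorial sketch, not part of the original header: r100_mc_save is the
 * scratch area filled by r100_mc_stop() and consumed by r100_mc_resume()
 * (both declared below), so the CRTC/VGA state it captures can be restored
 * once the memory controller has been reprogrammed.  Roughly:
 *
 *	struct r100_mc_save save;
 *
 *	r100_mc_stop(rdev, &save);
 *	... reprogram the MC while scanout is quiesced ...
 *	r100_mc_resume(rdev, &save);
 */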
int r100_init(struct radeon_device *rdev);
void r100_fini(struct radeon_device *rdev);
int r100_suspend(struct radeon_device *rdev);
int r100_resume(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
bool r100_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r100_asic_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
void r100_semaphore_ring_emit(struct radeon_device *rdev,
			      struct radeon_ring *cp,
			      struct radeon_semaphore *semaphore,
			      bool emit_wait);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_gpu_pages,
		   struct radeon_fence **fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
int r100_debugfs_rbbm_init(struct radeon_device *rdev);
int r100_debugfs_cp_init(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
int r100_debugfs_mc_info_init(struct radeon_device *rdev);
int r100_gui_wait_for_idle(struct radeon_device *rdev);
int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r100_irq_disable(struct radeon_device *rdev);
void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_vram_init_sizes(struct radeon_device *rdev);
int r100_cp_reset(struct radeon_device *rdev);
void r100_vga_render_disable(struct radeon_device *rdev);
void r100_restore_sanity(struct radeon_device *rdev);
int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
					 struct radeon_cs_packet *pkt,
					 struct radeon_bo *robj);
int r100_cs_parse_packet0(struct radeon_cs_parser *p,
			  struct radeon_cs_packet *pkt,
			  const unsigned *auth, unsigned n,
			  radeon_packet0_check_t check);
int r100_cs_packet_parse(struct radeon_cs_parser *p,
			 struct radeon_cs_packet *pkt,
			 unsigned idx);
void r100_enable_bm(struct radeon_device *rdev);
void r100_set_common_regs(struct radeon_device *rdev);
void r100_bm_disable(struct radeon_device *rdev);
extern bool r100_gui_idle(struct radeon_device *rdev);
extern void r100_pm_misc(struct radeon_device *rdev);
extern void r100_pm_prepare(struct radeon_device *rdev);
extern void r100_pm_finish(struct radeon_device *rdev);
extern void r100_pm_init_profile(struct radeon_device *rdev);
extern void r100_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r100_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 r100_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void r100_post_page_flip(struct radeon_device *rdev, int crtc);
extern void r100_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int r100_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r200,rv250,rs300,rv280
 */
extern int r200_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_gpu_pages,
			 struct radeon_fence **fence);
void r200_set_safe_registers(struct radeon_device *rdev);

/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_asic_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
extern void r300_set_reg_safe(struct radeon_device *rdev);
extern void r300_mc_program(struct radeon_device *rdev);
extern void r300_mc_init(struct radeon_device *rdev);
extern void r300_clock_startup(struct radeon_device *rdev);
extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
extern int rv370_pcie_gart_init(struct radeon_device *rdev);
extern void rv370_pcie_gart_fini(struct radeon_device *rdev);
extern int rv370_pcie_gart_enable(struct radeon_device *rdev);
extern void rv370_pcie_gart_disable(struct radeon_device *rdev);

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
extern void r420_pm_init_profile(struct radeon_device *rdev);
extern u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg);
extern void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v);
extern int r420_debugfs_pipes_info_init(struct radeon_device *rdev);
extern void r420_pipes_init(struct radeon_device *rdev);

/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int rs400_gart_init(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_adjust_size(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
extern int rs400_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rs600
 */
extern int rs600_asic_reset(struct radeon_device *rdev);
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
void rs600_irq_disable(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);
extern void rs600_pm_misc(struct radeon_device *rdev);
extern void rs600_pm_prepare(struct radeon_device *rdev);
extern void rs600_pm_finish(struct radeon_device *rdev);
extern void rs600_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 rs600_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void rs600_post_page_flip(struct radeon_device *rdev, int crtc);
void rs600_set_safe_registers(struct radeon_device *rdev);
extern void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int rs600_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
void rs690_line_buffer_adjust(struct radeon_device *rdev,
			      struct drm_display_mode *mode1,
			      struct drm_display_mode *mode2);
extern int rs690_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rv515
 */
struct rv515_mc_save {
	u32 vga_render_control;
	u32 vga_hdp_control;
	bool crtc_enabled[2];
};
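
/*
 * Editorial note, not from the original header: rv515_mc_save serves the
 * same purpose for rv515_mc_stop()/rv515_mc_resume(), declared further
 * down; crtc_enabled[] records which of the two CRTCs were active so the
 * resume path can (presumably) restore only those.
 */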

int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
void rv515_bandwidth_avivo_update(struct radeon_device *rdev);
void rv515_vga_render_disable(struct radeon_device *rdev);
void rv515_set_safe_registers(struct radeon_device *rdev);
void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_clock_startup(struct radeon_device *rdev);
void rv515_debugfs(struct radeon_device *rdev);
int rv515_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
int r520_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
int r600_dma_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
void r600_semaphore_ring_emit(struct radeon_device *rdev,
			      struct radeon_ring *cp,
			      struct radeon_semaphore *semaphore,
			      bool emit_wait);
void r600_dma_fence_ring_emit(struct radeon_device *rdev,
			      struct radeon_fence *fence);
void r600_dma_semaphore_ring_emit(struct radeon_device *rdev,
				  struct radeon_ring *ring,
				  struct radeon_semaphore *semaphore,
				  bool emit_wait);
void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_asic_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_dma_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_uvd_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_gpu_pages, struct radeon_fence **fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset, uint64_t dst_offset,
		  unsigned num_gpu_pages, struct radeon_fence **fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
extern bool r600_gui_idle(struct radeon_device *rdev);
extern void r600_pm_misc(struct radeon_device *rdev);
extern void r600_pm_init_profile(struct radeon_device *rdev);
extern void rs780_pm_init_profile(struct radeon_device *rdev);
extern uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void r600_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r600_get_pcie_lanes(struct radeon_device *rdev);
bool r600_card_posted(struct radeon_device *rdev);
void r600_cp_stop(struct radeon_device *rdev);
int r600_cp_start(struct radeon_device *rdev);
void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ring_size);
int r600_cp_resume(struct radeon_device *rdev);
void r600_cp_fini(struct radeon_device *rdev);
int r600_count_pipe_bits(uint32_t val);
int r600_mc_wait_for_idle(struct radeon_device *rdev);
int r600_pcie_gart_init(struct radeon_device *rdev);
void r600_scratch_init(struct radeon_device *rdev);
int r600_blit_init(struct radeon_device *rdev);
void r600_blit_fini(struct radeon_device *rdev);
int r600_init_microcode(struct radeon_device *rdev);
/* r600 irq */
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_init(struct radeon_device *rdev);
void r600_irq_fini(struct radeon_device *rdev);
void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size);
int r600_irq_set(struct radeon_device *rdev);
void r600_irq_suspend(struct radeon_device *rdev);
void r600_disable_interrupts(struct radeon_device *rdev);
void r600_rlc_stop(struct radeon_device *rdev);
/* r600 audio */
int r600_audio_init(struct radeon_device *rdev);
struct r600_audio r600_audio_status(struct radeon_device *rdev);
void r600_audio_fini(struct radeon_device *rdev);
int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder);
void r600_hdmi_update_audio_settings(struct drm_encoder *encoder);
void r600_hdmi_enable(struct drm_encoder *encoder, bool enable);
void r600_hdmi_setmode(struct drm_encoder *encoder, struct drm_display_mode *mode);
/* r600 blit */
int r600_blit_prepare_copy(struct radeon_device *rdev, unsigned num_gpu_pages,
			   struct radeon_fence **fence, struct radeon_sa_bo **vb,
			   struct radeon_semaphore **sem);
void r600_blit_done_copy(struct radeon_device *rdev, struct radeon_fence **fence,
			 struct radeon_sa_bo *vb, struct radeon_semaphore *sem);
void r600_kms_blit_copy(struct radeon_device *rdev,
			u64 src_gpu_addr, u64 dst_gpu_addr,
			unsigned num_gpu_pages,
			struct radeon_sa_bo *vb);
u32 r600_get_xclk(struct radeon_device *rdev);
uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev);

/* uvd */
int r600_uvd_init(struct radeon_device *rdev);
int r600_uvd_rbc_start(struct radeon_device *rdev);
void r600_uvd_rbc_stop(struct radeon_device *rdev);
int r600_uvd_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r600_uvd_fence_emit(struct radeon_device *rdev,
			 struct radeon_fence *fence);
void r600_uvd_semaphore_emit(struct radeon_device *rdev,
			     struct radeon_ring *ring,
			     struct radeon_semaphore *semaphore,
			     bool emit_wait);
void r600_uvd_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
void rv770_pm_misc(struct radeon_device *rdev);
u32 rv770_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
void r700_cp_stop(struct radeon_device *rdev);
void r700_cp_fini(struct radeon_device *rdev);
int rv770_copy_dma(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_gpu_pages,
		   struct radeon_fence **fence);
u32 rv770_get_xclk(struct radeon_device *rdev);
int rv770_uvd_resume(struct radeon_device *rdev);
int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);

/*
 * evergreen
 */
struct evergreen_mc_save {
	u32 vga_render_control;
	u32 vga_hdp_control;
	bool crtc_enabled[RADEON_MAX_CRTCS];
};
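
/*
 * Editorial note, not from the original header: evergreen_mc_save mirrors the
 * r100/rv515 save areas above, but sizes crtc_enabled[] with RADEON_MAX_CRTCS
 * (defined elsewhere in the driver) since these display blocks expose more
 * than two CRTCs.
 */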

void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev);
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int evergreen_asic_reset(struct radeon_device *rdev);
void evergreen_bandwidth_update(struct radeon_device *rdev);
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd);
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc);
int evergreen_irq_set(struct radeon_device *rdev);
int evergreen_irq_process(struct radeon_device *rdev);
extern int evergreen_cs_parse(struct radeon_cs_parser *p);
extern int evergreen_dma_cs_parse(struct radeon_cs_parser *p);
extern void evergreen_pm_misc(struct radeon_device *rdev);
extern void evergreen_pm_prepare(struct radeon_device *rdev);
extern void evergreen_pm_finish(struct radeon_device *rdev);
extern void sumo_pm_init_profile(struct radeon_device *rdev);
extern void btc_pm_init_profile(struct radeon_device *rdev);
int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
extern void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 evergreen_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void evergreen_post_page_flip(struct radeon_device *rdev, int crtc);
extern void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc);
void evergreen_disable_interrupt_state(struct radeon_device *rdev);
int evergreen_blit_init(struct radeon_device *rdev);
int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
				   struct radeon_fence *fence);
void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
				   struct radeon_ib *ib);
int evergreen_copy_dma(struct radeon_device *rdev,
		       uint64_t src_offset, uint64_t dst_offset,
		       unsigned num_gpu_pages,
		       struct radeon_fence **fence);
void evergreen_hdmi_enable(struct drm_encoder *encoder, bool enable);
void evergreen_hdmi_setmode(struct drm_encoder *encoder, struct drm_display_mode *mode);

/*
 * cayman
 */
void cayman_fence_ring_emit(struct radeon_device *rdev,
			    struct radeon_fence *fence);
void cayman_uvd_semaphore_emit(struct radeon_device *rdev,
			       struct radeon_ring *ring,
			       struct radeon_semaphore *semaphore,
			       bool emit_wait);
void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev);
int cayman_init(struct radeon_device *rdev);
void cayman_fini(struct radeon_device *rdev);
int cayman_suspend(struct radeon_device *rdev);
int cayman_resume(struct radeon_device *rdev);
int cayman_asic_reset(struct radeon_device *rdev);
void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cayman_vm_init(struct radeon_device *rdev);
void cayman_vm_fini(struct radeon_device *rdev);
void cayman_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
uint32_t cayman_vm_page_flags(struct radeon_device *rdev, uint32_t flags);
void cayman_vm_set_page(struct radeon_device *rdev,
			struct radeon_ib *ib,
			uint64_t pe,
			uint64_t addr, unsigned count,
			uint32_t incr, uint32_t flags);
int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
				struct radeon_ib *ib);
bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
void cayman_dma_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);

/* DCE6 - SI */
void dce6_bandwidth_update(struct radeon_device *rdev);

/*
 * si
 */
void si_fence_ring_emit(struct radeon_device *rdev,
			struct radeon_fence *fence);
void si_pcie_gart_tlb_flush(struct radeon_device *rdev);
int si_init(struct radeon_device *rdev);
void si_fini(struct radeon_device *rdev);
int si_suspend(struct radeon_device *rdev);
int si_resume(struct radeon_device *rdev);
bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
bool si_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int si_asic_reset(struct radeon_device *rdev);
void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int si_irq_set(struct radeon_device *rdev);
int si_irq_process(struct radeon_device *rdev);
int si_vm_init(struct radeon_device *rdev);
void si_vm_fini(struct radeon_device *rdev);
void si_vm_set_page(struct radeon_device *rdev,
		    struct radeon_ib *ib,
		    uint64_t pe,
		    uint64_t addr, unsigned count,
		    uint32_t incr, uint32_t flags);
void si_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
int si_copy_dma(struct radeon_device *rdev,
		uint64_t src_offset, uint64_t dst_offset,
		unsigned num_gpu_pages,
		struct radeon_fence **fence);
void si_dma_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
u32 si_get_xclk(struct radeon_device *rdev);
uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev);
int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);

/*
 * cik
 */
uint64_t cik_get_gpu_clock_counter(struct radeon_device *rdev);

#endif