/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

void atombios_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 atombios_get_backlight_level(struct radeon_encoder *radeon_encoder);
void radeon_legacy_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 radeon_legacy_get_backlight_level(struct radeon_encoder *radeon_encoder);
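/*
 * Illustrative only, not part of the original header: these helpers are
 * normally reached through the per-ASIC function pointer tables built in
 * radeon_asic.c rather than called directly.  A minimal sketch, assuming the
 * usual .pm callback layout from radeon.h:
 *
 *        static struct radeon_asic r100_asic = {
 *                ...
 *                .pm = {
 *                        .get_engine_clock = &radeon_legacy_get_engine_clock,
 *                        .set_engine_clock = &radeon_legacy_set_engine_clock,
 *                },
 *                ...
 *        };
 */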

/*
 * r100,rv100,rs100,rv200,rs200
 */
struct r100_mc_save {
        u32 GENMO_WT;
        u32 CRTC_EXT_CNTL;
        u32 CRTC_GEN_CNTL;
        u32 CRTC2_GEN_CNTL;
        u32 CUR_OFFSET;
        u32 CUR2_OFFSET;
};
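/*
 * Illustrative only, not part of the original header: r100_mc_save holds the
 * display controller state that r100_mc_stop() (declared below) saves before
 * the memory controller is reprogrammed and that r100_mc_resume() restores
 * afterwards.  A minimal usage sketch:
 *
 *        struct r100_mc_save save;
 *
 *        r100_mc_stop(rdev, &save);
 *        ... reprogram the memory controller while scanout is quiesced ...
 *        r100_mc_resume(rdev, &save);
 */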
int r100_init(struct radeon_device *rdev);
void r100_fini(struct radeon_device *rdev);
int r100_suspend(struct radeon_device *rdev);
int r100_resume(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
bool r100_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r100_asic_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
uint64_t r100_pci_gart_get_page_entry(uint64_t addr, uint32_t flags);
void r100_pci_gart_set_page(struct radeon_device *rdev, unsigned i,
                uint64_t entry);
void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
bool r100_semaphore_ring_emit(struct radeon_device *rdev,
                struct radeon_ring *cp,
                struct radeon_semaphore *semaphore,
                bool emit_wait);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
struct radeon_fence *r100_copy_blit(struct radeon_device *rdev,
                uint64_t src_offset,
                uint64_t dst_offset,
                unsigned num_gpu_pages,
                struct reservation_object *resv);
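/*
 * Illustrative only, not part of the original header: the copy entry points
 * above return a radeon_fence for the scheduled transfer.  A hedged usage
 * sketch, assuming the usual convention that failures are returned as an
 * ERR_PTR() and that the caller wants to block until the copy retires:
 *
 *        struct radeon_fence *fence;
 *
 *        fence = r100_copy_blit(rdev, src_offset, dst_offset, num_gpu_pages, resv);
 *        if (!IS_ERR_OR_NULL(fence))
 *                radeon_fence_wait(fence, false);
 */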
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
                uint32_t tiling_flags, uint32_t pitch,
                uint32_t offset, uint32_t obj_size);
void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
                enum radeon_hpd_id hpd);
int r100_debugfs_rbbm_init(struct radeon_device *rdev);
int r100_debugfs_cp_init(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
int r100_debugfs_mc_info_init(struct radeon_device *rdev);
int r100_gui_wait_for_idle(struct radeon_device *rdev);
int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r100_irq_disable(struct radeon_device *rdev);
void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_vram_init_sizes(struct radeon_device *rdev);
int r100_cp_reset(struct radeon_device *rdev);
void r100_vga_render_disable(struct radeon_device *rdev);
void r100_restore_sanity(struct radeon_device *rdev);
int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
                struct radeon_cs_packet *pkt,
                struct radeon_bo *robj);
int r100_cs_parse_packet0(struct radeon_cs_parser *p,
                struct radeon_cs_packet *pkt,
                const unsigned *auth, unsigned n,
                radeon_packet0_check_t check);
int r100_cs_packet_parse(struct radeon_cs_parser *p,
                struct radeon_cs_packet *pkt,
                unsigned idx);
void r100_enable_bm(struct radeon_device *rdev);
void r100_set_common_regs(struct radeon_device *rdev);
void r100_bm_disable(struct radeon_device *rdev);
extern bool r100_gui_idle(struct radeon_device *rdev);
extern void r100_pm_misc(struct radeon_device *rdev);
extern void r100_pm_prepare(struct radeon_device *rdev);
extern void r100_pm_finish(struct radeon_device *rdev);
extern void r100_pm_init_profile(struct radeon_device *rdev);
extern void r100_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r100_page_flip(struct radeon_device *rdev, int crtc,
                u64 crtc_base);
extern bool r100_page_flip_pending(struct radeon_device *rdev, int crtc);
extern void r100_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int r100_mc_wait_for_idle(struct radeon_device *rdev);

u32 r100_gfx_get_rptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
u32 r100_gfx_get_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
void r100_gfx_set_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
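/*
 * Illustrative only, not part of the original header: r100_gfx_get_rptr(),
 * r100_gfx_get_wptr() and r100_gfx_set_wptr() back the ring pointer
 * callbacks of the per-ASIC ring table.  A sketch, assuming the callback
 * names used elsewhere in the driver:
 *
 *        static struct radeon_asic_ring r100_gfx_ring = {
 *                ...
 *                .get_rptr = &r100_gfx_get_rptr,
 *                .get_wptr = &r100_gfx_get_wptr,
 *                .set_wptr = &r100_gfx_set_wptr,
 *        };
 */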

/*
 * r200,rv250,rs300,rv280
 */
struct radeon_fence *r200_copy_dma(struct radeon_device *rdev,
                uint64_t src_offset,
                uint64_t dst_offset,
                unsigned num_gpu_pages,
                struct reservation_object *resv);
void r200_set_safe_registers(struct radeon_device *rdev);

/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_asic_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern uint64_t rv370_pcie_gart_get_page_entry(uint64_t addr, uint32_t flags);
extern void rv370_pcie_gart_set_page(struct radeon_device *rdev, unsigned i,
                uint64_t entry);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
extern void r300_set_reg_safe(struct radeon_device *rdev);
extern void r300_mc_program(struct radeon_device *rdev);
extern void r300_mc_init(struct radeon_device *rdev);
extern void r300_clock_startup(struct radeon_device *rdev);
extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
extern int rv370_pcie_gart_init(struct radeon_device *rdev);
extern void rv370_pcie_gart_fini(struct radeon_device *rdev);
extern int rv370_pcie_gart_enable(struct radeon_device *rdev);
extern void rv370_pcie_gart_disable(struct radeon_device *rdev);
extern int r300_mc_wait_for_idle(struct radeon_device *rdev);

Jerome Glisse771fe6b2009-06-05 14:42:42 +0200192/*
193 * r420,r423,rv410
194 */
Jerome Glisse9f022dd2009-09-11 15:35:22 +0200195extern int r420_init(struct radeon_device *rdev);
196extern void r420_fini(struct radeon_device *rdev);
197extern int r420_suspend(struct radeon_device *rdev);
198extern int r420_resume(struct radeon_device *rdev);
Alex Deucherce8f5372010-05-07 15:10:16 -0400199extern void r420_pm_init_profile(struct radeon_device *rdev);
Daniel Vetter187f3da2010-11-28 19:06:09 +0100200extern u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg);
201extern void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v);
202extern int r420_debugfs_pipes_info_init(struct radeon_device *rdev);
203extern void r420_pipes_init(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200204
205/*
206 * rs400,rs480
207 */
Jerome Glisseca6ffc62009-10-01 10:20:52 +0200208extern int rs400_init(struct radeon_device *rdev);
209extern void rs400_fini(struct radeon_device *rdev);
210extern int rs400_suspend(struct radeon_device *rdev);
211extern int rs400_resume(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200212void rs400_gart_tlb_flush(struct radeon_device *rdev);
Michel Dänzercb658902015-01-21 17:36:35 +0900213uint64_t rs400_gart_get_page_entry(uint64_t addr, uint32_t flags);
Christian König7f90fc92014-06-04 15:29:57 +0200214void rs400_gart_set_page(struct radeon_device *rdev, unsigned i,
Michel Dänzercb658902015-01-21 17:36:35 +0900215 uint64_t entry);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200216uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
217void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
Daniel Vetter187f3da2010-11-28 19:06:09 +0100218int rs400_gart_init(struct radeon_device *rdev);
219int rs400_gart_enable(struct radeon_device *rdev);
220void rs400_gart_adjust_size(struct radeon_device *rdev);
221void rs400_gart_disable(struct radeon_device *rdev);
222void rs400_gart_fini(struct radeon_device *rdev);
Alex Deucher89e51812012-02-23 17:53:38 -0500223extern int rs400_mc_wait_for_idle(struct radeon_device *rdev);
Daniel Vetter187f3da2010-11-28 19:06:09 +0100224
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200225/*
226 * rs600.
227 */
Jerome Glisse90aca4d2010-03-09 14:45:12 +0000228extern int rs600_asic_reset(struct radeon_device *rdev);
Jerome Glissec010f802009-09-30 22:09:06 +0200229extern int rs600_init(struct radeon_device *rdev);
230extern void rs600_fini(struct radeon_device *rdev);
231extern int rs600_suspend(struct radeon_device *rdev);
232extern int rs600_resume(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200233int rs600_irq_set(struct radeon_device *rdev);
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200234int rs600_irq_process(struct radeon_device *rdev);
Daniel Vetter187f3da2010-11-28 19:06:09 +0100235void rs600_irq_disable(struct radeon_device *rdev);
Michel Dänzer7ed220d2009-08-13 11:10:51 +0200236u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200237void rs600_gart_tlb_flush(struct radeon_device *rdev);
Michel Dänzercb658902015-01-21 17:36:35 +0900238uint64_t rs600_gart_get_page_entry(uint64_t addr, uint32_t flags);
Christian König7f90fc92014-06-04 15:29:57 +0200239void rs600_gart_set_page(struct radeon_device *rdev, unsigned i,
Michel Dänzercb658902015-01-21 17:36:35 +0900240 uint64_t entry);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200241uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
242void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
Jerome Glissec93bb852009-07-13 21:04:08 +0200243void rs600_bandwidth_update(struct radeon_device *rdev);
Alex Deucher429770b2009-12-04 15:26:55 -0500244void rs600_hpd_init(struct radeon_device *rdev);
245void rs600_hpd_fini(struct radeon_device *rdev);
246bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
247void rs600_hpd_set_polarity(struct radeon_device *rdev,
248 enum radeon_hpd_id hpd);
Alex Deucher49e02b72010-04-23 17:57:27 -0400249extern void rs600_pm_misc(struct radeon_device *rdev);
250extern void rs600_pm_prepare(struct radeon_device *rdev);
251extern void rs600_pm_finish(struct radeon_device *rdev);
Christian König157fa142014-05-27 16:49:20 +0200252extern void rs600_page_flip(struct radeon_device *rdev, int crtc,
253 u64 crtc_base);
254extern bool rs600_page_flip_pending(struct radeon_device *rdev, int crtc);
Daniel Vetter187f3da2010-11-28 19:06:09 +0100255void rs600_set_safe_registers(struct radeon_device *rdev);
Alex Deucher3ae19b72012-02-23 17:53:37 -0500256extern void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc);
Alex Deucher89e51812012-02-23 17:53:38 -0500257extern int rs600_mc_wait_for_idle(struct radeon_device *rdev);
Alex Deucher429770b2009-12-04 15:26:55 -0500258
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200259/*
260 * rs690,rs740
261 */
Jerome Glisse3bc68532009-10-01 09:39:24 +0200262int rs690_init(struct radeon_device *rdev);
263void rs690_fini(struct radeon_device *rdev);
264int rs690_resume(struct radeon_device *rdev);
265int rs690_suspend(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200266uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
267void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
Jerome Glissec93bb852009-07-13 21:04:08 +0200268void rs690_bandwidth_update(struct radeon_device *rdev);
Daniel Vetter187f3da2010-11-28 19:06:09 +0100269void rs690_line_buffer_adjust(struct radeon_device *rdev,
270 struct drm_display_mode *mode1,
271 struct drm_display_mode *mode2);
Alex Deucher89e51812012-02-23 17:53:38 -0500272extern int rs690_mc_wait_for_idle(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200273
274/*
275 * rv515
276 */
Daniel Vetter187f3da2010-11-28 19:06:09 +0100277struct rv515_mc_save {
Daniel Vetter187f3da2010-11-28 19:06:09 +0100278 u32 vga_render_control;
279 u32 vga_hdp_control;
Alex Deucher6253e4c2012-12-12 14:30:32 -0500280 bool crtc_enabled[2];
Daniel Vetter187f3da2010-11-28 19:06:09 +0100281};
Jerome Glisse81ee8fb2012-07-27 16:32:24 -0400282
Jerome Glisse068a1172009-06-17 13:28:30 +0200283int rv515_init(struct radeon_device *rdev);
Jerome Glissed39c3b82009-09-28 18:34:43 +0200284void rv515_fini(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200285uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
286void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
Alex Deucherf7128122012-02-23 17:53:45 -0500287void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
Jerome Glissec93bb852009-07-13 21:04:08 +0200288void rv515_bandwidth_update(struct radeon_device *rdev);
Jerome Glissed39c3b82009-09-28 18:34:43 +0200289int rv515_resume(struct radeon_device *rdev);
290int rv515_suspend(struct radeon_device *rdev);
Daniel Vetter187f3da2010-11-28 19:06:09 +0100291void rv515_bandwidth_avivo_update(struct radeon_device *rdev);
292void rv515_vga_render_disable(struct radeon_device *rdev);
293void rv515_set_safe_registers(struct radeon_device *rdev);
294void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save);
295void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save);
296void rv515_clock_startup(struct radeon_device *rdev);
297void rv515_debugfs(struct radeon_device *rdev);
Alex Deucher89e51812012-02-23 17:53:38 -0500298int rv515_mc_wait_for_idle(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200299
300/*
301 * r520,rv530,rv560,rv570,r580
302 */
Jerome Glissed39c3b82009-09-28 18:34:43 +0200303int r520_init(struct radeon_device *rdev);
Jerome Glissef0ed1f62009-09-28 20:39:19 +0200304int r520_resume(struct radeon_device *rdev);
Alex Deucher89e51812012-02-23 17:53:38 -0500305int r520_mc_wait_for_idle(struct radeon_device *rdev);
Jerome Glisse771fe6b2009-06-05 14:42:42 +0200306
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
int r600_dma_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
bool r600_semaphore_ring_emit(struct radeon_device *rdev,
                struct radeon_ring *cp,
                struct radeon_semaphore *semaphore,
                bool emit_wait);
void r600_dma_fence_ring_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
bool r600_dma_semaphore_ring_emit(struct radeon_device *rdev,
                struct radeon_ring *ring,
                struct radeon_semaphore *semaphore,
                bool emit_wait);
void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_asic_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
                uint32_t tiling_flags, uint32_t pitch,
                uint32_t offset, uint32_t obj_size);
void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_dma_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
struct radeon_fence *r600_copy_cpdma(struct radeon_device *rdev,
                uint64_t src_offset, uint64_t dst_offset,
                unsigned num_gpu_pages,
                struct reservation_object *resv);
struct radeon_fence *r600_copy_dma(struct radeon_device *rdev,
                uint64_t src_offset, uint64_t dst_offset,
                unsigned num_gpu_pages,
                struct reservation_object *resv);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
                enum radeon_hpd_id hpd);
extern void r600_mmio_hdp_flush(struct radeon_device *rdev);
extern bool r600_gui_idle(struct radeon_device *rdev);
extern void r600_pm_misc(struct radeon_device *rdev);
extern void r600_pm_init_profile(struct radeon_device *rdev);
extern void rs780_pm_init_profile(struct radeon_device *rdev);
extern uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void r600_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r600_get_pcie_lanes(struct radeon_device *rdev);
bool r600_card_posted(struct radeon_device *rdev);
void r600_cp_stop(struct radeon_device *rdev);
int r600_cp_start(struct radeon_device *rdev);
void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ring_size);
int r600_cp_resume(struct radeon_device *rdev);
void r600_cp_fini(struct radeon_device *rdev);
int r600_count_pipe_bits(uint32_t val);
int r600_mc_wait_for_idle(struct radeon_device *rdev);
int r600_pcie_gart_init(struct radeon_device *rdev);
void r600_scratch_init(struct radeon_device *rdev);
int r600_init_microcode(struct radeon_device *rdev);
u32 r600_gfx_get_rptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
u32 r600_gfx_get_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
void r600_gfx_set_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
/* r600 irq */
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_init(struct radeon_device *rdev);
void r600_irq_fini(struct radeon_device *rdev);
void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size);
int r600_irq_set(struct radeon_device *rdev);
void r600_irq_suspend(struct radeon_device *rdev);
void r600_disable_interrupts(struct radeon_device *rdev);
void r600_rlc_stop(struct radeon_device *rdev);
/* r600 audio */
void r600_audio_fini(struct radeon_device *rdev);
void r600_audio_set_dto(struct drm_encoder *encoder, u32 clock);
void r600_hdmi_update_avi_infoframe(struct drm_encoder *encoder, void *buffer,
                size_t size);
void r600_hdmi_update_ACR(struct drm_encoder *encoder, uint32_t clock);
void r600_hdmi_audio_workaround(struct drm_encoder *encoder);
int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder);
void r600_hdmi_update_audio_settings(struct drm_encoder *encoder);
int r600_mc_wait_for_idle(struct radeon_device *rdev);
u32 r600_get_xclk(struct radeon_device *rdev);
uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev);
int rv6xx_get_temp(struct radeon_device *rdev);
int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int r600_dpm_pre_set_power_state(struct radeon_device *rdev);
void r600_dpm_post_set_power_state(struct radeon_device *rdev);
int r600_dpm_late_enable(struct radeon_device *rdev);
/* r600 dma */
uint32_t r600_dma_get_rptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
uint32_t r600_dma_get_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
void r600_dma_set_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
/* rv6xx dpm */
int rv6xx_dpm_init(struct radeon_device *rdev);
int rv6xx_dpm_enable(struct radeon_device *rdev);
void rv6xx_dpm_disable(struct radeon_device *rdev);
int rv6xx_dpm_set_power_state(struct radeon_device *rdev);
void rv6xx_setup_asic(struct radeon_device *rdev);
void rv6xx_dpm_display_configuration_changed(struct radeon_device *rdev);
void rv6xx_dpm_fini(struct radeon_device *rdev);
u32 rv6xx_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 rv6xx_dpm_get_mclk(struct radeon_device *rdev, bool low);
void rv6xx_dpm_print_power_state(struct radeon_device *rdev,
                struct radeon_ps *ps);
void rv6xx_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                struct seq_file *m);
int rv6xx_dpm_force_performance_level(struct radeon_device *rdev,
                enum radeon_dpm_forced_level level);
u32 rv6xx_dpm_get_current_sclk(struct radeon_device *rdev);
u32 rv6xx_dpm_get_current_mclk(struct radeon_device *rdev);
/* rs780 dpm */
int rs780_dpm_init(struct radeon_device *rdev);
int rs780_dpm_enable(struct radeon_device *rdev);
void rs780_dpm_disable(struct radeon_device *rdev);
int rs780_dpm_set_power_state(struct radeon_device *rdev);
void rs780_dpm_setup_asic(struct radeon_device *rdev);
void rs780_dpm_display_configuration_changed(struct radeon_device *rdev);
void rs780_dpm_fini(struct radeon_device *rdev);
u32 rs780_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 rs780_dpm_get_mclk(struct radeon_device *rdev, bool low);
void rs780_dpm_print_power_state(struct radeon_device *rdev,
                struct radeon_ps *ps);
void rs780_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                struct seq_file *m);
int rs780_dpm_force_performance_level(struct radeon_device *rdev,
                enum radeon_dpm_forced_level level);
u32 rs780_dpm_get_current_sclk(struct radeon_device *rdev);
u32 rs780_dpm_get_current_mclk(struct radeon_device *rdev);

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
void rv770_pm_misc(struct radeon_device *rdev);
void rv770_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
bool rv770_page_flip_pending(struct radeon_device *rdev, int crtc);
void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
void r700_cp_stop(struct radeon_device *rdev);
void r700_cp_fini(struct radeon_device *rdev);
struct radeon_fence *rv770_copy_dma(struct radeon_device *rdev,
                uint64_t src_offset, uint64_t dst_offset,
                unsigned num_gpu_pages,
                struct reservation_object *resv);
u32 rv770_get_xclk(struct radeon_device *rdev);
int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int rv770_get_temp(struct radeon_device *rdev);
/* rv7xx pm */
int rv770_dpm_init(struct radeon_device *rdev);
int rv770_dpm_enable(struct radeon_device *rdev);
int rv770_dpm_late_enable(struct radeon_device *rdev);
void rv770_dpm_disable(struct radeon_device *rdev);
int rv770_dpm_set_power_state(struct radeon_device *rdev);
void rv770_dpm_setup_asic(struct radeon_device *rdev);
void rv770_dpm_display_configuration_changed(struct radeon_device *rdev);
void rv770_dpm_fini(struct radeon_device *rdev);
u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low);
void rv770_dpm_print_power_state(struct radeon_device *rdev,
                struct radeon_ps *ps);
void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                struct seq_file *m);
int rv770_dpm_force_performance_level(struct radeon_device *rdev,
                enum radeon_dpm_forced_level level);
bool rv770_dpm_vblank_too_short(struct radeon_device *rdev);
u32 rv770_dpm_get_current_sclk(struct radeon_device *rdev);
u32 rv770_dpm_get_current_mclk(struct radeon_device *rdev);

/*
 * evergreen
 */
struct evergreen_mc_save {
        u32 vga_render_control;
        u32 vga_hdp_control;
        bool crtc_enabled[RADEON_MAX_CRTCS];
};

void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev);
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int evergreen_asic_reset(struct radeon_device *rdev);
void evergreen_bandwidth_update(struct radeon_device *rdev);
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
                enum radeon_hpd_id hpd);
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc);
int evergreen_irq_set(struct radeon_device *rdev);
int evergreen_irq_process(struct radeon_device *rdev);
extern int evergreen_cs_parse(struct radeon_cs_parser *p);
extern int evergreen_dma_cs_parse(struct radeon_cs_parser *p);
extern void evergreen_pm_misc(struct radeon_device *rdev);
extern void evergreen_pm_prepare(struct radeon_device *rdev);
extern void evergreen_pm_finish(struct radeon_device *rdev);
extern void sumo_pm_init_profile(struct radeon_device *rdev);
extern void btc_pm_init_profile(struct radeon_device *rdev);
int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
extern void evergreen_page_flip(struct radeon_device *rdev, int crtc,
                u64 crtc_base);
extern bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc);
extern void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc);
void evergreen_disable_interrupt_state(struct radeon_device *rdev);
int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
                struct radeon_ib *ib);
struct radeon_fence *evergreen_copy_dma(struct radeon_device *rdev,
                uint64_t src_offset, uint64_t dst_offset,
                unsigned num_gpu_pages,
                struct reservation_object *resv);
int evergreen_get_temp(struct radeon_device *rdev);
int sumo_get_temp(struct radeon_device *rdev);
int tn_get_temp(struct radeon_device *rdev);
int cypress_dpm_init(struct radeon_device *rdev);
void cypress_dpm_setup_asic(struct radeon_device *rdev);
int cypress_dpm_enable(struct radeon_device *rdev);
void cypress_dpm_disable(struct radeon_device *rdev);
int cypress_dpm_set_power_state(struct radeon_device *rdev);
void cypress_dpm_display_configuration_changed(struct radeon_device *rdev);
void cypress_dpm_fini(struct radeon_device *rdev);
bool cypress_dpm_vblank_too_short(struct radeon_device *rdev);
int btc_dpm_init(struct radeon_device *rdev);
void btc_dpm_setup_asic(struct radeon_device *rdev);
int btc_dpm_enable(struct radeon_device *rdev);
void btc_dpm_disable(struct radeon_device *rdev);
int btc_dpm_pre_set_power_state(struct radeon_device *rdev);
int btc_dpm_set_power_state(struct radeon_device *rdev);
void btc_dpm_post_set_power_state(struct radeon_device *rdev);
void btc_dpm_fini(struct radeon_device *rdev);
u32 btc_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 btc_dpm_get_mclk(struct radeon_device *rdev, bool low);
bool btc_dpm_vblank_too_short(struct radeon_device *rdev);
void btc_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                struct seq_file *m);
u32 btc_dpm_get_current_sclk(struct radeon_device *rdev);
u32 btc_dpm_get_current_mclk(struct radeon_device *rdev);
int sumo_dpm_init(struct radeon_device *rdev);
int sumo_dpm_enable(struct radeon_device *rdev);
int sumo_dpm_late_enable(struct radeon_device *rdev);
void sumo_dpm_disable(struct radeon_device *rdev);
int sumo_dpm_pre_set_power_state(struct radeon_device *rdev);
int sumo_dpm_set_power_state(struct radeon_device *rdev);
void sumo_dpm_post_set_power_state(struct radeon_device *rdev);
void sumo_dpm_setup_asic(struct radeon_device *rdev);
void sumo_dpm_display_configuration_changed(struct radeon_device *rdev);
void sumo_dpm_fini(struct radeon_device *rdev);
u32 sumo_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 sumo_dpm_get_mclk(struct radeon_device *rdev, bool low);
void sumo_dpm_print_power_state(struct radeon_device *rdev,
                struct radeon_ps *ps);
void sumo_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                struct seq_file *m);
int sumo_dpm_force_performance_level(struct radeon_device *rdev,
                enum radeon_dpm_forced_level level);

/*
 * cayman
 */
void cayman_fence_ring_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev);
int cayman_init(struct radeon_device *rdev);
void cayman_fini(struct radeon_device *rdev);
int cayman_suspend(struct radeon_device *rdev);
int cayman_resume(struct radeon_device *rdev);
int cayman_asic_reset(struct radeon_device *rdev);
void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cayman_vm_init(struct radeon_device *rdev);
void cayman_vm_fini(struct radeon_device *rdev);
void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                unsigned vm_id, uint64_t pd_addr);
uint32_t cayman_vm_page_flags(struct radeon_device *rdev, uint32_t flags);
int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
                struct radeon_ib *ib);
bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);

void cayman_dma_vm_copy_pages(struct radeon_device *rdev,
                struct radeon_ib *ib,
                uint64_t pe, uint64_t src,
                unsigned count);
void cayman_dma_vm_write_pages(struct radeon_device *rdev,
                struct radeon_ib *ib,
                uint64_t pe,
                uint64_t addr, unsigned count,
                uint32_t incr, uint32_t flags);
void cayman_dma_vm_set_pages(struct radeon_device *rdev,
                struct radeon_ib *ib,
                uint64_t pe,
                uint64_t addr, unsigned count,
                uint32_t incr, uint32_t flags);
void cayman_dma_vm_pad_ib(struct radeon_ib *ib);
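/*
 * Illustrative only, not part of the original header: cayman_dma_vm_copy_pages(),
 * cayman_dma_vm_write_pages(), cayman_dma_vm_set_pages() and
 * cayman_dma_vm_pad_ib() back the page table update callbacks of the
 * per-ASIC .vm table.  A sketch, assuming the callback names used in
 * radeon_asic.c:
 *
 *        .vm = {
 *                ...
 *                .copy_pages = &cayman_dma_vm_copy_pages,
 *                .write_pages = &cayman_dma_vm_write_pages,
 *                .set_pages = &cayman_dma_vm_set_pages,
 *                .pad_ib = &cayman_dma_vm_pad_ib,
 *        },
 */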

void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                unsigned vm_id, uint64_t pd_addr);

u32 cayman_gfx_get_rptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
u32 cayman_gfx_get_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
void cayman_gfx_set_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
uint32_t cayman_dma_get_rptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
uint32_t cayman_dma_get_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
void cayman_dma_set_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);

int ni_dpm_init(struct radeon_device *rdev);
void ni_dpm_setup_asic(struct radeon_device *rdev);
int ni_dpm_enable(struct radeon_device *rdev);
void ni_dpm_disable(struct radeon_device *rdev);
int ni_dpm_pre_set_power_state(struct radeon_device *rdev);
int ni_dpm_set_power_state(struct radeon_device *rdev);
void ni_dpm_post_set_power_state(struct radeon_device *rdev);
void ni_dpm_fini(struct radeon_device *rdev);
u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low);
void ni_dpm_print_power_state(struct radeon_device *rdev,
                struct radeon_ps *ps);
void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                struct seq_file *m);
int ni_dpm_force_performance_level(struct radeon_device *rdev,
                enum radeon_dpm_forced_level level);
bool ni_dpm_vblank_too_short(struct radeon_device *rdev);
u32 ni_dpm_get_current_sclk(struct radeon_device *rdev);
u32 ni_dpm_get_current_mclk(struct radeon_device *rdev);
int trinity_dpm_init(struct radeon_device *rdev);
int trinity_dpm_enable(struct radeon_device *rdev);
int trinity_dpm_late_enable(struct radeon_device *rdev);
void trinity_dpm_disable(struct radeon_device *rdev);
int trinity_dpm_pre_set_power_state(struct radeon_device *rdev);
int trinity_dpm_set_power_state(struct radeon_device *rdev);
void trinity_dpm_post_set_power_state(struct radeon_device *rdev);
void trinity_dpm_setup_asic(struct radeon_device *rdev);
void trinity_dpm_display_configuration_changed(struct radeon_device *rdev);
void trinity_dpm_fini(struct radeon_device *rdev);
u32 trinity_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 trinity_dpm_get_mclk(struct radeon_device *rdev, bool low);
void trinity_dpm_print_power_state(struct radeon_device *rdev,
                struct radeon_ps *ps);
void trinity_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                struct seq_file *m);
int trinity_dpm_force_performance_level(struct radeon_device *rdev,
                enum radeon_dpm_forced_level level);
void trinity_dpm_enable_bapm(struct radeon_device *rdev, bool enable);

/* DCE6 - SI */
void dce6_bandwidth_update(struct radeon_device *rdev);
void dce6_audio_fini(struct radeon_device *rdev);

/*
 * si
 */
void si_fence_ring_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
void si_pcie_gart_tlb_flush(struct radeon_device *rdev);
int si_init(struct radeon_device *rdev);
void si_fini(struct radeon_device *rdev);
int si_suspend(struct radeon_device *rdev);
int si_resume(struct radeon_device *rdev);
bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
bool si_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int si_asic_reset(struct radeon_device *rdev);
void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int si_irq_set(struct radeon_device *rdev);
int si_irq_process(struct radeon_device *rdev);
int si_vm_init(struct radeon_device *rdev);
void si_vm_fini(struct radeon_device *rdev);
void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                unsigned vm_id, uint64_t pd_addr);
int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
struct radeon_fence *si_copy_dma(struct radeon_device *rdev,
                uint64_t src_offset, uint64_t dst_offset,
                unsigned num_gpu_pages,
                struct reservation_object *resv);

void si_dma_vm_copy_pages(struct radeon_device *rdev,
                struct radeon_ib *ib,
                uint64_t pe, uint64_t src,
                unsigned count);
void si_dma_vm_write_pages(struct radeon_device *rdev,
                struct radeon_ib *ib,
                uint64_t pe,
                uint64_t addr, unsigned count,
                uint32_t incr, uint32_t flags);
void si_dma_vm_set_pages(struct radeon_device *rdev,
                struct radeon_ib *ib,
                uint64_t pe,
                uint64_t addr, unsigned count,
                uint32_t incr, uint32_t flags);

void si_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                unsigned vm_id, uint64_t pd_addr);
u32 si_get_xclk(struct radeon_device *rdev);
uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev);
int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int si_get_temp(struct radeon_device *rdev);
int si_dpm_init(struct radeon_device *rdev);
void si_dpm_setup_asic(struct radeon_device *rdev);
int si_dpm_enable(struct radeon_device *rdev);
int si_dpm_late_enable(struct radeon_device *rdev);
void si_dpm_disable(struct radeon_device *rdev);
int si_dpm_pre_set_power_state(struct radeon_device *rdev);
int si_dpm_set_power_state(struct radeon_device *rdev);
void si_dpm_post_set_power_state(struct radeon_device *rdev);
void si_dpm_fini(struct radeon_device *rdev);
void si_dpm_display_configuration_changed(struct radeon_device *rdev);
void si_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                struct seq_file *m);
int si_dpm_force_performance_level(struct radeon_device *rdev,
                enum radeon_dpm_forced_level level);
int si_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev,
                u32 *speed);
int si_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev,
                u32 speed);
u32 si_fan_ctrl_get_mode(struct radeon_device *rdev);
void si_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode);
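/*
 * Illustrative only, not part of the original header: a hedged sketch of the
 * SI fan control helpers above, assuming the usual 0-on-success convention:
 *
 *        u32 speed;
 *
 *        if (si_fan_ctrl_get_fan_speed_percent(rdev, &speed) == 0)
 *                DRM_INFO("current fan speed: %u%%\n", speed);
 */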
u32 si_dpm_get_current_sclk(struct radeon_device *rdev);
u32 si_dpm_get_current_mclk(struct radeon_device *rdev);

/* DCE8 - CIK */
void dce8_bandwidth_update(struct radeon_device *rdev);

/*
 * cik
 */
uint64_t cik_get_gpu_clock_counter(struct radeon_device *rdev);
u32 cik_get_xclk(struct radeon_device *rdev);
uint32_t cik_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void cik_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int cik_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int cik_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk);
void cik_sdma_fence_ring_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
bool cik_sdma_semaphore_ring_emit(struct radeon_device *rdev,
                struct radeon_ring *ring,
                struct radeon_semaphore *semaphore,
                bool emit_wait);
void cik_sdma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
struct radeon_fence *cik_copy_dma(struct radeon_device *rdev,
                uint64_t src_offset, uint64_t dst_offset,
                unsigned num_gpu_pages,
                struct reservation_object *resv);
struct radeon_fence *cik_copy_cpdma(struct radeon_device *rdev,
                uint64_t src_offset, uint64_t dst_offset,
                unsigned num_gpu_pages,
                struct reservation_object *resv);
int cik_sdma_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
int cik_sdma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
bool cik_sdma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
void cik_fence_gfx_ring_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
void cik_fence_compute_ring_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
bool cik_semaphore_ring_emit(struct radeon_device *rdev,
                struct radeon_ring *cp,
                struct radeon_semaphore *semaphore,
                bool emit_wait);
void cik_pcie_gart_tlb_flush(struct radeon_device *rdev);
int cik_init(struct radeon_device *rdev);
void cik_fini(struct radeon_device *rdev);
int cik_suspend(struct radeon_device *rdev);
int cik_resume(struct radeon_device *rdev);
bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int cik_asic_reset(struct radeon_device *rdev);
void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
int cik_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
int cik_irq_set(struct radeon_device *rdev);
int cik_irq_process(struct radeon_device *rdev);
int cik_vm_init(struct radeon_device *rdev);
void cik_vm_fini(struct radeon_device *rdev);
void cik_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                unsigned vm_id, uint64_t pd_addr);

void cik_sdma_vm_copy_pages(struct radeon_device *rdev,
                struct radeon_ib *ib,
                uint64_t pe, uint64_t src,
                unsigned count);
void cik_sdma_vm_write_pages(struct radeon_device *rdev,
                struct radeon_ib *ib,
                uint64_t pe,
                uint64_t addr, unsigned count,
                uint32_t incr, uint32_t flags);
void cik_sdma_vm_set_pages(struct radeon_device *rdev,
                struct radeon_ib *ib,
                uint64_t pe,
                uint64_t addr, unsigned count,
                uint32_t incr, uint32_t flags);
void cik_sdma_vm_pad_ib(struct radeon_ib *ib);

void cik_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                unsigned vm_id, uint64_t pd_addr);
int cik_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
u32 cik_gfx_get_rptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
u32 cik_gfx_get_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
void cik_gfx_set_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
u32 cik_compute_get_rptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
u32 cik_compute_get_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
void cik_compute_set_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
u32 cik_sdma_get_rptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
u32 cik_sdma_get_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
void cik_sdma_set_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
int ci_get_temp(struct radeon_device *rdev);
int kv_get_temp(struct radeon_device *rdev);

int ci_dpm_init(struct radeon_device *rdev);
int ci_dpm_enable(struct radeon_device *rdev);
int ci_dpm_late_enable(struct radeon_device *rdev);
void ci_dpm_disable(struct radeon_device *rdev);
int ci_dpm_pre_set_power_state(struct radeon_device *rdev);
int ci_dpm_set_power_state(struct radeon_device *rdev);
void ci_dpm_post_set_power_state(struct radeon_device *rdev);
void ci_dpm_setup_asic(struct radeon_device *rdev);
void ci_dpm_display_configuration_changed(struct radeon_device *rdev);
void ci_dpm_fini(struct radeon_device *rdev);
u32 ci_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 ci_dpm_get_mclk(struct radeon_device *rdev, bool low);
void ci_dpm_print_power_state(struct radeon_device *rdev,
                struct radeon_ps *ps);
void ci_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                struct seq_file *m);
int ci_dpm_force_performance_level(struct radeon_device *rdev,
                enum radeon_dpm_forced_level level);
bool ci_dpm_vblank_too_short(struct radeon_device *rdev);
void ci_dpm_powergate_uvd(struct radeon_device *rdev, bool gate);
u32 ci_dpm_get_current_sclk(struct radeon_device *rdev);
u32 ci_dpm_get_current_mclk(struct radeon_device *rdev);

int ci_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev,
                u32 *speed);
int ci_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev,
                u32 speed);
u32 ci_fan_ctrl_get_mode(struct radeon_device *rdev);
void ci_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode);

int kv_dpm_init(struct radeon_device *rdev);
int kv_dpm_enable(struct radeon_device *rdev);
int kv_dpm_late_enable(struct radeon_device *rdev);
void kv_dpm_disable(struct radeon_device *rdev);
int kv_dpm_pre_set_power_state(struct radeon_device *rdev);
int kv_dpm_set_power_state(struct radeon_device *rdev);
void kv_dpm_post_set_power_state(struct radeon_device *rdev);
void kv_dpm_setup_asic(struct radeon_device *rdev);
void kv_dpm_display_configuration_changed(struct radeon_device *rdev);
void kv_dpm_fini(struct radeon_device *rdev);
u32 kv_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 kv_dpm_get_mclk(struct radeon_device *rdev, bool low);
void kv_dpm_print_power_state(struct radeon_device *rdev,
                struct radeon_ps *ps);
void kv_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                struct seq_file *m);
int kv_dpm_force_performance_level(struct radeon_device *rdev,
                enum radeon_dpm_forced_level level);
void kv_dpm_powergate_uvd(struct radeon_device *rdev, bool gate);
void kv_dpm_enable_bapm(struct radeon_device *rdev, bool enable);

/* uvd v1.0 */
uint32_t uvd_v1_0_get_rptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
uint32_t uvd_v1_0_get_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
void uvd_v1_0_set_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
int uvd_v1_0_resume(struct radeon_device *rdev);

int uvd_v1_0_init(struct radeon_device *rdev);
void uvd_v1_0_fini(struct radeon_device *rdev);
int uvd_v1_0_start(struct radeon_device *rdev);
void uvd_v1_0_stop(struct radeon_device *rdev);

int uvd_v1_0_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
void uvd_v1_0_fence_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);
int uvd_v1_0_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
bool uvd_v1_0_semaphore_emit(struct radeon_device *rdev,
                struct radeon_ring *ring,
                struct radeon_semaphore *semaphore,
                bool emit_wait);
void uvd_v1_0_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);

/* uvd v2.2 */
int uvd_v2_2_resume(struct radeon_device *rdev);
void uvd_v2_2_fence_emit(struct radeon_device *rdev,
                struct radeon_fence *fence);

/* uvd v3.1 */
bool uvd_v3_1_semaphore_emit(struct radeon_device *rdev,
                struct radeon_ring *ring,
                struct radeon_semaphore *semaphore,
                bool emit_wait);

/* uvd v4.2 */
int uvd_v4_2_resume(struct radeon_device *rdev);

/* vce v1.0 */
uint32_t vce_v1_0_get_rptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
uint32_t vce_v1_0_get_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
void vce_v1_0_set_wptr(struct radeon_device *rdev,
                struct radeon_ring *ring);
int vce_v1_0_init(struct radeon_device *rdev);
int vce_v1_0_start(struct radeon_device *rdev);

/* vce v2.0 */
int vce_v2_0_resume(struct radeon_device *rdev);

#endif