/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

void atombios_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 atombios_get_backlight_level(struct radeon_encoder *radeon_encoder);
void radeon_legacy_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 radeon_legacy_get_backlight_level(struct radeon_encoder *radeon_encoder);

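/*
 * Editor's note (illustrative, not from the original header): the per-ASIC
 * functions declared below are not called directly by the core driver.  They
 * are gathered into per-family function-pointer tables (struct radeon_asic,
 * declared in radeon.h and populated in radeon_asic.c) and dispatched through
 * rdev->asic, so common code stays generation-independent.  A minimal sketch
 * of that wiring, using a simplified, hypothetical table layout:
 *
 *	static struct radeon_asic r100_asic_sketch = {
 *		.init		= &r100_init,
 *		.fini		= &r100_fini,
 *		.suspend	= &r100_suspend,
 *		.resume		= &r100_resume,
 *		.vga_set_state	= &r100_vga_set_state,
 *		// ...one entry per callback declared in this header
 *	};
 *
 * Core code then invokes rdev->asic->init(rdev) and friends without knowing
 * which chip family it is driving.
 */
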
/*
 * r100,rv100,rs100,rv200,rs200
 */
struct r100_mc_save {
	u32	GENMO_WT;
	u32	CRTC_EXT_CNTL;
	u32	CRTC_GEN_CNTL;
	u32	CRTC2_GEN_CNTL;
	u32	CUR_OFFSET;
	u32	CUR2_OFFSET;
};
int r100_init(struct radeon_device *rdev);
void r100_fini(struct radeon_device *rdev);
int r100_suspend(struct radeon_device *rdev);
int r100_resume(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
bool r100_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r100_asic_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
uint64_t r100_pci_gart_get_page_entry(uint64_t addr, uint32_t flags);
void r100_pci_gart_set_page(struct radeon_device *rdev, unsigned i,
			    uint64_t entry);
void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
bool r100_semaphore_ring_emit(struct radeon_device *rdev,
			      struct radeon_ring *cp,
			      struct radeon_semaphore *semaphore,
			      bool emit_wait);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
struct radeon_fence *r100_copy_blit(struct radeon_device *rdev,
				    uint64_t src_offset,
				    uint64_t dst_offset,
				    unsigned num_gpu_pages,
				    struct reservation_object *resv);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
int r100_debugfs_rbbm_init(struct radeon_device *rdev);
int r100_debugfs_cp_init(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
int r100_debugfs_mc_info_init(struct radeon_device *rdev);
int r100_gui_wait_for_idle(struct radeon_device *rdev);
int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r100_irq_disable(struct radeon_device *rdev);
void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_vram_init_sizes(struct radeon_device *rdev);
int r100_cp_reset(struct radeon_device *rdev);
void r100_vga_render_disable(struct radeon_device *rdev);
void r100_restore_sanity(struct radeon_device *rdev);
int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
					 struct radeon_cs_packet *pkt,
					 struct radeon_bo *robj);
int r100_cs_parse_packet0(struct radeon_cs_parser *p,
			  struct radeon_cs_packet *pkt,
			  const unsigned *auth, unsigned n,
			  radeon_packet0_check_t check);
int r100_cs_packet_parse(struct radeon_cs_parser *p,
			 struct radeon_cs_packet *pkt,
			 unsigned idx);
void r100_enable_bm(struct radeon_device *rdev);
void r100_set_common_regs(struct radeon_device *rdev);
void r100_bm_disable(struct radeon_device *rdev);
extern bool r100_gui_idle(struct radeon_device *rdev);
extern void r100_pm_misc(struct radeon_device *rdev);
extern void r100_pm_prepare(struct radeon_device *rdev);
extern void r100_pm_finish(struct radeon_device *rdev);
extern void r100_pm_init_profile(struct radeon_device *rdev);
extern void r100_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r100_page_flip(struct radeon_device *rdev, int crtc,
			   u64 crtc_base);
extern bool r100_page_flip_pending(struct radeon_device *rdev, int crtc);
extern void r100_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int r100_mc_wait_for_idle(struct radeon_device *rdev);

u32 r100_gfx_get_rptr(struct radeon_device *rdev,
		      struct radeon_ring *ring);
u32 r100_gfx_get_wptr(struct radeon_device *rdev,
		      struct radeon_ring *ring);
void r100_gfx_set_wptr(struct radeon_device *rdev,
		       struct radeon_ring *ring);

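/*
 * Editor's note (illustrative, not from the original header): the GART
 * helpers above come in pairs.  *_get_page_entry() translates a DMA address
 * plus access flags into the hardware-specific page-table entry format, and
 * *_set_page() writes that pre-built entry at slot i of the GART table; the
 * caller flushes the TLB afterwards.  A hedged sketch of the expected calling
 * pattern (the surrounding loop and variable names are hypothetical):
 *
 *	for (i = 0; i < npages; i++) {
 *		uint64_t entry = r100_pci_gart_get_page_entry(dma_addr[i], flags);
 *		r100_pci_gart_set_page(rdev, first_slot + i, entry);
 *	}
 *	r100_pci_gart_tlb_flush(rdev);
 */
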
/*
 * r200,rv250,rs300,rv280
 */
struct radeon_fence *r200_copy_dma(struct radeon_device *rdev,
				   uint64_t src_offset,
				   uint64_t dst_offset,
				   unsigned num_gpu_pages,
				   struct reservation_object *resv);
void r200_set_safe_registers(struct radeon_device *rdev);

/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_asic_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern uint64_t rv370_pcie_gart_get_page_entry(uint64_t addr, uint32_t flags);
extern void rv370_pcie_gart_set_page(struct radeon_device *rdev, unsigned i,
				     uint64_t entry);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
extern void r300_set_reg_safe(struct radeon_device *rdev);
extern void r300_mc_program(struct radeon_device *rdev);
extern void r300_mc_init(struct radeon_device *rdev);
extern void r300_clock_startup(struct radeon_device *rdev);
extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
extern int rv370_pcie_gart_init(struct radeon_device *rdev);
extern void rv370_pcie_gart_fini(struct radeon_device *rdev);
extern int rv370_pcie_gart_enable(struct radeon_device *rdev);
extern void rv370_pcie_gart_disable(struct radeon_device *rdev);
extern int r300_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
extern void r420_pm_init_profile(struct radeon_device *rdev);
extern u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg);
extern void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v);
extern int r420_debugfs_pipes_info_init(struct radeon_device *rdev);
extern void r420_pipes_init(struct radeon_device *rdev);

/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
uint64_t rs400_gart_get_page_entry(uint64_t addr, uint32_t flags);
void rs400_gart_set_page(struct radeon_device *rdev, unsigned i,
			 uint64_t entry);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int rs400_gart_init(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_adjust_size(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
extern int rs400_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rs600.
 */
extern int rs600_asic_reset(struct radeon_device *rdev);
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
void rs600_irq_disable(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
uint64_t rs600_gart_get_page_entry(uint64_t addr, uint32_t flags);
void rs600_gart_set_page(struct radeon_device *rdev, unsigned i,
			 uint64_t entry);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);
extern void rs600_pm_misc(struct radeon_device *rdev);
extern void rs600_pm_prepare(struct radeon_device *rdev);
extern void rs600_pm_finish(struct radeon_device *rdev);
extern void rs600_page_flip(struct radeon_device *rdev, int crtc,
			    u64 crtc_base);
extern bool rs600_page_flip_pending(struct radeon_device *rdev, int crtc);
void rs600_set_safe_registers(struct radeon_device *rdev);
extern void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int rs600_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
void rs690_line_buffer_adjust(struct radeon_device *rdev,
			      struct drm_display_mode *mode1,
			      struct drm_display_mode *mode2);
extern int rs690_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rv515
 */
struct rv515_mc_save {
	u32 vga_render_control;
	u32 vga_hdp_control;
	bool crtc_enabled[2];
};

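/*
 * Editor's note (illustrative, not from the original header): rv515_mc_save
 * holds the display state that must survive while the memory controller is
 * reprogrammed.  The expected usage is a save/reprogram/restore bracket,
 * sketched here with a hypothetical reprogramming step in the middle:
 *
 *	struct rv515_mc_save save;
 *
 *	rv515_mc_stop(rdev, &save);	// quiesce CRTCs, save VGA render/HDP state
 *	// ...reprogram MC base/size registers while the display is stopped...
 *	rv515_mc_resume(rdev, &save);	// restore the saved state, re-enable CRTCs
 */
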
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
void rv515_bandwidth_avivo_update(struct radeon_device *rdev);
void rv515_vga_render_disable(struct radeon_device *rdev);
void rv515_set_safe_registers(struct radeon_device *rdev);
void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_clock_startup(struct radeon_device *rdev);
void rv515_debugfs(struct radeon_device *rdev);
int rv515_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
int r520_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
int r600_dma_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
bool r600_semaphore_ring_emit(struct radeon_device *rdev,
			      struct radeon_ring *cp,
			      struct radeon_semaphore *semaphore,
			      bool emit_wait);
void r600_dma_fence_ring_emit(struct radeon_device *rdev,
			      struct radeon_fence *fence);
bool r600_dma_semaphore_ring_emit(struct radeon_device *rdev,
				  struct radeon_ring *ring,
				  struct radeon_semaphore *semaphore,
				  bool emit_wait);
void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_asic_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_dma_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
struct radeon_fence *r600_copy_cpdma(struct radeon_device *rdev,
				     uint64_t src_offset, uint64_t dst_offset,
				     unsigned num_gpu_pages,
				     struct reservation_object *resv);
struct radeon_fence *r600_copy_dma(struct radeon_device *rdev,
				   uint64_t src_offset, uint64_t dst_offset,
				   unsigned num_gpu_pages,
				   struct reservation_object *resv);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
extern void r600_mmio_hdp_flush(struct radeon_device *rdev);
extern bool r600_gui_idle(struct radeon_device *rdev);
extern void r600_pm_misc(struct radeon_device *rdev);
extern void r600_pm_init_profile(struct radeon_device *rdev);
extern void rs780_pm_init_profile(struct radeon_device *rdev);
extern uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void r600_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r600_get_pcie_lanes(struct radeon_device *rdev);
bool r600_card_posted(struct radeon_device *rdev);
void r600_cp_stop(struct radeon_device *rdev);
int r600_cp_start(struct radeon_device *rdev);
void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ring_size);
int r600_cp_resume(struct radeon_device *rdev);
void r600_cp_fini(struct radeon_device *rdev);
int r600_count_pipe_bits(uint32_t val);
int r600_mc_wait_for_idle(struct radeon_device *rdev);
int r600_pcie_gart_init(struct radeon_device *rdev);
void r600_scratch_init(struct radeon_device *rdev);
int r600_init_microcode(struct radeon_device *rdev);
u32 r600_gfx_get_rptr(struct radeon_device *rdev,
		      struct radeon_ring *ring);
u32 r600_gfx_get_wptr(struct radeon_device *rdev,
		      struct radeon_ring *ring);
void r600_gfx_set_wptr(struct radeon_device *rdev,
		       struct radeon_ring *ring);
int r600_get_allowed_info_register(struct radeon_device *rdev,
				   u32 reg, u32 *val);
/* r600 irq */
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_init(struct radeon_device *rdev);
void r600_irq_fini(struct radeon_device *rdev);
void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size);
int r600_irq_set(struct radeon_device *rdev);
void r600_irq_suspend(struct radeon_device *rdev);
void r600_disable_interrupts(struct radeon_device *rdev);
void r600_rlc_stop(struct radeon_device *rdev);
/* r600 audio */
void r600_audio_fini(struct radeon_device *rdev);
void r600_audio_set_dto(struct drm_encoder *encoder, u32 clock);
void r600_hdmi_update_avi_infoframe(struct drm_encoder *encoder, void *buffer,
				    size_t size);
void r600_hdmi_update_ACR(struct drm_encoder *encoder, uint32_t clock);
void r600_hdmi_audio_workaround(struct drm_encoder *encoder);
int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder);
void r600_hdmi_update_audio_settings(struct drm_encoder *encoder);
int r600_mc_wait_for_idle(struct radeon_device *rdev);
u32 r600_get_xclk(struct radeon_device *rdev);
uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev);
int rv6xx_get_temp(struct radeon_device *rdev);
int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int r600_dpm_pre_set_power_state(struct radeon_device *rdev);
void r600_dpm_post_set_power_state(struct radeon_device *rdev);
int r600_dpm_late_enable(struct radeon_device *rdev);
/* r600 dma */
uint32_t r600_dma_get_rptr(struct radeon_device *rdev,
			   struct radeon_ring *ring);
uint32_t r600_dma_get_wptr(struct radeon_device *rdev,
			   struct radeon_ring *ring);
void r600_dma_set_wptr(struct radeon_device *rdev,
		       struct radeon_ring *ring);
/* rv6xx dpm */
int rv6xx_dpm_init(struct radeon_device *rdev);
int rv6xx_dpm_enable(struct radeon_device *rdev);
void rv6xx_dpm_disable(struct radeon_device *rdev);
int rv6xx_dpm_set_power_state(struct radeon_device *rdev);
void rv6xx_setup_asic(struct radeon_device *rdev);
void rv6xx_dpm_display_configuration_changed(struct radeon_device *rdev);
void rv6xx_dpm_fini(struct radeon_device *rdev);
u32 rv6xx_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 rv6xx_dpm_get_mclk(struct radeon_device *rdev, bool low);
void rv6xx_dpm_print_power_state(struct radeon_device *rdev,
				 struct radeon_ps *ps);
void rv6xx_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
							struct seq_file *m);
int rv6xx_dpm_force_performance_level(struct radeon_device *rdev,
				      enum radeon_dpm_forced_level level);
u32 rv6xx_dpm_get_current_sclk(struct radeon_device *rdev);
u32 rv6xx_dpm_get_current_mclk(struct radeon_device *rdev);
/* rs780 dpm */
int rs780_dpm_init(struct radeon_device *rdev);
int rs780_dpm_enable(struct radeon_device *rdev);
void rs780_dpm_disable(struct radeon_device *rdev);
int rs780_dpm_set_power_state(struct radeon_device *rdev);
void rs780_dpm_setup_asic(struct radeon_device *rdev);
void rs780_dpm_display_configuration_changed(struct radeon_device *rdev);
void rs780_dpm_fini(struct radeon_device *rdev);
u32 rs780_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 rs780_dpm_get_mclk(struct radeon_device *rdev, bool low);
void rs780_dpm_print_power_state(struct radeon_device *rdev,
				 struct radeon_ps *ps);
void rs780_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
							struct seq_file *m);
int rs780_dpm_force_performance_level(struct radeon_device *rdev,
				      enum radeon_dpm_forced_level level);
u32 rs780_dpm_get_current_sclk(struct radeon_device *rdev);
u32 rs780_dpm_get_current_mclk(struct radeon_device *rdev);

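/*
 * Editor's note (illustrative, not from the original header): each dpm
 * implementation declared above (rv6xx_dpm_*, rs780_dpm_*) and the later
 * families below export the same set of callbacks, which are grouped into
 * the dpm portion of the per-ASIC table rather than being called by name.
 * A hedged sketch with simplified, hypothetical field names:
 *
 *	.dpm = {
 *		.init			= &rv6xx_dpm_init,
 *		.enable			= &rv6xx_dpm_enable,
 *		.disable		= &rv6xx_dpm_disable,
 *		.set_power_state	= &rv6xx_dpm_set_power_state,
 *		.get_sclk		= &rv6xx_dpm_get_sclk,
 *		.get_mclk		= &rv6xx_dpm_get_mclk,
 *		// ...and so on for the remaining dpm callbacks
 *	},
 */
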
/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
void rv770_pm_misc(struct radeon_device *rdev);
void rv770_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
bool rv770_page_flip_pending(struct radeon_device *rdev, int crtc);
void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
void r700_cp_stop(struct radeon_device *rdev);
void r700_cp_fini(struct radeon_device *rdev);
struct radeon_fence *rv770_copy_dma(struct radeon_device *rdev,
				    uint64_t src_offset, uint64_t dst_offset,
				    unsigned num_gpu_pages,
				    struct reservation_object *resv);
u32 rv770_get_xclk(struct radeon_device *rdev);
int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int rv770_get_temp(struct radeon_device *rdev);
/* rv7xx pm */
int rv770_dpm_init(struct radeon_device *rdev);
int rv770_dpm_enable(struct radeon_device *rdev);
int rv770_dpm_late_enable(struct radeon_device *rdev);
void rv770_dpm_disable(struct radeon_device *rdev);
int rv770_dpm_set_power_state(struct radeon_device *rdev);
void rv770_dpm_setup_asic(struct radeon_device *rdev);
void rv770_dpm_display_configuration_changed(struct radeon_device *rdev);
void rv770_dpm_fini(struct radeon_device *rdev);
u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low);
void rv770_dpm_print_power_state(struct radeon_device *rdev,
				 struct radeon_ps *ps);
void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
							struct seq_file *m);
int rv770_dpm_force_performance_level(struct radeon_device *rdev,
				      enum radeon_dpm_forced_level level);
bool rv770_dpm_vblank_too_short(struct radeon_device *rdev);
u32 rv770_dpm_get_current_sclk(struct radeon_device *rdev);
u32 rv770_dpm_get_current_mclk(struct radeon_device *rdev);

/*
 * evergreen
 */
struct evergreen_mc_save {
	u32 vga_render_control;
	u32 vga_hdp_control;
	bool crtc_enabled[RADEON_MAX_CRTCS];
};

void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev);
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int evergreen_asic_reset(struct radeon_device *rdev);
void evergreen_bandwidth_update(struct radeon_device *rdev);
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd);
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc);
int evergreen_irq_set(struct radeon_device *rdev);
int evergreen_irq_process(struct radeon_device *rdev);
extern int evergreen_cs_parse(struct radeon_cs_parser *p);
extern int evergreen_dma_cs_parse(struct radeon_cs_parser *p);
extern void evergreen_pm_misc(struct radeon_device *rdev);
extern void evergreen_pm_prepare(struct radeon_device *rdev);
extern void evergreen_pm_finish(struct radeon_device *rdev);
extern void sumo_pm_init_profile(struct radeon_device *rdev);
extern void btc_pm_init_profile(struct radeon_device *rdev);
int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
extern void evergreen_page_flip(struct radeon_device *rdev, int crtc,
				u64 crtc_base);
extern bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc);
extern void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc);
void evergreen_disable_interrupt_state(struct radeon_device *rdev);
int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
				   struct radeon_fence *fence);
void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
				   struct radeon_ib *ib);
struct radeon_fence *evergreen_copy_dma(struct radeon_device *rdev,
					uint64_t src_offset, uint64_t dst_offset,
					unsigned num_gpu_pages,
					struct reservation_object *resv);
int evergreen_get_temp(struct radeon_device *rdev);
int evergreen_get_allowed_info_register(struct radeon_device *rdev,
					u32 reg, u32 *val);
int sumo_get_temp(struct radeon_device *rdev);
int tn_get_temp(struct radeon_device *rdev);
int cypress_dpm_init(struct radeon_device *rdev);
void cypress_dpm_setup_asic(struct radeon_device *rdev);
int cypress_dpm_enable(struct radeon_device *rdev);
void cypress_dpm_disable(struct radeon_device *rdev);
int cypress_dpm_set_power_state(struct radeon_device *rdev);
void cypress_dpm_display_configuration_changed(struct radeon_device *rdev);
void cypress_dpm_fini(struct radeon_device *rdev);
bool cypress_dpm_vblank_too_short(struct radeon_device *rdev);
int btc_dpm_init(struct radeon_device *rdev);
void btc_dpm_setup_asic(struct radeon_device *rdev);
int btc_dpm_enable(struct radeon_device *rdev);
void btc_dpm_disable(struct radeon_device *rdev);
int btc_dpm_pre_set_power_state(struct radeon_device *rdev);
int btc_dpm_set_power_state(struct radeon_device *rdev);
void btc_dpm_post_set_power_state(struct radeon_device *rdev);
void btc_dpm_fini(struct radeon_device *rdev);
u32 btc_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 btc_dpm_get_mclk(struct radeon_device *rdev, bool low);
bool btc_dpm_vblank_too_short(struct radeon_device *rdev);
void btc_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
						      struct seq_file *m);
u32 btc_dpm_get_current_sclk(struct radeon_device *rdev);
u32 btc_dpm_get_current_mclk(struct radeon_device *rdev);
int sumo_dpm_init(struct radeon_device *rdev);
int sumo_dpm_enable(struct radeon_device *rdev);
int sumo_dpm_late_enable(struct radeon_device *rdev);
void sumo_dpm_disable(struct radeon_device *rdev);
int sumo_dpm_pre_set_power_state(struct radeon_device *rdev);
int sumo_dpm_set_power_state(struct radeon_device *rdev);
void sumo_dpm_post_set_power_state(struct radeon_device *rdev);
void sumo_dpm_setup_asic(struct radeon_device *rdev);
void sumo_dpm_display_configuration_changed(struct radeon_device *rdev);
void sumo_dpm_fini(struct radeon_device *rdev);
u32 sumo_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 sumo_dpm_get_mclk(struct radeon_device *rdev, bool low);
void sumo_dpm_print_power_state(struct radeon_device *rdev,
				struct radeon_ps *ps);
void sumo_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
						       struct seq_file *m);
int sumo_dpm_force_performance_level(struct radeon_device *rdev,
				     enum radeon_dpm_forced_level level);
u32 sumo_dpm_get_current_sclk(struct radeon_device *rdev);
u32 sumo_dpm_get_current_mclk(struct radeon_device *rdev);

/*
 * cayman
 */
void cayman_fence_ring_emit(struct radeon_device *rdev,
			    struct radeon_fence *fence);
void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev);
int cayman_init(struct radeon_device *rdev);
void cayman_fini(struct radeon_device *rdev);
int cayman_suspend(struct radeon_device *rdev);
int cayman_resume(struct radeon_device *rdev);
int cayman_asic_reset(struct radeon_device *rdev);
void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cayman_vm_init(struct radeon_device *rdev);
void cayman_vm_fini(struct radeon_device *rdev);
void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
		     unsigned vm_id, uint64_t pd_addr);
uint32_t cayman_vm_page_flags(struct radeon_device *rdev, uint32_t flags);
int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
				struct radeon_ib *ib);
bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);

void cayman_dma_vm_copy_pages(struct radeon_device *rdev,
			      struct radeon_ib *ib,
			      uint64_t pe, uint64_t src,
			      unsigned count);
void cayman_dma_vm_write_pages(struct radeon_device *rdev,
			       struct radeon_ib *ib,
			       uint64_t pe,
			       uint64_t addr, unsigned count,
			       uint32_t incr, uint32_t flags);
void cayman_dma_vm_set_pages(struct radeon_device *rdev,
			     struct radeon_ib *ib,
			     uint64_t pe,
			     uint64_t addr, unsigned count,
			     uint32_t incr, uint32_t flags);
void cayman_dma_vm_pad_ib(struct radeon_ib *ib);

void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
			 unsigned vm_id, uint64_t pd_addr);

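/*
 * Editor's note (illustrative, not from the original header): the
 * cayman_dma_vm_{copy,write,set}_pages() helpers above emit DMA packets into
 * 'ib' that update 'count' page-table entries starting at GPU address 'pe'.
 * 'addr' is the first destination address, advanced by 'incr' per entry, and
 * 'flags' carries the access bits.  Reading the parameter names, copy_pages
 * appears to source pre-built entries from 'src', write_pages to embed the
 * entries in the IB, and set_pages to program a run of identical-stride
 * entries, with pad_ib aligning the IB before submission; this summary is an
 * editor's interpretation, not wording from the original file.
 */
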
Alex Deucherea31bf62013-12-09 19:44:30 -0500642u32 cayman_gfx_get_rptr(struct radeon_device *rdev,
643 struct radeon_ring *ring);
644u32 cayman_gfx_get_wptr(struct radeon_device *rdev,
645 struct radeon_ring *ring);
646void cayman_gfx_set_wptr(struct radeon_device *rdev,
647 struct radeon_ring *ring);
648uint32_t cayman_dma_get_rptr(struct radeon_device *rdev,
649 struct radeon_ring *ring);
650uint32_t cayman_dma_get_wptr(struct radeon_device *rdev,
651 struct radeon_ring *ring);
652void cayman_dma_set_wptr(struct radeon_device *rdev,
653 struct radeon_ring *ring);
Alex Deuchere66582f2014-10-01 09:51:29 -0400654int cayman_get_allowed_info_register(struct radeon_device *rdev,
655 u32 reg, u32 *val);
Alex Deucherea31bf62013-12-09 19:44:30 -0500656
Alex Deucher69e0b572013-04-12 16:42:42 -0400657int ni_dpm_init(struct radeon_device *rdev);
658void ni_dpm_setup_asic(struct radeon_device *rdev);
659int ni_dpm_enable(struct radeon_device *rdev);
660void ni_dpm_disable(struct radeon_device *rdev);
Alex Deucherfee3d742013-01-16 14:35:39 -0500661int ni_dpm_pre_set_power_state(struct radeon_device *rdev);
Alex Deucher69e0b572013-04-12 16:42:42 -0400662int ni_dpm_set_power_state(struct radeon_device *rdev);
Alex Deucherfee3d742013-01-16 14:35:39 -0500663void ni_dpm_post_set_power_state(struct radeon_device *rdev);
Alex Deucher69e0b572013-04-12 16:42:42 -0400664void ni_dpm_fini(struct radeon_device *rdev);
665u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low);
666u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low);
667void ni_dpm_print_power_state(struct radeon_device *rdev,
668 struct radeon_ps *ps);
Alex Deucherbdf0c4f2013-06-28 17:49:02 -0400669void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
670 struct seq_file *m);
Alex Deucher170a47f2013-07-02 18:43:53 -0400671int ni_dpm_force_performance_level(struct radeon_device *rdev,
672 enum radeon_dpm_forced_level level);
Alex Deucher76ad73e2013-07-08 12:09:41 -0400673bool ni_dpm_vblank_too_short(struct radeon_device *rdev);
Alex Deucher1d633e32014-09-30 10:46:02 -0400674u32 ni_dpm_get_current_sclk(struct radeon_device *rdev);
675u32 ni_dpm_get_current_mclk(struct radeon_device *rdev);
Alex Deucherd70229f2013-04-12 16:40:41 -0400676int trinity_dpm_init(struct radeon_device *rdev);
677int trinity_dpm_enable(struct radeon_device *rdev);
Alex Deucherbda44c12013-12-19 12:03:35 -0500678int trinity_dpm_late_enable(struct radeon_device *rdev);
Alex Deucherd70229f2013-04-12 16:40:41 -0400679void trinity_dpm_disable(struct radeon_device *rdev);
Alex Deuchera284c482013-01-16 13:53:40 -0500680int trinity_dpm_pre_set_power_state(struct radeon_device *rdev);
Alex Deucherd70229f2013-04-12 16:40:41 -0400681int trinity_dpm_set_power_state(struct radeon_device *rdev);
Alex Deuchera284c482013-01-16 13:53:40 -0500682void trinity_dpm_post_set_power_state(struct radeon_device *rdev);
Alex Deucherd70229f2013-04-12 16:40:41 -0400683void trinity_dpm_setup_asic(struct radeon_device *rdev);
684void trinity_dpm_display_configuration_changed(struct radeon_device *rdev);
685void trinity_dpm_fini(struct radeon_device *rdev);
686u32 trinity_dpm_get_sclk(struct radeon_device *rdev, bool low);
687u32 trinity_dpm_get_mclk(struct radeon_device *rdev, bool low);
688void trinity_dpm_print_power_state(struct radeon_device *rdev,
689 struct radeon_ps *ps);
Alex Deucher490ab932013-06-28 12:01:38 -0400690void trinity_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
691 struct seq_file *m);
Alex Deucher9b5de592013-07-02 18:52:10 -0400692int trinity_dpm_force_performance_level(struct radeon_device *rdev,
693 enum radeon_dpm_forced_level level);
Alex Deucher11877062013-09-09 19:19:52 -0400694void trinity_dpm_enable_bapm(struct radeon_device *rdev, bool enable);
Alex Deucher7ce9cda2014-09-30 11:01:59 -0400695u32 trinity_dpm_get_current_sclk(struct radeon_device *rdev);
696u32 trinity_dpm_get_current_mclk(struct radeon_device *rdev);
Alex Deucherd70229f2013-04-12 16:40:41 -0400697
Alex Deucher43b3cd92012-03-20 17:18:00 -0400698/* DCE6 - SI */
699void dce6_bandwidth_update(struct radeon_device *rdev);
Alex Deucherb5306022013-07-31 16:51:33 -0400700void dce6_audio_fini(struct radeon_device *rdev);
Alex Deucher43b3cd92012-03-20 17:18:00 -0400701
Alex Deucher02779c02012-03-20 17:18:25 -0400702/*
703 * si
704 */
705void si_fence_ring_emit(struct radeon_device *rdev,
706 struct radeon_fence *fence);
707void si_pcie_gart_tlb_flush(struct radeon_device *rdev);
708int si_init(struct radeon_device *rdev);
709void si_fini(struct radeon_device *rdev);
710int si_suspend(struct radeon_device *rdev);
711int si_resume(struct radeon_device *rdev);
Alex Deucher123bc182013-01-24 11:37:19 -0500712bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
713bool si_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
Alex Deucher02779c02012-03-20 17:18:25 -0400714int si_asic_reset(struct radeon_device *rdev);
715void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
716int si_irq_set(struct radeon_device *rdev);
717int si_irq_process(struct radeon_device *rdev);
718int si_vm_init(struct radeon_device *rdev);
719void si_vm_fini(struct radeon_device *rdev);
Christian Königfaffaf62014-11-19 14:01:19 +0100720void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
721 unsigned vm_id, uint64_t pd_addr);
Alex Deucher02779c02012-03-20 17:18:25 -0400722int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
Christian König57d20a42014-09-04 20:01:53 +0200723struct radeon_fence *si_copy_dma(struct radeon_device *rdev,
724 uint64_t src_offset, uint64_t dst_offset,
725 unsigned num_gpu_pages,
726 struct reservation_object *resv);
Christian König03f62ab2014-07-30 21:05:17 +0200727
728void si_dma_vm_copy_pages(struct radeon_device *rdev,
729 struct radeon_ib *ib,
730 uint64_t pe, uint64_t src,
731 unsigned count);
732void si_dma_vm_write_pages(struct radeon_device *rdev,
733 struct radeon_ib *ib,
734 uint64_t pe,
735 uint64_t addr, unsigned count,
736 uint32_t incr, uint32_t flags);
737void si_dma_vm_set_pages(struct radeon_device *rdev,
738 struct radeon_ib *ib,
739 uint64_t pe,
740 uint64_t addr, unsigned count,
741 uint32_t incr, uint32_t flags);
742
Christian Königfaffaf62014-11-19 14:01:19 +0100743void si_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
744 unsigned vm_id, uint64_t pd_addr);
Alex Deucher454d2e22013-02-14 10:04:02 -0500745u32 si_get_xclk(struct radeon_device *rdev);
Alex Deucherd0418892013-01-24 10:35:23 -0500746uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev);
Christian König2539eb02013-04-08 12:41:34 +0200747int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
Alex Deucher6bd1c382013-06-21 14:38:03 -0400748int si_get_temp(struct radeon_device *rdev);
Alex Deucher4af692f2014-10-01 10:03:31 -0400749int si_get_allowed_info_register(struct radeon_device *rdev,
750 u32 reg, u32 *val);
Alex Deuchera9e61412013-06-25 17:56:16 -0400751int si_dpm_init(struct radeon_device *rdev);
752void si_dpm_setup_asic(struct radeon_device *rdev);
753int si_dpm_enable(struct radeon_device *rdev);
Alex Deucher963c1152013-12-19 13:54:35 -0500754int si_dpm_late_enable(struct radeon_device *rdev);
Alex Deuchera9e61412013-06-25 17:56:16 -0400755void si_dpm_disable(struct radeon_device *rdev);
756int si_dpm_pre_set_power_state(struct radeon_device *rdev);
757int si_dpm_set_power_state(struct radeon_device *rdev);
758void si_dpm_post_set_power_state(struct radeon_device *rdev);
759void si_dpm_fini(struct radeon_device *rdev);
760void si_dpm_display_configuration_changed(struct radeon_device *rdev);
Alex Deucher79821282013-06-28 18:02:19 -0400761void si_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
762 struct seq_file *m);
Alex Deuchera160a6a2013-07-02 18:46:28 -0400763int si_dpm_force_performance_level(struct radeon_device *rdev,
764 enum radeon_dpm_forced_level level);
Alex Deucher5e8150a2015-01-07 15:29:06 -0500765int si_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev,
766 u32 *speed);
767int si_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev,
768 u32 speed);
769u32 si_fan_ctrl_get_mode(struct radeon_device *rdev);
770void si_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode);
Alex Deucherca1110b2014-09-30 10:50:07 -0400771u32 si_dpm_get_current_sclk(struct radeon_device *rdev);
772u32 si_dpm_get_current_mclk(struct radeon_device *rdev);
Alex Deucher02779c02012-03-20 17:18:25 -0400773
Alex Deucher0672e272013-04-09 16:22:31 -0400774/* DCE8 - CIK */
775void dce8_bandwidth_update(struct radeon_device *rdev);
776
Alex Deucher44fa3462012-12-18 22:17:00 -0500777/*
778 * cik
779 */
780uint64_t cik_get_gpu_clock_counter(struct radeon_device *rdev);
Alex Deucher2c679122013-04-09 13:32:18 -0400781u32 cik_get_xclk(struct radeon_device *rdev);
Alex Deucher6e2c3c02013-04-03 19:28:32 -0400782uint32_t cik_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
783void cik_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
Christian König87167bb2013-04-09 13:39:21 -0400784int cik_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
Alex Deucher5ad6bf92013-08-22 17:09:06 -0400785int cik_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk);
Alex Deucher0672e272013-04-09 16:22:31 -0400786void cik_sdma_fence_ring_emit(struct radeon_device *rdev,
787 struct radeon_fence *fence);
Christian König1654b812013-11-12 12:58:05 +0100788bool cik_sdma_semaphore_ring_emit(struct radeon_device *rdev,
Alex Deucher0672e272013-04-09 16:22:31 -0400789 struct radeon_ring *ring,
790 struct radeon_semaphore *semaphore,
791 bool emit_wait);
792void cik_sdma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
Christian König57d20a42014-09-04 20:01:53 +0200793struct radeon_fence *cik_copy_dma(struct radeon_device *rdev,
794 uint64_t src_offset, uint64_t dst_offset,
795 unsigned num_gpu_pages,
796 struct reservation_object *resv);
797struct radeon_fence *cik_copy_cpdma(struct radeon_device *rdev,
798 uint64_t src_offset, uint64_t dst_offset,
799 unsigned num_gpu_pages,
800 struct reservation_object *resv);
Alex Deucher0672e272013-04-09 16:22:31 -0400801int cik_sdma_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
802int cik_sdma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
803bool cik_sdma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
804void cik_fence_gfx_ring_emit(struct radeon_device *rdev,
805 struct radeon_fence *fence);
806void cik_fence_compute_ring_emit(struct radeon_device *rdev,
807 struct radeon_fence *fence);
Christian König1654b812013-11-12 12:58:05 +0100808bool cik_semaphore_ring_emit(struct radeon_device *rdev,
Alex Deucher0672e272013-04-09 16:22:31 -0400809 struct radeon_ring *cp,
810 struct radeon_semaphore *semaphore,
811 bool emit_wait);
812void cik_pcie_gart_tlb_flush(struct radeon_device *rdev);
813int cik_init(struct radeon_device *rdev);
814void cik_fini(struct radeon_device *rdev);
815int cik_suspend(struct radeon_device *rdev);
816int cik_resume(struct radeon_device *rdev);
817bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
818int cik_asic_reset(struct radeon_device *rdev);
819void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
820int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
821int cik_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
822int cik_irq_set(struct radeon_device *rdev);
823int cik_irq_process(struct radeon_device *rdev);
824int cik_vm_init(struct radeon_device *rdev);
825void cik_vm_fini(struct radeon_device *rdev);
Christian Königfaffaf62014-11-19 14:01:19 +0100826void cik_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
827 unsigned vm_id, uint64_t pd_addr);
Christian König03f62ab2014-07-30 21:05:17 +0200828
void cik_sdma_vm_copy_pages(struct radeon_device *rdev,
			    struct radeon_ib *ib,
			    uint64_t pe, uint64_t src,
			    unsigned count);
void cik_sdma_vm_write_pages(struct radeon_device *rdev,
			     struct radeon_ib *ib,
			     uint64_t pe,
			     uint64_t addr, unsigned count,
			     uint32_t incr, uint32_t flags);
void cik_sdma_vm_set_pages(struct radeon_device *rdev,
			   struct radeon_ib *ib,
			   uint64_t pe,
			   uint64_t addr, unsigned count,
			   uint32_t incr, uint32_t flags);
void cik_sdma_vm_pad_ib(struct radeon_ib *ib);

void cik_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
		      unsigned vm_id, uint64_t pd_addr);
int cik_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
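
/* ring read/write pointer accessors for the gfx, compute and sdma rings */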
u32 cik_gfx_get_rptr(struct radeon_device *rdev,
		     struct radeon_ring *ring);
u32 cik_gfx_get_wptr(struct radeon_device *rdev,
		     struct radeon_ring *ring);
void cik_gfx_set_wptr(struct radeon_device *rdev,
		      struct radeon_ring *ring);
u32 cik_compute_get_rptr(struct radeon_device *rdev,
			 struct radeon_ring *ring);
u32 cik_compute_get_wptr(struct radeon_device *rdev,
			 struct radeon_ring *ring);
void cik_compute_set_wptr(struct radeon_device *rdev,
			  struct radeon_ring *ring);
u32 cik_sdma_get_rptr(struct radeon_device *rdev,
		      struct radeon_ring *ring);
u32 cik_sdma_get_wptr(struct radeon_device *rdev,
		      struct radeon_ring *ring);
void cik_sdma_set_wptr(struct radeon_device *rdev,
		       struct radeon_ring *ring);
int ci_get_temp(struct radeon_device *rdev);
int kv_get_temp(struct radeon_device *rdev);
int cik_get_allowed_info_register(struct radeon_device *rdev,
				  u32 reg, u32 *val);

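/* ci dpm: dynamic power management for the discrete CIK GPUs (Bonaire, Hawaii), implemented in ci_dpm.c */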
int ci_dpm_init(struct radeon_device *rdev);
int ci_dpm_enable(struct radeon_device *rdev);
int ci_dpm_late_enable(struct radeon_device *rdev);
void ci_dpm_disable(struct radeon_device *rdev);
int ci_dpm_pre_set_power_state(struct radeon_device *rdev);
int ci_dpm_set_power_state(struct radeon_device *rdev);
void ci_dpm_post_set_power_state(struct radeon_device *rdev);
void ci_dpm_setup_asic(struct radeon_device *rdev);
void ci_dpm_display_configuration_changed(struct radeon_device *rdev);
void ci_dpm_fini(struct radeon_device *rdev);
u32 ci_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 ci_dpm_get_mclk(struct radeon_device *rdev, bool low);
void ci_dpm_print_power_state(struct radeon_device *rdev,
			      struct radeon_ps *ps);
void ci_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
						     struct seq_file *m);
int ci_dpm_force_performance_level(struct radeon_device *rdev,
				   enum radeon_dpm_forced_level level);
bool ci_dpm_vblank_too_short(struct radeon_device *rdev);
void ci_dpm_powergate_uvd(struct radeon_device *rdev, bool gate);
u32 ci_dpm_get_current_sclk(struct radeon_device *rdev);
u32 ci_dpm_get_current_mclk(struct radeon_device *rdev);

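/* ci fan control: fan speeds are read/programmed as a percentage, mode switches between manual and automatic control */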
int ci_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev,
				      u32 *speed);
int ci_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev,
				      u32 speed);
u32 ci_fan_ctrl_get_mode(struct radeon_device *rdev);
void ci_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode);

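/* kv dpm: dynamic power management for the CIK APUs (Kaveri, Kabini, Mullins), implemented in kv_dpm.c */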
int kv_dpm_init(struct radeon_device *rdev);
int kv_dpm_enable(struct radeon_device *rdev);
int kv_dpm_late_enable(struct radeon_device *rdev);
void kv_dpm_disable(struct radeon_device *rdev);
int kv_dpm_pre_set_power_state(struct radeon_device *rdev);
int kv_dpm_set_power_state(struct radeon_device *rdev);
void kv_dpm_post_set_power_state(struct radeon_device *rdev);
void kv_dpm_setup_asic(struct radeon_device *rdev);
void kv_dpm_display_configuration_changed(struct radeon_device *rdev);
void kv_dpm_fini(struct radeon_device *rdev);
u32 kv_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 kv_dpm_get_mclk(struct radeon_device *rdev, bool low);
void kv_dpm_print_power_state(struct radeon_device *rdev,
			      struct radeon_ps *ps);
void kv_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
						     struct seq_file *m);
int kv_dpm_force_performance_level(struct radeon_device *rdev,
				   enum radeon_dpm_forced_level level);
void kv_dpm_powergate_uvd(struct radeon_device *rdev, bool gate);
void kv_dpm_enable_bapm(struct radeon_device *rdev, bool enable);
u32 kv_dpm_get_current_sclk(struct radeon_device *rdev);
u32 kv_dpm_get_current_mclk(struct radeon_device *rdev);

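/*
 * UVD (Unified Video Decoder): the helpers below are versioned by UVD block
 * revision rather than by ASIC family; each asic table picks the matching
 * uvd_vX_Y_* entry points.
 */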
/* uvd v1.0 */
uint32_t uvd_v1_0_get_rptr(struct radeon_device *rdev,
			   struct radeon_ring *ring);
uint32_t uvd_v1_0_get_wptr(struct radeon_device *rdev,
			   struct radeon_ring *ring);
void uvd_v1_0_set_wptr(struct radeon_device *rdev,
		       struct radeon_ring *ring);
int uvd_v1_0_resume(struct radeon_device *rdev);

int uvd_v1_0_init(struct radeon_device *rdev);
void uvd_v1_0_fini(struct radeon_device *rdev);
int uvd_v1_0_start(struct radeon_device *rdev);
void uvd_v1_0_stop(struct radeon_device *rdev);

int uvd_v1_0_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
void uvd_v1_0_fence_emit(struct radeon_device *rdev,
			 struct radeon_fence *fence);
int uvd_v1_0_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
bool uvd_v1_0_semaphore_emit(struct radeon_device *rdev,
			     struct radeon_ring *ring,
			     struct radeon_semaphore *semaphore,
			     bool emit_wait);
void uvd_v1_0_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);

/* uvd v2.2 */
int uvd_v2_2_resume(struct radeon_device *rdev);
void uvd_v2_2_fence_emit(struct radeon_device *rdev,
			 struct radeon_fence *fence);
bool uvd_v2_2_semaphore_emit(struct radeon_device *rdev,
			     struct radeon_ring *ring,
			     struct radeon_semaphore *semaphore,
			     bool emit_wait);

/* uvd v3.1 */
bool uvd_v3_1_semaphore_emit(struct radeon_device *rdev,
			     struct radeon_ring *ring,
			     struct radeon_semaphore *semaphore,
			     bool emit_wait);

/* uvd v4.2 */
int uvd_v4_2_resume(struct radeon_device *rdev);

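/* VCE (video encode) ring helpers, likewise versioned by VCE block revision */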
/* vce v1.0 */
uint32_t vce_v1_0_get_rptr(struct radeon_device *rdev,
			   struct radeon_ring *ring);
uint32_t vce_v1_0_get_wptr(struct radeon_device *rdev,
			   struct radeon_ring *ring);
void vce_v1_0_set_wptr(struct radeon_device *rdev,
		       struct radeon_ring *ring);
int vce_v1_0_init(struct radeon_device *rdev);
int vce_v1_0_start(struct radeon_device *rdev);

/* vce v2.0 */
int vce_v2_0_resume(struct radeon_device *rdev);

#endif