/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

void atombios_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 atombios_get_backlight_level(struct radeon_encoder *radeon_encoder);
void radeon_legacy_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 radeon_legacy_get_backlight_level(struct radeon_encoder *radeon_encoder);

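/*
 * Illustrative sketch only, not part of the upstream header: the
 * radeon_legacy_* and radeon_atom_* variants above provide the same
 * services for pre-AtomBIOS and AtomBIOS boards respectively, and the
 * driver selects one set per ASIC through its function-pointer table.
 * The hypothetical helper below merely shows the get/set pairing on an
 * AtomBIOS part.
 */
static inline void radeon_atom_clock_example(struct radeon_device *rdev)
{
        /* Read back the current engine and memory clocks ... */
        uint32_t eng_clock = radeon_atom_get_engine_clock(rdev);
        uint32_t mem_clock = radeon_atom_get_memory_clock(rdev);

        /* ... and reprogram them (here: unchanged). */
        radeon_atom_set_engine_clock(rdev, eng_clock);
        radeon_atom_set_memory_clock(rdev, mem_clock);
}
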
/*
 * r100,rv100,rs100,rv200,rs200
 */
struct r100_mc_save {
        u32 GENMO_WT;
        u32 CRTC_EXT_CNTL;
        u32 CRTC_GEN_CNTL;
        u32 CRTC2_GEN_CNTL;
        u32 CUR_OFFSET;
        u32 CUR2_OFFSET;
};
int r100_init(struct radeon_device *rdev);
void r100_fini(struct radeon_device *rdev);
int r100_suspend(struct radeon_device *rdev);
int r100_resume(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
bool r100_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r100_asic_reset(struct radeon_device *rdev, bool hard);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
uint64_t r100_pci_gart_get_page_entry(uint64_t addr, uint32_t flags);
void r100_pci_gart_set_page(struct radeon_device *rdev, unsigned i,
                            uint64_t entry);
void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
bool r100_semaphore_ring_emit(struct radeon_device *rdev,
                              struct radeon_ring *cp,
                              struct radeon_semaphore *semaphore,
                              bool emit_wait);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
struct radeon_fence *r100_copy_blit(struct radeon_device *rdev,
                                    uint64_t src_offset,
                                    uint64_t dst_offset,
                                    unsigned num_gpu_pages,
                                    struct reservation_object *resv);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
                           enum radeon_hpd_id hpd);
int r100_debugfs_rbbm_init(struct radeon_device *rdev);
int r100_debugfs_cp_init(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
int r100_debugfs_mc_info_init(struct radeon_device *rdev);
int r100_gui_wait_for_idle(struct radeon_device *rdev);
int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r100_irq_disable(struct radeon_device *rdev);
void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_vram_init_sizes(struct radeon_device *rdev);
int r100_cp_reset(struct radeon_device *rdev);
void r100_vga_render_disable(struct radeon_device *rdev);
void r100_restore_sanity(struct radeon_device *rdev);
int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
                                         struct radeon_cs_packet *pkt,
                                         struct radeon_bo *robj);
int r100_cs_parse_packet0(struct radeon_cs_parser *p,
                          struct radeon_cs_packet *pkt,
                          const unsigned *auth, unsigned n,
                          radeon_packet0_check_t check);
int r100_cs_packet_parse(struct radeon_cs_parser *p,
                         struct radeon_cs_packet *pkt,
                         unsigned idx);
void r100_enable_bm(struct radeon_device *rdev);
void r100_set_common_regs(struct radeon_device *rdev);
void r100_bm_disable(struct radeon_device *rdev);
extern bool r100_gui_idle(struct radeon_device *rdev);
extern void r100_pm_misc(struct radeon_device *rdev);
extern void r100_pm_prepare(struct radeon_device *rdev);
extern void r100_pm_finish(struct radeon_device *rdev);
extern void r100_pm_init_profile(struct radeon_device *rdev);
extern void r100_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r100_page_flip(struct radeon_device *rdev, int crtc,
                           u64 crtc_base, bool async);
extern bool r100_page_flip_pending(struct radeon_device *rdev, int crtc);
extern void r100_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int r100_mc_wait_for_idle(struct radeon_device *rdev);

u32 r100_gfx_get_rptr(struct radeon_device *rdev,
                      struct radeon_ring *ring);
u32 r100_gfx_get_wptr(struct radeon_device *rdev,
                      struct radeon_ring *ring);
void r100_gfx_set_wptr(struct radeon_device *rdev,
                       struct radeon_ring *ring);

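/*
 * Illustrative sketch only, not part of the upstream header:
 * r100_mc_stop() and r100_mc_resume() bracket memory-controller
 * reprogramming, with struct r100_mc_save holding the display state that
 * must survive the update. The hypothetical helper below shows the
 * pattern; the actual register programming is elided.
 */
static inline void r100_mc_reprogram_example(struct radeon_device *rdev)
{
        struct r100_mc_save save;

        r100_mc_stop(rdev, &save);
        /* ... update VRAM/GTT location registers while the MC is quiet ... */
        r100_mc_resume(rdev, &save);
}
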
/*
 * r200,rv250,rs300,rv280
 */
struct radeon_fence *r200_copy_dma(struct radeon_device *rdev,
                                   uint64_t src_offset,
                                   uint64_t dst_offset,
                                   unsigned num_gpu_pages,
                                   struct reservation_object *resv);
void r200_set_safe_registers(struct radeon_device *rdev);

/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_asic_reset(struct radeon_device *rdev, bool hard);
extern void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
                                 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern uint64_t rv370_pcie_gart_get_page_entry(uint64_t addr, uint32_t flags);
extern void rv370_pcie_gart_set_page(struct radeon_device *rdev, unsigned i,
                                     uint64_t entry);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
extern void r300_set_reg_safe(struct radeon_device *rdev);
extern void r300_mc_program(struct radeon_device *rdev);
extern void r300_mc_init(struct radeon_device *rdev);
extern void r300_clock_startup(struct radeon_device *rdev);
extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
extern int rv370_pcie_gart_init(struct radeon_device *rdev);
extern void rv370_pcie_gart_fini(struct radeon_device *rdev);
extern int rv370_pcie_gart_enable(struct radeon_device *rdev);
extern void rv370_pcie_gart_disable(struct radeon_device *rdev);
extern int r300_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
extern void r420_pm_init_profile(struct radeon_device *rdev);
extern u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg);
extern void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v);
extern int r420_debugfs_pipes_info_init(struct radeon_device *rdev);
extern void r420_pipes_init(struct radeon_device *rdev);

/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
uint64_t rs400_gart_get_page_entry(uint64_t addr, uint32_t flags);
void rs400_gart_set_page(struct radeon_device *rdev, unsigned i,
                         uint64_t entry);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int rs400_gart_init(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_adjust_size(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
extern int rs400_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rs600.
 */
extern int rs600_asic_reset(struct radeon_device *rdev, bool hard);
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
void rs600_irq_disable(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
uint64_t rs600_gart_get_page_entry(uint64_t addr, uint32_t flags);
void rs600_gart_set_page(struct radeon_device *rdev, unsigned i,
                         uint64_t entry);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
                            enum radeon_hpd_id hpd);
extern void rs600_pm_misc(struct radeon_device *rdev);
extern void rs600_pm_prepare(struct radeon_device *rdev);
extern void rs600_pm_finish(struct radeon_device *rdev);
extern void rs600_page_flip(struct radeon_device *rdev, int crtc,
                            u64 crtc_base, bool async);
extern bool rs600_page_flip_pending(struct radeon_device *rdev, int crtc);
void rs600_set_safe_registers(struct radeon_device *rdev);
extern void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int rs600_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
void rs690_line_buffer_adjust(struct radeon_device *rdev,
                              struct drm_display_mode *mode1,
                              struct drm_display_mode *mode2);
extern int rs690_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * rv515
 */
struct rv515_mc_save {
        u32 vga_render_control;
        u32 vga_hdp_control;
        bool crtc_enabled[2];
};

int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
void rv515_bandwidth_avivo_update(struct radeon_device *rdev);
void rv515_vga_render_disable(struct radeon_device *rdev);
void rv515_set_safe_registers(struct radeon_device *rdev);
void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_clock_startup(struct radeon_device *rdev);
void rv515_debugfs(struct radeon_device *rdev);
int rv515_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
int r520_mc_wait_for_idle(struct radeon_device *rdev);

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
int r600_dma_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
bool r600_semaphore_ring_emit(struct radeon_device *rdev,
                              struct radeon_ring *cp,
                              struct radeon_semaphore *semaphore,
                              bool emit_wait);
void r600_dma_fence_ring_emit(struct radeon_device *rdev,
                              struct radeon_fence *fence);
bool r600_dma_semaphore_ring_emit(struct radeon_device *rdev,
                                  struct radeon_ring *ring,
                                  struct radeon_semaphore *semaphore,
                                  bool emit_wait);
void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_asic_reset(struct radeon_device *rdev, bool hard);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_dma_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
struct radeon_fence *r600_copy_cpdma(struct radeon_device *rdev,
                                     uint64_t src_offset, uint64_t dst_offset,
                                     unsigned num_gpu_pages,
                                     struct reservation_object *resv);
struct radeon_fence *r600_copy_dma(struct radeon_device *rdev,
                                   uint64_t src_offset, uint64_t dst_offset,
                                   unsigned num_gpu_pages,
                                   struct reservation_object *resv);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
                           enum radeon_hpd_id hpd);
extern void r600_mmio_hdp_flush(struct radeon_device *rdev);
extern bool r600_gui_idle(struct radeon_device *rdev);
extern void r600_pm_misc(struct radeon_device *rdev);
extern void r600_pm_init_profile(struct radeon_device *rdev);
extern void rs780_pm_init_profile(struct radeon_device *rdev);
extern uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void r600_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r600_get_pcie_lanes(struct radeon_device *rdev);
bool r600_card_posted(struct radeon_device *rdev);
void r600_cp_stop(struct radeon_device *rdev);
int r600_cp_start(struct radeon_device *rdev);
void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ring_size);
int r600_cp_resume(struct radeon_device *rdev);
void r600_cp_fini(struct radeon_device *rdev);
int r600_count_pipe_bits(uint32_t val);
int r600_mc_wait_for_idle(struct radeon_device *rdev);
int r600_pcie_gart_init(struct radeon_device *rdev);
void r600_scratch_init(struct radeon_device *rdev);
int r600_init_microcode(struct radeon_device *rdev);
u32 r600_gfx_get_rptr(struct radeon_device *rdev,
                      struct radeon_ring *ring);
u32 r600_gfx_get_wptr(struct radeon_device *rdev,
                      struct radeon_ring *ring);
void r600_gfx_set_wptr(struct radeon_device *rdev,
                       struct radeon_ring *ring);
int r600_get_allowed_info_register(struct radeon_device *rdev,
                                   u32 reg, u32 *val);
/* r600 irq */
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_init(struct radeon_device *rdev);
void r600_irq_fini(struct radeon_device *rdev);
void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size);
int r600_irq_set(struct radeon_device *rdev);
void r600_irq_suspend(struct radeon_device *rdev);
void r600_disable_interrupts(struct radeon_device *rdev);
void r600_rlc_stop(struct radeon_device *rdev);
/* r600 audio */
void r600_audio_fini(struct radeon_device *rdev);
void r600_audio_set_dto(struct drm_encoder *encoder, u32 clock);
void r600_hdmi_update_avi_infoframe(struct drm_encoder *encoder, void *buffer,
                                    size_t size);
void r600_hdmi_update_ACR(struct drm_encoder *encoder, uint32_t clock);
void r600_hdmi_audio_workaround(struct drm_encoder *encoder);
int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder);
void r600_hdmi_update_audio_settings(struct drm_encoder *encoder);
int r600_mc_wait_for_idle(struct radeon_device *rdev);
u32 r600_get_xclk(struct radeon_device *rdev);
uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev);
int rv6xx_get_temp(struct radeon_device *rdev);
int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int r600_dpm_pre_set_power_state(struct radeon_device *rdev);
void r600_dpm_post_set_power_state(struct radeon_device *rdev);
int r600_dpm_late_enable(struct radeon_device *rdev);
/* r600 dma */
uint32_t r600_dma_get_rptr(struct radeon_device *rdev,
                           struct radeon_ring *ring);
uint32_t r600_dma_get_wptr(struct radeon_device *rdev,
                           struct radeon_ring *ring);
void r600_dma_set_wptr(struct radeon_device *rdev,
                       struct radeon_ring *ring);
/* rv6xx dpm */
int rv6xx_dpm_init(struct radeon_device *rdev);
int rv6xx_dpm_enable(struct radeon_device *rdev);
void rv6xx_dpm_disable(struct radeon_device *rdev);
int rv6xx_dpm_set_power_state(struct radeon_device *rdev);
void rv6xx_setup_asic(struct radeon_device *rdev);
void rv6xx_dpm_display_configuration_changed(struct radeon_device *rdev);
void rv6xx_dpm_fini(struct radeon_device *rdev);
u32 rv6xx_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 rv6xx_dpm_get_mclk(struct radeon_device *rdev, bool low);
void rv6xx_dpm_print_power_state(struct radeon_device *rdev,
                                 struct radeon_ps *ps);
void rv6xx_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                                                        struct seq_file *m);
int rv6xx_dpm_force_performance_level(struct radeon_device *rdev,
                                      enum radeon_dpm_forced_level level);
u32 rv6xx_dpm_get_current_sclk(struct radeon_device *rdev);
u32 rv6xx_dpm_get_current_mclk(struct radeon_device *rdev);
/* rs780 dpm */
int rs780_dpm_init(struct radeon_device *rdev);
int rs780_dpm_enable(struct radeon_device *rdev);
void rs780_dpm_disable(struct radeon_device *rdev);
int rs780_dpm_set_power_state(struct radeon_device *rdev);
void rs780_dpm_setup_asic(struct radeon_device *rdev);
void rs780_dpm_display_configuration_changed(struct radeon_device *rdev);
void rs780_dpm_fini(struct radeon_device *rdev);
u32 rs780_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 rs780_dpm_get_mclk(struct radeon_device *rdev, bool low);
void rs780_dpm_print_power_state(struct radeon_device *rdev,
                                 struct radeon_ps *ps);
void rs780_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                                                        struct seq_file *m);
int rs780_dpm_force_performance_level(struct radeon_device *rdev,
                                      enum radeon_dpm_forced_level level);
u32 rs780_dpm_get_current_sclk(struct radeon_device *rdev);
u32 rs780_dpm_get_current_mclk(struct radeon_device *rdev);

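/*
 * Illustrative sketch only, not part of the upstream header: the copy
 * entry points above schedule an asynchronous transfer and return a
 * fence for it. A caller that needs the copy to complete would wait on
 * and then drop that fence, roughly as in the hypothetical helper below
 * (radeon_fence_wait()/radeon_fence_unref() are declared in radeon.h;
 * error handling is reduced to the minimum).
 */
static inline int r600_copy_dma_sync_example(struct radeon_device *rdev,
                                             uint64_t src_offset,
                                             uint64_t dst_offset,
                                             unsigned num_gpu_pages,
                                             struct reservation_object *resv)
{
        struct radeon_fence *fence;
        int r;

        fence = r600_copy_dma(rdev, src_offset, dst_offset, num_gpu_pages, resv);
        if (IS_ERR(fence))
                return PTR_ERR(fence);

        /* Block until the DMA engine signals the fence, then release it. */
        r = radeon_fence_wait(fence, false);
        radeon_fence_unref(&fence);
        return r;
}
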
/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
void rv770_pm_misc(struct radeon_device *rdev);
void rv770_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base,
                     bool async);
bool rv770_page_flip_pending(struct radeon_device *rdev, int crtc);
void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
void r700_cp_stop(struct radeon_device *rdev);
void r700_cp_fini(struct radeon_device *rdev);
struct radeon_fence *rv770_copy_dma(struct radeon_device *rdev,
                                    uint64_t src_offset, uint64_t dst_offset,
                                    unsigned num_gpu_pages,
                                    struct reservation_object *resv);
u32 rv770_get_xclk(struct radeon_device *rdev);
int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int rv770_get_temp(struct radeon_device *rdev);
/* rv7xx pm */
int rv770_dpm_init(struct radeon_device *rdev);
int rv770_dpm_enable(struct radeon_device *rdev);
int rv770_dpm_late_enable(struct radeon_device *rdev);
void rv770_dpm_disable(struct radeon_device *rdev);
int rv770_dpm_set_power_state(struct radeon_device *rdev);
void rv770_dpm_setup_asic(struct radeon_device *rdev);
void rv770_dpm_display_configuration_changed(struct radeon_device *rdev);
void rv770_dpm_fini(struct radeon_device *rdev);
u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low);
void rv770_dpm_print_power_state(struct radeon_device *rdev,
                                 struct radeon_ps *ps);
void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                                                        struct seq_file *m);
int rv770_dpm_force_performance_level(struct radeon_device *rdev,
                                      enum radeon_dpm_forced_level level);
bool rv770_dpm_vblank_too_short(struct radeon_device *rdev);
u32 rv770_dpm_get_current_sclk(struct radeon_device *rdev);
u32 rv770_dpm_get_current_mclk(struct radeon_device *rdev);

/*
 * evergreen
 */
struct evergreen_mc_save {
        u32 vga_render_control;
        u32 vga_hdp_control;
        bool crtc_enabled[RADEON_MAX_CRTCS];
};

void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev);
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int evergreen_asic_reset(struct radeon_device *rdev, bool hard);
void evergreen_bandwidth_update(struct radeon_device *rdev);
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
                                enum radeon_hpd_id hpd);
u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc);
int evergreen_irq_set(struct radeon_device *rdev);
int evergreen_irq_process(struct radeon_device *rdev);
extern int evergreen_cs_parse(struct radeon_cs_parser *p);
extern int evergreen_dma_cs_parse(struct radeon_cs_parser *p);
extern void evergreen_pm_misc(struct radeon_device *rdev);
extern void evergreen_pm_prepare(struct radeon_device *rdev);
extern void evergreen_pm_finish(struct radeon_device *rdev);
extern void sumo_pm_init_profile(struct radeon_device *rdev);
extern void btc_pm_init_profile(struct radeon_device *rdev);
int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
extern void evergreen_page_flip(struct radeon_device *rdev, int crtc,
                                u64 crtc_base, bool async);
extern bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc);
extern void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc);
void evergreen_disable_interrupt_state(struct radeon_device *rdev);
int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
                                   struct radeon_fence *fence);
void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
                                   struct radeon_ib *ib);
struct radeon_fence *evergreen_copy_dma(struct radeon_device *rdev,
                                        uint64_t src_offset, uint64_t dst_offset,
                                        unsigned num_gpu_pages,
                                        struct reservation_object *resv);
int evergreen_get_temp(struct radeon_device *rdev);
int evergreen_get_allowed_info_register(struct radeon_device *rdev,
                                        u32 reg, u32 *val);
int sumo_get_temp(struct radeon_device *rdev);
int tn_get_temp(struct radeon_device *rdev);
int cypress_dpm_init(struct radeon_device *rdev);
void cypress_dpm_setup_asic(struct radeon_device *rdev);
int cypress_dpm_enable(struct radeon_device *rdev);
void cypress_dpm_disable(struct radeon_device *rdev);
int cypress_dpm_set_power_state(struct radeon_device *rdev);
void cypress_dpm_display_configuration_changed(struct radeon_device *rdev);
void cypress_dpm_fini(struct radeon_device *rdev);
bool cypress_dpm_vblank_too_short(struct radeon_device *rdev);
int btc_dpm_init(struct radeon_device *rdev);
void btc_dpm_setup_asic(struct radeon_device *rdev);
int btc_dpm_enable(struct radeon_device *rdev);
void btc_dpm_disable(struct radeon_device *rdev);
int btc_dpm_pre_set_power_state(struct radeon_device *rdev);
int btc_dpm_set_power_state(struct radeon_device *rdev);
void btc_dpm_post_set_power_state(struct radeon_device *rdev);
void btc_dpm_fini(struct radeon_device *rdev);
u32 btc_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 btc_dpm_get_mclk(struct radeon_device *rdev, bool low);
bool btc_dpm_vblank_too_short(struct radeon_device *rdev);
void btc_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                                                     struct seq_file *m);
u32 btc_dpm_get_current_sclk(struct radeon_device *rdev);
u32 btc_dpm_get_current_mclk(struct radeon_device *rdev);
int sumo_dpm_init(struct radeon_device *rdev);
int sumo_dpm_enable(struct radeon_device *rdev);
int sumo_dpm_late_enable(struct radeon_device *rdev);
void sumo_dpm_disable(struct radeon_device *rdev);
int sumo_dpm_pre_set_power_state(struct radeon_device *rdev);
int sumo_dpm_set_power_state(struct radeon_device *rdev);
void sumo_dpm_post_set_power_state(struct radeon_device *rdev);
void sumo_dpm_setup_asic(struct radeon_device *rdev);
void sumo_dpm_display_configuration_changed(struct radeon_device *rdev);
void sumo_dpm_fini(struct radeon_device *rdev);
u32 sumo_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 sumo_dpm_get_mclk(struct radeon_device *rdev, bool low);
void sumo_dpm_print_power_state(struct radeon_device *rdev,
                                struct radeon_ps *ps);
void sumo_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                                                      struct seq_file *m);
int sumo_dpm_force_performance_level(struct radeon_device *rdev,
                                     enum radeon_dpm_forced_level level);
u32 sumo_dpm_get_current_sclk(struct radeon_device *rdev);
u32 sumo_dpm_get_current_mclk(struct radeon_device *rdev);

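/*
 * Illustrative sketch only, not part of the upstream header:
 * evergreen_page_flip() programs the new scanout base address and
 * evergreen_page_flip_pending() reports whether the hardware has latched
 * it yet. The hypothetical helper below simply busy-waits for the flip to
 * take effect; the core driver integrates this with vblank handling,
 * which is omitted here.
 */
static inline void evergreen_page_flip_sync_example(struct radeon_device *rdev,
                                                    int crtc, u64 crtc_base)
{
        /* Request a non-asynchronous (vblank-synchronized) flip. */
        evergreen_page_flip(rdev, crtc, crtc_base, false);

        /* Poll until the new base address has been taken over. */
        while (evergreen_page_flip_pending(rdev, crtc))
                cpu_relax();
}
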
/*
 * cayman
 */
void cayman_fence_ring_emit(struct radeon_device *rdev,
                            struct radeon_fence *fence);
void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev);
int cayman_init(struct radeon_device *rdev);
void cayman_fini(struct radeon_device *rdev);
int cayman_suspend(struct radeon_device *rdev);
int cayman_resume(struct radeon_device *rdev);
int cayman_asic_reset(struct radeon_device *rdev, bool hard);
void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cayman_vm_init(struct radeon_device *rdev);
void cayman_vm_fini(struct radeon_device *rdev);
void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                     unsigned vm_id, uint64_t pd_addr);
uint32_t cayman_vm_page_flags(struct radeon_device *rdev, uint32_t flags);
int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
                                struct radeon_ib *ib);
bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);

void cayman_dma_vm_copy_pages(struct radeon_device *rdev,
                              struct radeon_ib *ib,
                              uint64_t pe, uint64_t src,
                              unsigned count);
void cayman_dma_vm_write_pages(struct radeon_device *rdev,
                               struct radeon_ib *ib,
                               uint64_t pe,
                               uint64_t addr, unsigned count,
                               uint32_t incr, uint32_t flags);
void cayman_dma_vm_set_pages(struct radeon_device *rdev,
                             struct radeon_ib *ib,
                             uint64_t pe,
                             uint64_t addr, unsigned count,
                             uint32_t incr, uint32_t flags);
void cayman_dma_vm_pad_ib(struct radeon_ib *ib);

void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                         unsigned vm_id, uint64_t pd_addr);

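/*
 * Illustrative sketch only, not part of the upstream header: the
 * cayman_dma_vm_*_pages() helpers above emit GPUVM page-table updates
 * into an indirect buffer ('pe' is the GPU address of the first entry,
 * each of 'count' entries stepping 'addr' by 'incr' bytes, with access
 * bits derived from 'flags'), and cayman_dma_vm_pad_ib() pads the IB to
 * the alignment the DMA engine expects. The helper below is a
 * hypothetical wrapper showing the call order for a simple linear
 * mapping.
 */
static inline void cayman_vm_map_range_example(struct radeon_device *rdev,
                                               struct radeon_ib *ib,
                                               uint64_t pe, uint64_t addr,
                                               unsigned count, uint32_t incr,
                                               uint32_t flags)
{
        /* Emit 'count' page-table entries starting at 'pe'. */
        cayman_dma_vm_write_pages(rdev, ib, pe, addr, count, incr, flags);

        /* Pad the indirect buffer before submission. */
        cayman_dma_vm_pad_ib(ib);
}
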
u32 cayman_gfx_get_rptr(struct radeon_device *rdev,
                        struct radeon_ring *ring);
u32 cayman_gfx_get_wptr(struct radeon_device *rdev,
                        struct radeon_ring *ring);
void cayman_gfx_set_wptr(struct radeon_device *rdev,
                         struct radeon_ring *ring);
uint32_t cayman_dma_get_rptr(struct radeon_device *rdev,
                             struct radeon_ring *ring);
uint32_t cayman_dma_get_wptr(struct radeon_device *rdev,
                             struct radeon_ring *ring);
void cayman_dma_set_wptr(struct radeon_device *rdev,
                         struct radeon_ring *ring);
int cayman_get_allowed_info_register(struct radeon_device *rdev,
                                     u32 reg, u32 *val);

int ni_dpm_init(struct radeon_device *rdev);
void ni_dpm_setup_asic(struct radeon_device *rdev);
int ni_dpm_enable(struct radeon_device *rdev);
void ni_dpm_disable(struct radeon_device *rdev);
int ni_dpm_pre_set_power_state(struct radeon_device *rdev);
int ni_dpm_set_power_state(struct radeon_device *rdev);
void ni_dpm_post_set_power_state(struct radeon_device *rdev);
void ni_dpm_fini(struct radeon_device *rdev);
u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low);
void ni_dpm_print_power_state(struct radeon_device *rdev,
                              struct radeon_ps *ps);
void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                                                    struct seq_file *m);
int ni_dpm_force_performance_level(struct radeon_device *rdev,
                                   enum radeon_dpm_forced_level level);
bool ni_dpm_vblank_too_short(struct radeon_device *rdev);
u32 ni_dpm_get_current_sclk(struct radeon_device *rdev);
u32 ni_dpm_get_current_mclk(struct radeon_device *rdev);
int trinity_dpm_init(struct radeon_device *rdev);
int trinity_dpm_enable(struct radeon_device *rdev);
int trinity_dpm_late_enable(struct radeon_device *rdev);
void trinity_dpm_disable(struct radeon_device *rdev);
int trinity_dpm_pre_set_power_state(struct radeon_device *rdev);
int trinity_dpm_set_power_state(struct radeon_device *rdev);
void trinity_dpm_post_set_power_state(struct radeon_device *rdev);
void trinity_dpm_setup_asic(struct radeon_device *rdev);
void trinity_dpm_display_configuration_changed(struct radeon_device *rdev);
void trinity_dpm_fini(struct radeon_device *rdev);
u32 trinity_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 trinity_dpm_get_mclk(struct radeon_device *rdev, bool low);
void trinity_dpm_print_power_state(struct radeon_device *rdev,
                                   struct radeon_ps *ps);
void trinity_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                                                          struct seq_file *m);
int trinity_dpm_force_performance_level(struct radeon_device *rdev,
                                        enum radeon_dpm_forced_level level);
void trinity_dpm_enable_bapm(struct radeon_device *rdev, bool enable);
u32 trinity_dpm_get_current_sclk(struct radeon_device *rdev);
u32 trinity_dpm_get_current_mclk(struct radeon_device *rdev);
int tn_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk);

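/*
 * Illustrative sketch only, not part of the upstream header: the dpm
 * backends above split a power-state transition into pre/set/post
 * phases. The hypothetical helper below shows the call order for the
 * cayman (ni) backend; power-state selection and the locking done by the
 * core power-management code are omitted.
 */
static inline int ni_dpm_change_power_state_example(struct radeon_device *rdev)
{
        int r;

        r = ni_dpm_pre_set_power_state(rdev);
        if (r)
                return r;
        r = ni_dpm_set_power_state(rdev);
        if (r)
                return r;
        ni_dpm_post_set_power_state(rdev);
        return 0;
}
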
Alex Deucher43b3cd92012-03-20 17:18:00 -0400700/* DCE6 - SI */
701void dce6_bandwidth_update(struct radeon_device *rdev);
Alex Deucherb5306022013-07-31 16:51:33 -0400702void dce6_audio_fini(struct radeon_device *rdev);
Alex Deucher43b3cd92012-03-20 17:18:00 -0400703
Alex Deucher02779c02012-03-20 17:18:25 -0400704/*
705 * si
706 */
707void si_fence_ring_emit(struct radeon_device *rdev,
708 struct radeon_fence *fence);
709void si_pcie_gart_tlb_flush(struct radeon_device *rdev);
710int si_init(struct radeon_device *rdev);
711void si_fini(struct radeon_device *rdev);
712int si_suspend(struct radeon_device *rdev);
713int si_resume(struct radeon_device *rdev);
Alex Deucher123bc182013-01-24 11:37:19 -0500714bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
715bool si_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
Jérome Glisse71fe2892016-03-18 16:58:38 +0100716int si_asic_reset(struct radeon_device *rdev, bool hard);
Alex Deucher02779c02012-03-20 17:18:25 -0400717void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
718int si_irq_set(struct radeon_device *rdev);
719int si_irq_process(struct radeon_device *rdev);
720int si_vm_init(struct radeon_device *rdev);
721void si_vm_fini(struct radeon_device *rdev);
Christian Königfaffaf62014-11-19 14:01:19 +0100722void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
723 unsigned vm_id, uint64_t pd_addr);
Alex Deucher02779c02012-03-20 17:18:25 -0400724int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
Christian König57d20a42014-09-04 20:01:53 +0200725struct radeon_fence *si_copy_dma(struct radeon_device *rdev,
726 uint64_t src_offset, uint64_t dst_offset,
727 unsigned num_gpu_pages,
728 struct reservation_object *resv);
Christian König03f62ab2014-07-30 21:05:17 +0200729
730void si_dma_vm_copy_pages(struct radeon_device *rdev,
731 struct radeon_ib *ib,
732 uint64_t pe, uint64_t src,
733 unsigned count);
734void si_dma_vm_write_pages(struct radeon_device *rdev,
735 struct radeon_ib *ib,
736 uint64_t pe,
737 uint64_t addr, unsigned count,
738 uint32_t incr, uint32_t flags);
739void si_dma_vm_set_pages(struct radeon_device *rdev,
740 struct radeon_ib *ib,
741 uint64_t pe,
742 uint64_t addr, unsigned count,
743 uint32_t incr, uint32_t flags);
744
Christian Königfaffaf62014-11-19 14:01:19 +0100745void si_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
746 unsigned vm_id, uint64_t pd_addr);
Alex Deucher454d2e22013-02-14 10:04:02 -0500747u32 si_get_xclk(struct radeon_device *rdev);
Alex Deucherd0418892013-01-24 10:35:23 -0500748uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev);
Christian König2539eb02013-04-08 12:41:34 +0200749int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
Christian Königb7af6302015-05-11 22:01:49 +0200750int si_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk);
Alex Deucher6bd1c382013-06-21 14:38:03 -0400751int si_get_temp(struct radeon_device *rdev);
Alex Deucher4af692f2014-10-01 10:03:31 -0400752int si_get_allowed_info_register(struct radeon_device *rdev,
753 u32 reg, u32 *val);
Alex Deuchera9e61412013-06-25 17:56:16 -0400754int si_dpm_init(struct radeon_device *rdev);
755void si_dpm_setup_asic(struct radeon_device *rdev);
756int si_dpm_enable(struct radeon_device *rdev);
Alex Deucher963c1152013-12-19 13:54:35 -0500757int si_dpm_late_enable(struct radeon_device *rdev);
Alex Deuchera9e61412013-06-25 17:56:16 -0400758void si_dpm_disable(struct radeon_device *rdev);
759int si_dpm_pre_set_power_state(struct radeon_device *rdev);
760int si_dpm_set_power_state(struct radeon_device *rdev);
761void si_dpm_post_set_power_state(struct radeon_device *rdev);
762void si_dpm_fini(struct radeon_device *rdev);
763void si_dpm_display_configuration_changed(struct radeon_device *rdev);
Alex Deucher79821282013-06-28 18:02:19 -0400764void si_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
765 struct seq_file *m);
Alex Deuchera160a6a2013-07-02 18:46:28 -0400766int si_dpm_force_performance_level(struct radeon_device *rdev,
767 enum radeon_dpm_forced_level level);
Alex Deucher5e8150a2015-01-07 15:29:06 -0500768int si_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev,
769 u32 *speed);
770int si_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev,
771 u32 speed);
772u32 si_fan_ctrl_get_mode(struct radeon_device *rdev);
773void si_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode);
Alex Deucherca1110b2014-09-30 10:50:07 -0400774u32 si_dpm_get_current_sclk(struct radeon_device *rdev);
775u32 si_dpm_get_current_mclk(struct radeon_device *rdev);
Alex Deucher02779c02012-03-20 17:18:25 -0400776
Alex Deucher0672e272013-04-09 16:22:31 -0400777/* DCE8 - CIK */
778void dce8_bandwidth_update(struct radeon_device *rdev);
779
/*
 * cik
 */
uint64_t cik_get_gpu_clock_counter(struct radeon_device *rdev);
u32 cik_get_xclk(struct radeon_device *rdev);
uint32_t cik_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void cik_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int cik_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
int cik_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk);
void cik_sdma_fence_ring_emit(struct radeon_device *rdev,
                              struct radeon_fence *fence);
bool cik_sdma_semaphore_ring_emit(struct radeon_device *rdev,
                                  struct radeon_ring *ring,
                                  struct radeon_semaphore *semaphore,
                                  bool emit_wait);
void cik_sdma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
struct radeon_fence *cik_copy_dma(struct radeon_device *rdev,
                                  uint64_t src_offset, uint64_t dst_offset,
                                  unsigned num_gpu_pages,
                                  struct reservation_object *resv);
struct radeon_fence *cik_copy_cpdma(struct radeon_device *rdev,
                                    uint64_t src_offset, uint64_t dst_offset,
                                    unsigned num_gpu_pages,
                                    struct reservation_object *resv);
int cik_sdma_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
int cik_sdma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
bool cik_sdma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
void cik_fence_gfx_ring_emit(struct radeon_device *rdev,
                             struct radeon_fence *fence);
void cik_fence_compute_ring_emit(struct radeon_device *rdev,
                                 struct radeon_fence *fence);
bool cik_semaphore_ring_emit(struct radeon_device *rdev,
                             struct radeon_ring *cp,
                             struct radeon_semaphore *semaphore,
                             bool emit_wait);
void cik_pcie_gart_tlb_flush(struct radeon_device *rdev);
int cik_init(struct radeon_device *rdev);
void cik_fini(struct radeon_device *rdev);
int cik_suspend(struct radeon_device *rdev);
int cik_resume(struct radeon_device *rdev);
bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int cik_asic_reset(struct radeon_device *rdev, bool hard);
void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
int cik_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
int cik_irq_set(struct radeon_device *rdev);
int cik_irq_process(struct radeon_device *rdev);
int cik_vm_init(struct radeon_device *rdev);
void cik_vm_fini(struct radeon_device *rdev);
void cik_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                  unsigned vm_id, uint64_t pd_addr);

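/*
 * SDMA-based GPU VM page table update helpers; these back the
 * vm.copy_pages/write_pages/set_pages/pad_ib callbacks in the asic tables.
 */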
void cik_sdma_vm_copy_pages(struct radeon_device *rdev,
                            struct radeon_ib *ib,
                            uint64_t pe, uint64_t src,
                            unsigned count);
void cik_sdma_vm_write_pages(struct radeon_device *rdev,
                             struct radeon_ib *ib,
                             uint64_t pe,
                             uint64_t addr, unsigned count,
                             uint32_t incr, uint32_t flags);
void cik_sdma_vm_set_pages(struct radeon_device *rdev,
                           struct radeon_ib *ib,
                           uint64_t pe,
                           uint64_t addr, unsigned count,
                           uint32_t incr, uint32_t flags);
void cik_sdma_vm_pad_ib(struct radeon_ib *ib);

void cik_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
                      unsigned vm_id, uint64_t pd_addr);
int cik_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
u32 cik_gfx_get_rptr(struct radeon_device *rdev,
                     struct radeon_ring *ring);
u32 cik_gfx_get_wptr(struct radeon_device *rdev,
                     struct radeon_ring *ring);
void cik_gfx_set_wptr(struct radeon_device *rdev,
                      struct radeon_ring *ring);
u32 cik_compute_get_rptr(struct radeon_device *rdev,
                         struct radeon_ring *ring);
u32 cik_compute_get_wptr(struct radeon_device *rdev,
                         struct radeon_ring *ring);
void cik_compute_set_wptr(struct radeon_device *rdev,
                          struct radeon_ring *ring);
u32 cik_sdma_get_rptr(struct radeon_device *rdev,
                      struct radeon_ring *ring);
u32 cik_sdma_get_wptr(struct radeon_device *rdev,
                      struct radeon_ring *ring);
void cik_sdma_set_wptr(struct radeon_device *rdev,
                       struct radeon_ring *ring);
int ci_get_temp(struct radeon_device *rdev);
int kv_get_temp(struct radeon_device *rdev);
int cik_get_allowed_info_register(struct radeon_device *rdev,
                                  u32 reg, u32 *val);

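/* dpm for CIK discrete parts (Bonaire/Hawaii); CIK APUs use kv_dpm below */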
int ci_dpm_init(struct radeon_device *rdev);
int ci_dpm_enable(struct radeon_device *rdev);
int ci_dpm_late_enable(struct radeon_device *rdev);
void ci_dpm_disable(struct radeon_device *rdev);
int ci_dpm_pre_set_power_state(struct radeon_device *rdev);
int ci_dpm_set_power_state(struct radeon_device *rdev);
void ci_dpm_post_set_power_state(struct radeon_device *rdev);
void ci_dpm_setup_asic(struct radeon_device *rdev);
void ci_dpm_display_configuration_changed(struct radeon_device *rdev);
void ci_dpm_fini(struct radeon_device *rdev);
u32 ci_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 ci_dpm_get_mclk(struct radeon_device *rdev, bool low);
void ci_dpm_print_power_state(struct radeon_device *rdev,
                              struct radeon_ps *ps);
void ci_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                                                    struct seq_file *m);
int ci_dpm_force_performance_level(struct radeon_device *rdev,
                                   enum radeon_dpm_forced_level level);
bool ci_dpm_vblank_too_short(struct radeon_device *rdev);
void ci_dpm_powergate_uvd(struct radeon_device *rdev, bool gate);
u32 ci_dpm_get_current_sclk(struct radeon_device *rdev);
u32 ci_dpm_get_current_mclk(struct radeon_device *rdev);

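/* fan control for CIK discrete parts, exposed via the dpm fan interface */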
int ci_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev,
                                      u32 *speed);
int ci_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev,
                                      u32 speed);
u32 ci_fan_ctrl_get_mode(struct radeon_device *rdev);
void ci_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode);

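/* dpm for CIK APUs (Kabini/Kaveri/Mullins) */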
int kv_dpm_init(struct radeon_device *rdev);
int kv_dpm_enable(struct radeon_device *rdev);
int kv_dpm_late_enable(struct radeon_device *rdev);
void kv_dpm_disable(struct radeon_device *rdev);
int kv_dpm_pre_set_power_state(struct radeon_device *rdev);
int kv_dpm_set_power_state(struct radeon_device *rdev);
void kv_dpm_post_set_power_state(struct radeon_device *rdev);
void kv_dpm_setup_asic(struct radeon_device *rdev);
void kv_dpm_display_configuration_changed(struct radeon_device *rdev);
void kv_dpm_fini(struct radeon_device *rdev);
u32 kv_dpm_get_sclk(struct radeon_device *rdev, bool low);
u32 kv_dpm_get_mclk(struct radeon_device *rdev, bool low);
void kv_dpm_print_power_state(struct radeon_device *rdev,
                              struct radeon_ps *ps);
void kv_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
                                                    struct seq_file *m);
int kv_dpm_force_performance_level(struct radeon_device *rdev,
                                   enum radeon_dpm_forced_level level);
void kv_dpm_powergate_uvd(struct radeon_device *rdev, bool gate);
void kv_dpm_enable_bapm(struct radeon_device *rdev, bool enable);
u32 kv_dpm_get_current_sclk(struct radeon_device *rdev);
u32 kv_dpm_get_current_mclk(struct radeon_device *rdev);

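/*
 * The UVD/VCE helpers below are versioned per IP block rather than per
 * ASIC family; the asic dispatch tables select the revision matching
 * each chip.
 */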
/* uvd v1.0 */
uint32_t uvd_v1_0_get_rptr(struct radeon_device *rdev,
                           struct radeon_ring *ring);
uint32_t uvd_v1_0_get_wptr(struct radeon_device *rdev,
                           struct radeon_ring *ring);
void uvd_v1_0_set_wptr(struct radeon_device *rdev,
                       struct radeon_ring *ring);
int uvd_v1_0_resume(struct radeon_device *rdev);

int uvd_v1_0_init(struct radeon_device *rdev);
void uvd_v1_0_fini(struct radeon_device *rdev);
int uvd_v1_0_start(struct radeon_device *rdev);
void uvd_v1_0_stop(struct radeon_device *rdev);

int uvd_v1_0_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
void uvd_v1_0_fence_emit(struct radeon_device *rdev,
                         struct radeon_fence *fence);
int uvd_v1_0_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
bool uvd_v1_0_semaphore_emit(struct radeon_device *rdev,
                             struct radeon_ring *ring,
                             struct radeon_semaphore *semaphore,
                             bool emit_wait);
void uvd_v1_0_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);

/* uvd v2.2 */
int uvd_v2_2_resume(struct radeon_device *rdev);
void uvd_v2_2_fence_emit(struct radeon_device *rdev,
                         struct radeon_fence *fence);
bool uvd_v2_2_semaphore_emit(struct radeon_device *rdev,
                             struct radeon_ring *ring,
                             struct radeon_semaphore *semaphore,
                             bool emit_wait);

/* uvd v3.1 */
bool uvd_v3_1_semaphore_emit(struct radeon_device *rdev,
                             struct radeon_ring *ring,
                             struct radeon_semaphore *semaphore,
                             bool emit_wait);

/* uvd v4.2 */
int uvd_v4_2_resume(struct radeon_device *rdev);

/* vce v1.0 */
uint32_t vce_v1_0_get_rptr(struct radeon_device *rdev,
                           struct radeon_ring *ring);
uint32_t vce_v1_0_get_wptr(struct radeon_device *rdev,
                           struct radeon_ring *ring);
void vce_v1_0_set_wptr(struct radeon_device *rdev,
                       struct radeon_ring *ring);
int vce_v1_0_load_fw(struct radeon_device *rdev, uint32_t *data);
unsigned vce_v1_0_bo_size(struct radeon_device *rdev);
int vce_v1_0_resume(struct radeon_device *rdev);
int vce_v1_0_init(struct radeon_device *rdev);
int vce_v1_0_start(struct radeon_device *rdev);

/* vce v2.0 */
unsigned vce_v2_0_bo_size(struct radeon_device *rdev);
int vce_v2_0_resume(struct radeon_device *rdev);

#endif