/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"

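/*
 * radeon_lock_cursor - lock or unlock cursor register updates
 *
 * Sets or clears the per-CRTC cursor update lock bit; the register and
 * bit used depend on the display block generation (DCE4+, AVIVO, or
 * legacy CRTC).  Callers in this file bracket cursor programming with
 * lock/unlock pairs (see radeon_crtc_cursor_set()/_move() below).
 */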
static void radeon_lock_cursor(struct drm_crtc *crtc, bool lock)
{
	struct radeon_device *rdev = crtc->dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	uint32_t cur_lock;

	if (ASIC_IS_DCE4(rdev)) {
		cur_lock = RREG32(EVERGREEN_CUR_UPDATE + radeon_crtc->crtc_offset);
		if (lock)
			cur_lock |= EVERGREEN_CURSOR_UPDATE_LOCK;
		else
			cur_lock &= ~EVERGREEN_CURSOR_UPDATE_LOCK;
		WREG32(EVERGREEN_CUR_UPDATE + radeon_crtc->crtc_offset, cur_lock);
	} else if (ASIC_IS_AVIVO(rdev)) {
		cur_lock = RREG32(AVIVO_D1CUR_UPDATE + radeon_crtc->crtc_offset);
		if (lock)
			cur_lock |= AVIVO_D1CURSOR_UPDATE_LOCK;
		else
			cur_lock &= ~AVIVO_D1CURSOR_UPDATE_LOCK;
		WREG32(AVIVO_D1CUR_UPDATE + radeon_crtc->crtc_offset, cur_lock);
	} else {
		cur_lock = RREG32(RADEON_CUR_OFFSET + radeon_crtc->crtc_offset);
		if (lock)
			cur_lock |= RADEON_CUR_LOCK;
		else
			cur_lock &= ~RADEON_CUR_LOCK;
		WREG32(RADEON_CUR_OFFSET + radeon_crtc->crtc_offset, cur_lock);
	}
}

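/*
 * radeon_hide_cursor - disable the hardware cursor on the given CRTC
 *
 * On DCE4+ and AVIVO parts this rewrites the cursor control register
 * without the enable bit; on legacy parts it clears RADEON_CRTC_CUR_EN
 * in the CRTC general control register.
 */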
static void radeon_hide_cursor(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_device *rdev = crtc->dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		WREG32_IDX(EVERGREEN_CUR_CONTROL + radeon_crtc->crtc_offset,
			   EVERGREEN_CURSOR_MODE(EVERGREEN_CURSOR_24_8_PRE_MULT) |
			   EVERGREEN_CURSOR_URGENT_CONTROL(EVERGREEN_CURSOR_URGENT_1_2));
	} else if (ASIC_IS_AVIVO(rdev)) {
		WREG32_IDX(AVIVO_D1CUR_CONTROL + radeon_crtc->crtc_offset,
			   (AVIVO_D1CURSOR_MODE_24BPP << AVIVO_D1CURSOR_MODE_SHIFT));
	} else {
		u32 reg;
		switch (radeon_crtc->crtc_id) {
		case 0:
			reg = RADEON_CRTC_GEN_CNTL;
			break;
		case 1:
			reg = RADEON_CRTC2_GEN_CNTL;
			break;
		default:
			return;
		}
		WREG32_IDX(reg, RREG32_IDX(reg) & ~RADEON_CRTC_CUR_EN);
	}
}

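/*
 * radeon_show_cursor - enable the hardware cursor on the given CRTC
 *
 * Sets the cursor enable bit along with the cursor mode (pre-multiplied
 * 24bpp+alpha on AVIVO/DCE4+, 24bpp on legacy CRTCs); position and
 * surface address are programmed separately.
 */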
static void radeon_show_cursor(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_device *rdev = crtc->dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		WREG32(RADEON_MM_INDEX, EVERGREEN_CUR_CONTROL + radeon_crtc->crtc_offset);
		WREG32(RADEON_MM_DATA, EVERGREEN_CURSOR_EN |
		       EVERGREEN_CURSOR_MODE(EVERGREEN_CURSOR_24_8_PRE_MULT) |
		       EVERGREEN_CURSOR_URGENT_CONTROL(EVERGREEN_CURSOR_URGENT_1_2));
	} else if (ASIC_IS_AVIVO(rdev)) {
		WREG32(RADEON_MM_INDEX, AVIVO_D1CUR_CONTROL + radeon_crtc->crtc_offset);
		WREG32(RADEON_MM_DATA, AVIVO_D1CURSOR_EN |
		       (AVIVO_D1CURSOR_MODE_24BPP << AVIVO_D1CURSOR_MODE_SHIFT));
	} else {
		switch (radeon_crtc->crtc_id) {
		case 0:
			WREG32(RADEON_MM_INDEX, RADEON_CRTC_GEN_CNTL);
			break;
		case 1:
			WREG32(RADEON_MM_INDEX, RADEON_CRTC2_GEN_CNTL);
			break;
		default:
			return;
		}

		WREG32_P(RADEON_MM_DATA, (RADEON_CRTC_CUR_EN |
					  (RADEON_CRTC_CUR_MODE_24BPP << RADEON_CRTC_CUR_MODE_SHIFT)),
			 ~(RADEON_CRTC_CUR_EN | RADEON_CRTC_CUR_MODE_MASK));
	}
}

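/*
 * radeon_set_cursor - program the cursor surface address for a CRTC
 *
 * DCE4+ and R700+ parts are given the full 64-bit VRAM address, older
 * AVIVO parts only the low 32 bits, and legacy CRTCs an offset relative
 * to DISP(2)_BASE_ADDRESS.
 */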
static void radeon_set_cursor(struct drm_crtc *crtc, struct drm_gem_object *obj,
			      uint64_t gpu_addr)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_device *rdev = crtc->dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		WREG32(EVERGREEN_CUR_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
		       upper_32_bits(gpu_addr));
		WREG32(EVERGREEN_CUR_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
		       gpu_addr & 0xffffffff);
	} else if (ASIC_IS_AVIVO(rdev)) {
		if (rdev->family >= CHIP_RV770) {
			if (radeon_crtc->crtc_id)
				WREG32(R700_D2CUR_SURFACE_ADDRESS_HIGH, upper_32_bits(gpu_addr));
			else
				WREG32(R700_D1CUR_SURFACE_ADDRESS_HIGH, upper_32_bits(gpu_addr));
		}
		WREG32(AVIVO_D1CUR_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
		       gpu_addr & 0xffffffff);
	} else {
		radeon_crtc->legacy_cursor_offset = gpu_addr - radeon_crtc->legacy_display_base_addr;
		/* offset is from DISP(2)_BASE_ADDRESS */
		WREG32(RADEON_CUR_OFFSET + radeon_crtc->crtc_offset, radeon_crtc->legacy_cursor_offset);
	}
}

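/*
 * radeon_crtc_cursor_set - set the cursor image for a CRTC
 *
 * Looks up the GEM object for @handle, pins it into VRAM (restricted to
 * a 27-bit offset on legacy CRTCs), points the hardware at it and shows
 * the cursor, then unpins and releases the previous cursor BO.  A zero
 * handle hides the cursor.  Returns 0 on success or a negative error
 * code.
 */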
int radeon_crtc_cursor_set(struct drm_crtc *crtc,
			   struct drm_file *file_priv,
			   uint32_t handle,
			   uint32_t width,
			   uint32_t height)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_device *rdev = crtc->dev->dev_private;
	struct drm_gem_object *obj;
	struct radeon_bo *robj;
	uint64_t gpu_addr;
	int ret;

	if (!handle) {
		/* turn off cursor */
		radeon_hide_cursor(crtc);
		obj = NULL;
		goto unpin;
	}

	if ((width > radeon_crtc->max_cursor_width) ||
	    (height > radeon_crtc->max_cursor_height)) {
		DRM_ERROR("bad cursor width or height %d x %d\n", width, height);
		return -EINVAL;
	}

	obj = drm_gem_object_lookup(crtc->dev, file_priv, handle);
	if (!obj) {
		DRM_ERROR("Cannot find cursor object %x for crtc %d\n", handle, radeon_crtc->crtc_id);
		return -ENOENT;
	}

	robj = gem_to_radeon_bo(obj);
	ret = radeon_bo_reserve(robj, false);
	if (unlikely(ret != 0))
		goto fail;
	/* Only 27 bit offset for legacy cursor */
	ret = radeon_bo_pin_restricted(robj, RADEON_GEM_DOMAIN_VRAM,
				       ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27,
				       &gpu_addr);
	radeon_bo_unreserve(robj);
	if (ret)
		goto fail;

	radeon_crtc->cursor_width = width;
	radeon_crtc->cursor_height = height;

	radeon_lock_cursor(crtc, true);
	radeon_set_cursor(crtc, obj, gpu_addr);
	radeon_show_cursor(crtc);
	radeon_lock_cursor(crtc, false);

unpin:
	if (radeon_crtc->cursor_bo) {
		robj = gem_to_radeon_bo(radeon_crtc->cursor_bo);
		ret = radeon_bo_reserve(robj, false);
		if (likely(ret == 0)) {
			radeon_bo_unpin(robj);
			radeon_bo_unreserve(robj);
		}
		drm_gem_object_unreference_unlocked(radeon_crtc->cursor_bo);
	}

	radeon_crtc->cursor_bo = obj;
	return 0;
fail:
	drm_gem_object_unreference_unlocked(obj);

	return ret;
}

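/*
 * radeon_crtc_cursor_move - reposition the hardware cursor on a CRTC
 *
 * Translates the requested position into hardware coordinates: negative
 * positions are expressed through the hotspot registers, AVIVO positions
 * are made relative to the whole surface, and on AVIVO parts before DCE6
 * the programmed cursor width is trimmed so the image never ends exactly
 * on a 128 pixel boundary while two CRTCs are enabled (a hardware
 * limitation, see the comment below).
 */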
int radeon_crtc_cursor_move(struct drm_crtc *crtc,
			    int x, int y)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_device *rdev = crtc->dev->dev_private;
	int xorigin = 0, yorigin = 0;
	int w = radeon_crtc->cursor_width;

	if (ASIC_IS_AVIVO(rdev)) {
		/* avivo cursors are offset into the total surface */
		x += crtc->x;
		y += crtc->y;
	}
	DRM_DEBUG("x %d y %d c->x %d c->y %d\n", x, y, crtc->x, crtc->y);

	if (x < 0) {
		xorigin = min(-x, radeon_crtc->max_cursor_width - 1);
		x = 0;
	}
	if (y < 0) {
		yorigin = min(-y, radeon_crtc->max_cursor_height - 1);
		y = 0;
	}

	/* fixed on DCE6 and newer */
	if (ASIC_IS_AVIVO(rdev) && !ASIC_IS_DCE6(rdev)) {
		int i = 0;
		struct drm_crtc *crtc_p;

		/*
		 * avivo cursor images can't end on a 128 pixel boundary or
		 * go past the end of the frame if both crtcs are enabled
		 *
		 * NOTE: It is safe to access crtc->enabled of other crtcs
		 * without holding either the mode_config lock or the other
		 * crtc's lock as long as write access to this flag _always_
		 * grabs all locks.
		 */
		list_for_each_entry(crtc_p, &crtc->dev->mode_config.crtc_list, head) {
			if (crtc_p->enabled)
				i++;
		}
		if (i > 1) {
			int cursor_end, frame_end;

			cursor_end = x - xorigin + w;
			frame_end = crtc->x + crtc->mode.crtc_hdisplay;
			if (cursor_end >= frame_end) {
				w = w - (cursor_end - frame_end);
				if (!(frame_end & 0x7f))
					w--;
			} else {
				if (!(cursor_end & 0x7f))
					w--;
			}
			if (w <= 0) {
				w = 1;
				cursor_end = x - xorigin + w;
				if (!(cursor_end & 0x7f)) {
					x--;
					WARN_ON_ONCE(x < 0);
				}
			}
		}
	}

	radeon_lock_cursor(crtc, true);
	if (ASIC_IS_DCE4(rdev)) {
		WREG32(EVERGREEN_CUR_POSITION + radeon_crtc->crtc_offset, (x << 16) | y);
		WREG32(EVERGREEN_CUR_HOT_SPOT + radeon_crtc->crtc_offset, (xorigin << 16) | yorigin);
		WREG32(EVERGREEN_CUR_SIZE + radeon_crtc->crtc_offset,
		       ((w - 1) << 16) | (radeon_crtc->cursor_height - 1));
	} else if (ASIC_IS_AVIVO(rdev)) {
		WREG32(AVIVO_D1CUR_POSITION + radeon_crtc->crtc_offset, (x << 16) | y);
		WREG32(AVIVO_D1CUR_HOT_SPOT + radeon_crtc->crtc_offset, (xorigin << 16) | yorigin);
		WREG32(AVIVO_D1CUR_SIZE + radeon_crtc->crtc_offset,
		       ((w - 1) << 16) | (radeon_crtc->cursor_height - 1));
	} else {
		if (crtc->mode.flags & DRM_MODE_FLAG_DBLSCAN)
			y *= 2;

		WREG32(RADEON_CUR_HORZ_VERT_OFF + radeon_crtc->crtc_offset,
		       (RADEON_CUR_LOCK
			| (xorigin << 16)
			| yorigin));
		WREG32(RADEON_CUR_HORZ_VERT_POSN + radeon_crtc->crtc_offset,
		       (RADEON_CUR_LOCK
			| (x << 16)
			| y));
		/* offset is from DISP(2)_BASE_ADDRESS */
		WREG32(RADEON_CUR_OFFSET + radeon_crtc->crtc_offset, (radeon_crtc->legacy_cursor_offset +
								      (yorigin * 256)));
	}
	radeon_lock_cursor(crtc, false);

	return 0;
}