/*
 * Copyright (C) 2012 Texas Instruments
 * Author: Rob Clark <robdclark@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "drm_flip_work.h"
#include <drm/drm_plane_helper.h>

#include "tilcdc_drv.h"
#include "tilcdc_regs.h"

#define TILCDC_VBLANK_SAFETY_THRESHOLD_US 1000

struct tilcdc_crtc {
	struct drm_crtc base;

	const struct tilcdc_panel_info *info;
	struct drm_pending_vblank_event *event;
	int dpms;
	wait_queue_head_t frame_done_wq;
	bool frame_done;
	spinlock_t irq_lock;

	ktime_t last_vblank;

	struct drm_framebuffer *curr_fb;
	struct drm_framebuffer *next_fb;

	/* for deferred fb unref's: */
	struct drm_flip_work unref_work;

	/* Only set if an external encoder is connected */
	bool simulate_vesa_sync;

	int sync_lost_count;
	bool frame_intact;
};
#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)

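/*
 * Deferred framebuffer unreference: runs from the "unref" flip-work queue
 * so that the reference can be dropped outside of interrupt context.
 */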
static void unref_worker(struct drm_flip_work *work, void *val)
{
	struct tilcdc_crtc *tilcdc_crtc =
		container_of(work, struct tilcdc_crtc, unref_work);
	struct drm_device *dev = tilcdc_crtc->base.dev;

	mutex_lock(&dev->mode_config.mutex);
	drm_framebuffer_unreference(val);
	mutex_unlock(&dev->mode_config.mutex);
}

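/*
 * Point the LCDC DMA engine at the given framebuffer: program the frame
 * buffer 0 base/ceiling registers (taking crtc->x/y panning into account)
 * and queue the previously scanned-out framebuffer for a deferred unref.
 */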
static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_cma_object *gem;
	unsigned int depth, bpp;
	dma_addr_t start, end;

	drm_fb_get_bpp_depth(fb->pixel_format, &depth, &bpp);
	gem = drm_fb_cma_get_gem_obj(fb, 0);

	start = gem->paddr + fb->offsets[0] +
		crtc->y * fb->pitches[0] +
		crtc->x * bpp / 8;

	end = start + (crtc->mode.vdisplay * fb->pitches[0]);

	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, start);
	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG, end);

	if (tilcdc_crtc->curr_fb)
		drm_flip_work_queue(&tilcdc_crtc->unref_work,
			tilcdc_crtc->curr_fb);

	tilcdc_crtc->curr_fb = fb;
}

static void reset(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;

	if (priv->rev != 2)
		return;

	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
	usleep_range(250, 1000);
	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
}

static void start(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;

	reset(crtc);

	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_PALETTE_LOAD_MODE(DATA_ONLY));
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
}

static void stop(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;

	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
}

static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	tilcdc_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	of_node_put(crtc->port);
	drm_crtc_cleanup(crtc);
	drm_flip_work_cleanup(&tilcdc_crtc->unref_work);
}

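/*
 * The scanout window is described only by a base and a ceiling address,
 * with no separate stride register, so the framebuffer pitch must match
 * the CRTC width exactly.
 */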
static int tilcdc_verify_fb(struct drm_crtc *crtc, struct drm_framebuffer *fb)
{
	struct drm_device *dev = crtc->dev;
	unsigned int depth, bpp;

	drm_fb_get_bpp_depth(fb->pixel_format, &depth, &bpp);

	if (fb->pitches[0] != crtc->mode.hdisplay * bpp / 8) {
		dev_err(dev->dev,
141 "Invalid pitch: fb and crtc widths must be the same");
		return -EINVAL;
	}

	return 0;
}

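/*
 * Queue a page flip. If the next expected vblank is far enough away, the
 * new framebuffer is latched into the scanout registers immediately;
 * otherwise the flip is deferred to the end-of-frame interrupt (see
 * tilcdc_crtc_irq()).
 */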
static int tilcdc_crtc_page_flip(struct drm_crtc *crtc,
		struct drm_framebuffer *fb,
		struct drm_pending_vblank_event *event,
		uint32_t page_flip_flags)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int r;
	unsigned long flags;
	s64 tdiff;
	ktime_t next_vblank;

	r = tilcdc_verify_fb(crtc, fb);
	if (r)
		return r;

	if (tilcdc_crtc->event) {
		dev_err(dev->dev, "already pending page flip!\n");
		return -EBUSY;
	}

	drm_framebuffer_reference(fb);

	crtc->primary->fb = fb;

	pm_runtime_get_sync(dev->dev);

	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

	next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
		1000000 / crtc->hwmode.vrefresh);

	tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));

	if (tdiff >= TILCDC_VBLANK_SAFETY_THRESHOLD_US)
		set_scanout(crtc, fb);
	else
		tilcdc_crtc->next_fb = fb;

	tilcdc_crtc->event = event;

	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	pm_runtime_put_sync(dev->dev);

	return 0;
}

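/*
 * Switch the CRTC on or off. On the way down, wait for the final frame to
 * complete (rev 2 only) and release any framebuffers still queued for
 * scanout.
 */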
void tilcdc_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;

	/* we really only care about on or off: */
	if (mode != DRM_MODE_DPMS_ON)
		mode = DRM_MODE_DPMS_OFF;

	if (tilcdc_crtc->dpms == mode)
		return;

	tilcdc_crtc->dpms = mode;

	if (mode == DRM_MODE_DPMS_ON) {
		pm_runtime_get_sync(dev->dev);
		start(crtc);
	} else {
		tilcdc_crtc->frame_done = false;
		stop(crtc);

		/*
		 * If necessary, wait for the framedone irq, which will still
		 * arrive before putting things to sleep.
		 */
		if (priv->rev == 2) {
			int ret = wait_event_timeout(
					tilcdc_crtc->frame_done_wq,
					tilcdc_crtc->frame_done,
					msecs_to_jiffies(50));
			if (ret == 0)
				dev_err(dev->dev, "timeout waiting for framedone\n");
		}

		pm_runtime_put_sync(dev->dev);

		if (tilcdc_crtc->next_fb) {
			drm_flip_work_queue(&tilcdc_crtc->unref_work,
					tilcdc_crtc->next_fb);
			tilcdc_crtc->next_fb = NULL;
		}

		if (tilcdc_crtc->curr_fb) {
			drm_flip_work_queue(&tilcdc_crtc->unref_work,
					tilcdc_crtc->curr_fb);
			tilcdc_crtc->curr_fb = NULL;
		}

		drm_flip_work_commit(&tilcdc_crtc->unref_work, priv->wq);
	}
}

static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adjusted_mode)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	if (!tilcdc_crtc->simulate_vesa_sync)
		return true;

	/*
	 * tilcdc does not generate VESA-compliant sync: it aligns VS on the
	 * second edge of HS instead of the first. Use adjusted_mode to fix up
	 * the sync by aligning both rising edges and adding an HSKEW offset.
	 */
	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;

	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
	} else {
		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
	}

	return true;
}

static void tilcdc_crtc_prepare(struct drm_crtc *crtc)
{
	tilcdc_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void tilcdc_crtc_commit(struct drm_crtc *crtc)
{
	tilcdc_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}

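/*
 * Program the LCDC for the given mode: DMA burst size and FIFO threshold,
 * raster timings (including the rev 2 extended bitfields), display type
 * and bpp, sync polarities, and the initial scanout address.
 */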
static int tilcdc_crtc_mode_set(struct drm_crtc *crtc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adjusted_mode,
		int x, int y,
		struct drm_framebuffer *old_fb)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
	int ret;

	ret = tilcdc_crtc_mode_valid(crtc, mode);
	if (WARN_ON(ret))
		return ret;

	if (WARN_ON(!info))
		return -EINVAL;

	ret = tilcdc_verify_fb(crtc, crtc->primary->fb);
	if (ret)
		return ret;

	pm_runtime_get_sync(dev->dev);

	/* Configure the DMA burst size and FIFO threshold: */
	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
	switch (info->dma_burst_sz) {
	case 1:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
		break;
	case 2:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
		break;
	case 4:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
		break;
	case 8:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
		break;
	case 16:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
		break;
	default:
		return -EINVAL;
	}
	reg |= (info->fifo_th << 8);
	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);

	/* Configure timings: */
	hbp = mode->htotal - mode->hsync_end;
	hfp = mode->hsync_start - mode->hdisplay;
	hsw = mode->hsync_end - mode->hsync_start;
	vbp = mode->vtotal - mode->vsync_end;
	vfp = mode->vsync_start - mode->vdisplay;
	vsw = mode->vsync_end - mode->vsync_start;

	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
		mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);

	/* Configure the AC Bias Period and Number of Transitions per Interrupt: */
	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);

	/*
	 * Subtract one from hfp, hbp and hsw because the hardware
	 * interprets a register value of 0 as 1.
	 */
	if (priv->rev == 2) {
		/* clear bits we're going to set */
		reg &= ~0x78000033;
		reg |= ((hfp-1) & 0x300) >> 8;
		reg |= ((hbp-1) & 0x300) >> 4;
		reg |= ((hsw-1) & 0x3c0) << 21;
	}
	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);

	reg = (((mode->hdisplay >> 4) - 1) << 4) |
		(((hbp-1) & 0xff) << 24) |
		(((hfp-1) & 0xff) << 16) |
		(((hsw-1) & 0x3f) << 10);
	if (priv->rev == 2)
		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);

	reg = ((mode->vdisplay - 1) & 0x3ff) |
		((vbp & 0xff) << 24) |
		((vfp & 0xff) << 16) |
		(((vsw-1) & 0x3f) << 10);
	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);

	/*
	 * Be sure to set bit 10 for the V2 LCDC controller, otherwise the
	 * width is limited to 1024 pixels, preventing 1920x1080 from being
	 * supported.
	 */
	if (priv->rev == 2) {
		if ((mode->vdisplay - 1) & 0x400) {
			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
				LCDC_LPP_B10);
		} else {
			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
				LCDC_LPP_B10);
		}
	}

	/* Configure display type: */
	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
			LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK | 0x000ff000);
	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
	if (info->tft_alt_mode)
		reg |= LCDC_TFT_ALT_ENABLE;
	if (priv->rev == 2) {
		unsigned int depth, bpp;

		drm_fb_get_bpp_depth(crtc->primary->fb->pixel_format, &depth, &bpp);
		switch (bpp) {
		case 16:
			break;
		case 32:
			reg |= LCDC_V2_TFT_24BPP_UNPACK;
			/* fallthrough */
		case 24:
			reg |= LCDC_V2_TFT_24BPP_MODE;
			break;
		default:
			dev_err(dev->dev, "invalid pixel format\n");
			return -EINVAL;
		}
	}
	reg |= info->fdd << 12;
	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);

	if (info->invert_pxl_clk)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);

	if (info->sync_ctrl)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);

	if (info->sync_edge)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);

	/*
	 * Use the value from adjusted_mode here, as it may have been changed
	 * by the fixup for slave encoders to work around tilcdc timings not
	 * being VESA compliant.
	 */
	if (adjusted_mode->flags & DRM_MODE_FLAG_NHSYNC)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);

	if (info->raster_order)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
	else
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);

	drm_framebuffer_reference(crtc->primary->fb);

	set_scanout(crtc, crtc->primary->fb);

	tilcdc_crtc_update_clk(crtc);

	pm_runtime_put_sync(dev->dev);

	return 0;
}

static int tilcdc_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
		struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	int r;

	r = tilcdc_verify_fb(crtc, crtc->primary->fb);
	if (r)
		return r;

	drm_framebuffer_reference(crtc->primary->fb);

	pm_runtime_get_sync(dev->dev);

	set_scanout(crtc, crtc->primary->fb);

	pm_runtime_put_sync(dev->dev);

	return 0;
}

static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
		.destroy = tilcdc_crtc_destroy,
		.set_config = drm_crtc_helper_set_config,
		.page_flip = tilcdc_crtc_page_flip,
};

static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
		.dpms = tilcdc_crtc_dpms,
		.mode_fixup = tilcdc_crtc_mode_fixup,
		.prepare = tilcdc_crtc_prepare,
		.commit = tilcdc_crtc_commit,
		.mode_set = tilcdc_crtc_mode_set,
		.mode_set_base = tilcdc_crtc_mode_set_base,
};

int tilcdc_crtc_max_width(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int max_width = 0;

	if (priv->rev == 1)
		max_width = 1024;
	else if (priv->rev == 2)
		max_width = 2048;

	return max_width;
}

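/*
 * Reject modes the LCDC cannot drive: widths above the controller limit or
 * not a multiple of 16, porch/sync-width values that do not fit in their
 * register fields, and pixel clocks or memory bandwidth beyond the limits
 * configured in the device tree.
 */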
int tilcdc_crtc_mode_valid(struct drm_crtc *crtc, struct drm_display_mode *mode)
{
	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
	unsigned int bandwidth;
	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;

	/*
	 * check to see if the width is within the range that
	 * the LCD Controller physically supports
	 */
	if (mode->hdisplay > tilcdc_crtc_max_width(crtc))
		return MODE_VIRTUAL_X;

	/* width must be multiple of 16 */
	if (mode->hdisplay & 0xf)
		return MODE_VIRTUAL_X;

	if (mode->vdisplay > 2048)
		return MODE_VIRTUAL_Y;

	DBG("Processing mode %dx%d@%d with pixel clock %d",
		mode->hdisplay, mode->vdisplay,
		drm_mode_vrefresh(mode), mode->clock);

	hbp = mode->htotal - mode->hsync_end;
	hfp = mode->hsync_start - mode->hdisplay;
	hsw = mode->hsync_end - mode->hsync_start;
	vbp = mode->vtotal - mode->vsync_end;
	vfp = mode->vsync_start - mode->vdisplay;
	vsw = mode->vsync_end - mode->vsync_start;

	if ((hbp-1) & ~0x3ff) {
		DBG("Pruning mode: Horizontal Back Porch out of range");
		return MODE_HBLANK_WIDE;
	}

	if ((hfp-1) & ~0x3ff) {
		DBG("Pruning mode: Horizontal Front Porch out of range");
		return MODE_HBLANK_WIDE;
	}

	if ((hsw-1) & ~0x3ff) {
		DBG("Pruning mode: Horizontal Sync Width out of range");
		return MODE_HSYNC_WIDE;
	}

	if (vbp & ~0xff) {
		DBG("Pruning mode: Vertical Back Porch out of range");
		return MODE_VBLANK_WIDE;
	}

	if (vfp & ~0xff) {
		DBG("Pruning mode: Vertical Front Porch out of range");
		return MODE_VBLANK_WIDE;
	}

	if ((vsw-1) & ~0x3f) {
		DBG("Pruning mode: Vertical Sync Width out of range");
		return MODE_VSYNC_WIDE;
	}

	/*
	 * some devices have a maximum allowed pixel clock
	 * configured from the DT
	 */
	if (mode->clock > priv->max_pixelclock) {
		DBG("Pruning mode: pixel clock too high");
		return MODE_CLOCK_HIGH;
	}

	/*
	 * some devices further limit the max horizontal resolution
	 * configured from the DT
	 */
	if (mode->hdisplay > priv->max_width)
		return MODE_BAD_WIDTH;

	/* filter out modes that would require too much memory bandwidth: */
	bandwidth = mode->hdisplay * mode->vdisplay *
			drm_mode_vrefresh(mode);
	if (bandwidth > priv->max_bandwidth) {
		DBG("Pruning mode: exceeds defined bandwidth limit");
		return MODE_BAD;
	}

	return MODE_OK;
}

void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
		const struct tilcdc_panel_info *info)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	tilcdc_crtc->info = info;
}

void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
		bool simulate_vesa_sync)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
}

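/*
 * Reprogram the display clock for the current mode using a fixed divider
 * of 2, cycling the CRTC off and back on around the change if it was
 * running.
 */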
void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int dpms = tilcdc_crtc->dpms;
	unsigned long lcd_clk;
	const unsigned clkdiv = 2; /* using a fixed divider of 2 */
	int ret;

	pm_runtime_get_sync(dev->dev);

	if (dpms == DRM_MODE_DPMS_ON)
		tilcdc_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	/* mode.clock is in KHz, set_rate wants parameter in Hz */
	ret = clk_set_rate(priv->clk, crtc->mode.clock * 1000 * clkdiv);
	if (ret < 0) {
		dev_err(dev->dev, "failed to set display clock rate to: %d\n",
				crtc->mode.clock);
		goto out;
	}

	lcd_clk = clk_get_rate(priv->clk);

	DBG("lcd_clk=%lu, mode clock=%d, div=%u",
		lcd_clk, crtc->mode.clock, clkdiv);

	/* Configure the LCD clock divisor. */
	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
			LCDC_RASTER_MODE);

	if (priv->rev == 2)
		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
				LCDC_V2_CORE_CLK_EN);

	if (dpms == DRM_MODE_DPMS_ON)
		tilcdc_crtc_dpms(crtc, DRM_MODE_DPMS_ON);

out:
	pm_runtime_put_sync(dev->dev);
}

#define SYNC_LOST_COUNT_LIMIT 50

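/*
 * Per-frame interrupt handler: on end-of-frame, latch any deferred page
 * flip, record the vblank timestamp and send the pending vblank event;
 * also report FIFO underflows and disable the sync-lost interrupt if it
 * floods.
 */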
irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	uint32_t stat;

	stat = tilcdc_read_irqstatus(dev);
	tilcdc_clear_irqstatus(dev, stat);

	if (stat & LCDC_END_OF_FRAME0) {
		unsigned long flags;
		bool skip_event = false;
		ktime_t now;

		now = ktime_get();

		drm_flip_work_commit(&tilcdc_crtc->unref_work, priv->wq);

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		tilcdc_crtc->last_vblank = now;

		if (tilcdc_crtc->next_fb) {
			set_scanout(crtc, tilcdc_crtc->next_fb);
			tilcdc_crtc->next_fb = NULL;
			skip_event = true;
		}

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

		drm_handle_vblank(dev, 0);

		if (!skip_event) {
			struct drm_pending_vblank_event *event;

			spin_lock_irqsave(&dev->event_lock, flags);

			event = tilcdc_crtc->event;
			tilcdc_crtc->event = NULL;
			if (event)
				drm_crtc_send_vblank_event(crtc, event);

			spin_unlock_irqrestore(&dev->event_lock, flags);
		}

		if (tilcdc_crtc->frame_intact)
			tilcdc_crtc->sync_lost_count = 0;
		else
			tilcdc_crtc->frame_intact = true;
	}

	if (priv->rev == 2) {
		if (stat & LCDC_FRAME_DONE) {
			tilcdc_crtc->frame_done = true;
			wake_up(&tilcdc_crtc->frame_done_wq);
		}
		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
	}

	if (stat & LCDC_SYNC_LOST) {
		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
				    __func__, stat);
		tilcdc_crtc->frame_intact = false;
		if (tilcdc_crtc->sync_lost_count++ > SYNC_LOST_COUNT_LIMIT) {
			dev_err(dev->dev,
				"%s(0x%08x): Sync lost flood detected, disabling the interrupt",
				__func__, stat);
			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
				     LCDC_SYNC_LOST);
		}
	}

	if (stat & LCDC_FIFO_UNDERFLOW)
		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
				    __func__, stat);

	return IRQ_HANDLED;
}

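/*
 * Allocate and initialize the CRTC, including its deferred-unref flip-work
 * queue and, for componentized setups, the OF "port" node used when
 * binding external components.
 */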
struct drm_crtc *tilcdc_crtc_create(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc;
	struct drm_crtc *crtc;
	int ret;

	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
	if (!tilcdc_crtc) {
		dev_err(dev->dev, "allocation failed\n");
		return NULL;
	}

	crtc = &tilcdc_crtc->base;

	tilcdc_crtc->dpms = DRM_MODE_DPMS_OFF;
	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);

	drm_flip_work_init(&tilcdc_crtc->unref_work,
			"unref", unref_worker);

	spin_lock_init(&tilcdc_crtc->irq_lock);

	ret = drm_crtc_init(dev, crtc, &tilcdc_crtc_funcs);
	if (ret < 0)
		goto fail;

	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);

	if (priv->is_componentized) {
		struct device_node *ports =
			of_get_child_by_name(dev->dev->of_node, "ports");

		if (ports) {
			crtc->port = of_get_child_by_name(ports, "port");
			of_node_put(ports);
		} else {
			crtc->port =
				of_get_child_by_name(dev->dev->of_node, "port");
		}
		if (!crtc->port) { /* This should never happen */
			dev_err(dev->dev, "Port node not found in %s\n",
				dev->dev->of_node->full_name);
			goto fail;
		}
	}

	return crtc;

fail:
	tilcdc_crtc_destroy(crtc);
	return NULL;
}