/*
 * Copyright (C) 2012 Russell King
 * Rewritten from the dovefb driver, and Armada510 manuals.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/clk.h>
#include <linux/component.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_plane_helper.h>
#include "armada_crtc.h"
#include "armada_drm.h"
#include "armada_fb.h"
#include "armada_gem.h"
#include "armada_hw.h"

struct armada_frame_work {
	struct drm_pending_vblank_event *event;
	struct armada_regs regs[4];
	struct drm_framebuffer *old_fb;
};

enum csc_mode {
	CSC_AUTO = 0,
	CSC_YUV_CCIR601 = 1,
	CSC_YUV_CCIR709 = 2,
	CSC_RGB_COMPUTER = 1,
	CSC_RGB_STUDIO = 2,
};

/*
 * A note about interlacing. Let's consider HDMI 1920x1080i.
 * The timing parameters we have from X are:
 *  Hact HsyA HsyI Htot  Vact VsyA VsyI Vtot
 *  1920 2448 2492 2640  1080 1084 1094 1125
 * Which get translated to:
 *  Hact HsyA HsyI Htot  Vact VsyA VsyI Vtot
 *  1920 2448 2492 2640   540  542  547  562
 *
 * This is how it is defined by CEA-861-D - line and pixel numbers are
 * referenced to the rising edge of VSYNC and HSYNC. Total clocks per
 * line: 2640. For the odd frame, the first active line is line 21, and
 * for the even frame, the first active line is line 584.
 *
 * LN:    560     561     562     563             567     568     569
 * DE:    ~~~|____________________________//__________________________
 * HSYNC: ____|~|_____|~|_____|~|_____|~|_//__|~|_____|~|_____|~|_____
 * VSYNC: _________________________|~~~~~~//~~~~~~~~~~~~~~~|__________
 * 22 blanking lines. VSYNC at 1320 (referenced to the HSYNC rising edge).
 *
 * LN:    1123    1124    1125      1               5       6       7
 * DE:    ~~~|____________________________//__________________________
 * HSYNC: ____|~|_____|~|_____|~|_____|~|_//__|~|_____|~|_____|~|_____
 * VSYNC: ____________________|~~~~~~~~~~~//~~~~~~~~~~|_______________
 * 23 blanking lines
 *
 * The Armada LCD Controller line and pixel numbers are, like X timings,
 * referenced to the top left of the active frame.
 *
 * So, translating these to our LCD controller:
 *  Odd frame, 563 total lines, VSYNC at line 543-548, pixel 1128.
 *  Even frame, 562 total lines, VSYNC at line 542-547, pixel 2448.
 * Note: Vsync front porch remains constant!
 *
 * if (odd_frame) {
 *   vtotal = mode->crtc_vtotal + 1;
 *   vbackporch = mode->crtc_vsync_start - mode->crtc_vdisplay + 1;
 *   vhorizpos = mode->crtc_hsync_start - mode->crtc_htotal / 2;
 * } else {
 *   vtotal = mode->crtc_vtotal;
 *   vbackporch = mode->crtc_vsync_start - mode->crtc_vdisplay;
 *   vhorizpos = mode->crtc_hsync_start;
 * }
 * vfrontporch = mode->crtc_vtotal - mode->crtc_vsync_end;
 *
 * So, we need to reprogram these registers on each vsync event:
 *  LCD_SPU_V_PORCH, LCD_SPU_ADV_REG, LCD_SPUT_V_H_TOTAL
 *
 * Note: we do not use the frame done interrupts because these appear
 * to happen too early, and lead to jitter on the display (presumably
 * they occur at the end of the last active line, before the vsync back
 * porch, which we're reprogramming.)
 */
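/*
 * A worked example of the pseudo-code above (a sketch; the numbers are the
 * 1080i timings quoted at the start of this note: crtc_vtotal 562,
 * crtc_vdisplay 540, crtc_vsync_start 542, crtc_vsync_end 547,
 * crtc_hsync_start 2448, crtc_htotal 2640):
 *  odd frame:  vtotal = 563, vbackporch = 3, vhorizpos = 2448 - 1320 = 1128
 *  even frame: vtotal = 562, vbackporch = 2, vhorizpos = 2448
 *  vfrontporch = 562 - 547 = 15 for both fields.
 */

/*
 * Apply a queued list of register updates. The list is terminated by an
 * entry whose offset is ~0; entries with a non-zero mask are applied as a
 * read-modify-write of the current register value.
 */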
void
armada_drm_crtc_update_regs(struct armada_crtc *dcrtc, struct armada_regs *regs)
{
	while (regs->offset != ~0) {
		void __iomem *reg = dcrtc->base + regs->offset;
		uint32_t val;

		val = regs->mask;
		if (val != 0)
			val &= readl_relaxed(reg);
		writel_relaxed(val | regs->val, reg);
		++regs;
	}
}

#define dpms_blanked(dpms)	((dpms) != DRM_MODE_DPMS_ON)

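/*
 * Program the dumb interface control register from the current DPMS state
 * and the mode's sync polarities, only writing the register when the value
 * actually changes.
 */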
static void armada_drm_crtc_update(struct armada_crtc *dcrtc)
{
	uint32_t dumb_ctrl;

	dumb_ctrl = dcrtc->cfg_dumb_ctrl;

	if (!dpms_blanked(dcrtc->dpms))
		dumb_ctrl |= CFG_DUMB_ENA;

	/*
	 * When the dumb interface isn't in DUMB24_RGB888_0 mode, it might
	 * be using SPI or GPIO. If we set this to DUMB_BLANK, we will
	 * force LCD_D[23:0] to output blank color, overriding the GPIO or
	 * SPI usage. So leave it as-is unless in DUMB24_RGB888_0 mode.
	 */
	if (dpms_blanked(dcrtc->dpms) &&
	    (dumb_ctrl & DUMB_MASK) == DUMB24_RGB888_0) {
		dumb_ctrl &= ~DUMB_MASK;
		dumb_ctrl |= DUMB_BLANK;
	}

	/*
	 * The documentation doesn't indicate what the normal state of
	 * the sync signals is. Sebastian Hesselbart kindly probed
	 * these signals on his board to determine their state.
	 *
	 * The non-inverted state of the sync signals is active high.
	 * Setting these bits makes the appropriate signal active low.
	 */
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NCSYNC)
		dumb_ctrl |= CFG_INV_CSYNC;
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NHSYNC)
		dumb_ctrl |= CFG_INV_HSYNC;
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NVSYNC)
		dumb_ctrl |= CFG_INV_VSYNC;

	if (dcrtc->dumb_ctrl != dumb_ctrl) {
		dcrtc->dumb_ctrl = dumb_ctrl;
		writel_relaxed(dumb_ctrl, dcrtc->base + LCD_SPU_DUMB_CTRL);
	}
}

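/*
 * Compute the scanout start address(es) and pitch for the given panning
 * offset and queue them as register updates. For interlaced scanout the
 * even field starts one line further into the buffer and the pitch is
 * doubled so that each field skips the other field's lines. Returns the
 * number of entries queued.
 */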
static unsigned armada_drm_crtc_calc_fb(struct drm_framebuffer *fb,
	int x, int y, struct armada_regs *regs, bool interlaced)
{
	struct armada_gem_object *obj = drm_fb_obj(fb);
	unsigned pitch = fb->pitches[0];
	unsigned offset = y * pitch + x * fb->bits_per_pixel / 8;
	uint32_t addr_odd, addr_even;
	unsigned i = 0;

	DRM_DEBUG_DRIVER("pitch %u x %d y %d bpp %d\n",
		pitch, x, y, fb->bits_per_pixel);

	addr_odd = addr_even = obj->dev_addr + offset;

	if (interlaced) {
		addr_even += pitch;
		pitch *= 2;
	}

	/* write offset, base, and pitch */
	armada_reg_queue_set(regs, i, addr_odd, LCD_CFG_GRA_START_ADDR0);
	armada_reg_queue_set(regs, i, addr_even, LCD_CFG_GRA_START_ADDR1);
	armada_reg_queue_mod(regs, i, pitch, 0xffff, LCD_CFG_GRA_PITCH);

	return i;
}

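/*
 * Queue a callback to be run from the next vblank interrupt. A vblank
 * reference is held for as long as the event sits on the list; it is
 * dropped again when the event is removed or run.
 */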
void armada_drm_vbl_event_add(struct armada_crtc *dcrtc,
	struct armada_vbl_event *evt)
{
	unsigned long flags;
	bool not_on_list;

	WARN_ON(drm_vblank_get(dcrtc->crtc.dev, dcrtc->num));

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	not_on_list = list_empty(&evt->node);
	if (not_on_list)
		list_add_tail(&evt->node, &dcrtc->vbl_list);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);

	if (!not_on_list)
		drm_vblank_put(dcrtc->crtc.dev, dcrtc->num);
}

void armada_drm_vbl_event_remove(struct armada_crtc *dcrtc,
	struct armada_vbl_event *evt)
{
	if (!list_empty(&evt->node)) {
		list_del_init(&evt->node);
		drm_vblank_put(dcrtc->crtc.dev, dcrtc->num);
	}
}

static void armada_drm_vbl_event_run(struct armada_crtc *dcrtc)
{
	struct armada_vbl_event *e, *n;

	list_for_each_entry_safe(e, n, &dcrtc->vbl_list, node) {
		list_del_init(&e->node);
		drm_vblank_put(dcrtc->crtc.dev, dcrtc->num);
		e->fn(dcrtc, e->data);
	}
}

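/*
 * Queue a flip/cleanup work item to be completed from the next graphics
 * frame interrupt. Only one work item may be outstanding per CRTC;
 * -EBUSY is returned if one is already queued.
 */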
static int armada_drm_crtc_queue_frame_work(struct armada_crtc *dcrtc,
	struct armada_frame_work *work)
{
	struct drm_device *dev = dcrtc->crtc.dev;
	unsigned long flags;
	int ret;

	ret = drm_vblank_get(dev, dcrtc->num);
	if (ret) {
		DRM_ERROR("failed to acquire vblank counter\n");
		return ret;
	}

	spin_lock_irqsave(&dev->event_lock, flags);
	if (!dcrtc->frame_work)
		dcrtc->frame_work = work;
	else
		ret = -EBUSY;
	spin_unlock_irqrestore(&dev->event_lock, flags);

	if (ret)
		drm_vblank_put(dev, dcrtc->num);

	return ret;
}

static void armada_drm_crtc_complete_frame_work(struct armada_crtc *dcrtc)
{
	struct drm_device *dev = dcrtc->crtc.dev;
	struct armada_frame_work *work = dcrtc->frame_work;

	dcrtc->frame_work = NULL;

	armada_drm_crtc_update_regs(dcrtc, work->regs);

	if (work->event)
		drm_send_vblank_event(dev, dcrtc->num, work->event);

	drm_vblank_put(dev, dcrtc->num);

	/* Finally, queue the process-half of the cleanup. */
	__armada_drm_queue_unref_work(dcrtc->crtc.dev, work->old_fb);
	kfree(work);
}

static void armada_drm_crtc_finish_fb(struct armada_crtc *dcrtc,
	struct drm_framebuffer *fb, bool force)
{
	struct armada_frame_work *work;

	if (!fb)
		return;

	if (force) {
		/* Display is disabled, so just drop the old fb */
		drm_framebuffer_unreference(fb);
		return;
	}

	work = kmalloc(sizeof(*work), GFP_KERNEL);
	if (work) {
		int i = 0;
		work->event = NULL;
		work->old_fb = fb;
		armada_reg_queue_end(work->regs, i);

		if (armada_drm_crtc_queue_frame_work(dcrtc, work) == 0)
			return;

		kfree(work);
	}

	/*
	 * Oops - just drop the reference immediately and hope for
	 * the best. The worst that will happen is the buffer gets
	 * reused before it has finished being displayed.
	 */
	drm_framebuffer_unreference(fb);
}

static void armada_drm_vblank_off(struct armada_crtc *dcrtc)
{
	struct drm_device *dev = dcrtc->crtc.dev;

	/*
	 * Tell the DRM core that vblank IRQs aren't going to happen for
	 * a while. This cleans up any pending vblank events for us.
	 */
	drm_crtc_vblank_off(&dcrtc->crtc);

	/* Handle any pending flip event. */
	spin_lock_irq(&dev->event_lock);
	if (dcrtc->frame_work)
		armada_drm_crtc_complete_frame_work(dcrtc);
	spin_unlock_irq(&dev->event_lock);
}

void armada_drm_crtc_gamma_set(struct drm_crtc *crtc, u16 r, u16 g, u16 b,
	int idx)
{
}

void armada_drm_crtc_gamma_get(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
	int idx)
{
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_dpms(struct drm_crtc *crtc, int dpms)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	if (dcrtc->dpms != dpms) {
		dcrtc->dpms = dpms;
		if (!IS_ERR(dcrtc->clk) && !dpms_blanked(dpms))
			WARN_ON(clk_prepare_enable(dcrtc->clk));
		armada_drm_crtc_update(dcrtc);
		if (!IS_ERR(dcrtc->clk) && dpms_blanked(dpms))
			clk_disable_unprepare(dcrtc->clk);
		if (dpms_blanked(dpms))
			armada_drm_vblank_off(dcrtc);
		else
			drm_crtc_vblank_on(&dcrtc->crtc);
	}
}

/*
 * Prepare for a mode set. Turn off overlay to ensure that we don't end
 * up with the overlay size being bigger than the active screen size.
 * We rely upon X refreshing this state after the mode set has completed.
 *
 * The mode_config.mutex will be held for this call
 */
static void armada_drm_crtc_prepare(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct drm_plane *plane;

	/*
	 * If we have an overlay plane associated with this CRTC, disable
	 * it before the modeset to avoid its coordinates being outside
	 * the new mode parameters.
	 */
	plane = dcrtc->plane;
	if (plane)
		drm_plane_force_disable(plane);
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_commit(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	if (dcrtc->dpms != DRM_MODE_DPMS_ON) {
		dcrtc->dpms = DRM_MODE_DPMS_ON;
		armada_drm_crtc_update(dcrtc);
	}
}

/* The mode_config.mutex will be held for this call */
static bool armada_drm_crtc_mode_fixup(struct drm_crtc *crtc,
	const struct drm_display_mode *mode, struct drm_display_mode *adj)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/* We can't do interlaced modes if we don't have the SPU_ADV_REG */
	if (!dcrtc->variant->has_spu_adv_reg &&
	    adj->flags & DRM_MODE_FLAG_INTERLACE)
		return false;

	/* Check whether the display mode is possible */
	ret = dcrtc->variant->compute_clock(dcrtc, adj, NULL);
	if (ret)
		return false;

	return true;
}

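/*
 * Per-CRTC interrupt handling: report FIFO underflows, deliver the vblank,
 * run queued vblank events, reprogram the per-field vertical timings for
 * interlaced modes (see the note at the top of this file), apply any
 * deferred cursor update on frame done, and complete queued frame work.
 */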
static void armada_drm_crtc_irq(struct armada_crtc *dcrtc, u32 stat)
{
	void __iomem *base = dcrtc->base;

	if (stat & DMA_FF_UNDERFLOW)
		DRM_ERROR("video underflow on crtc %u\n", dcrtc->num);
	if (stat & GRA_FF_UNDERFLOW)
		DRM_ERROR("graphics underflow on crtc %u\n", dcrtc->num);

	if (stat & VSYNC_IRQ)
		drm_handle_vblank(dcrtc->crtc.dev, dcrtc->num);

	spin_lock(&dcrtc->irq_lock);
	armada_drm_vbl_event_run(dcrtc);

	if (stat & GRA_FRAME_IRQ && dcrtc->interlaced) {
		int i = stat & GRA_FRAME_IRQ0 ? 0 : 1;
		uint32_t val;

		writel_relaxed(dcrtc->v[i].spu_v_porch, base + LCD_SPU_V_PORCH);
		writel_relaxed(dcrtc->v[i].spu_v_h_total,
			       base + LCD_SPUT_V_H_TOTAL);

		val = readl_relaxed(base + LCD_SPU_ADV_REG);
		val &= ~(ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF | ADV_VSYNCOFFEN);
		val |= dcrtc->v[i].spu_adv_reg;
		writel_relaxed(val, base + LCD_SPU_ADV_REG);
	}

	if (stat & DUMB_FRAMEDONE && dcrtc->cursor_update) {
		writel_relaxed(dcrtc->cursor_hw_pos,
			       base + LCD_SPU_HWC_OVSA_HPXL_VLN);
		writel_relaxed(dcrtc->cursor_hw_sz,
			       base + LCD_SPU_HWC_HPXL_VLN);
		armada_updatel(CFG_HWC_ENA,
			       CFG_HWC_ENA | CFG_HWC_1BITMOD | CFG_HWC_1BITENA,
			       base + LCD_SPU_DMA_CTRL0);
		dcrtc->cursor_update = false;
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	}

	spin_unlock(&dcrtc->irq_lock);

	if (stat & GRA_FRAME_IRQ) {
		struct drm_device *dev = dcrtc->crtc.dev;

		spin_lock(&dev->event_lock);
		if (dcrtc->frame_work)
			armada_drm_crtc_complete_frame_work(dcrtc);
		spin_unlock(&dev->event_lock);

		wake_up(&dcrtc->frame_wait);
	}
}

static irqreturn_t armada_drm_irq(int irq, void *arg)
{
	struct armada_crtc *dcrtc = arg;
	u32 v, stat = readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);

	/*
	 * This is ridiculous - rather than writing bits to clear, we
	 * have to set the actual status register value. This is racy.
	 */
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	/* Mask out those interrupts we haven't enabled */
	v = stat & dcrtc->irq_ena;

	if (v & (VSYNC_IRQ|GRA_FRAME_IRQ|DUMB_FRAMEDONE)) {
		armada_drm_crtc_irq(dcrtc, stat);
		return IRQ_HANDLED;
	}
	return IRQ_NONE;
}

/* These are locked by dev->vbl_lock */
void armada_drm_crtc_disable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if (dcrtc->irq_ena & mask) {
		dcrtc->irq_ena &= ~mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	}
}

void armada_drm_crtc_enable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if ((dcrtc->irq_ena & mask) != mask) {
		dcrtc->irq_ena |= mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
		if (readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR) & mask)
			writel(0, dcrtc->base + LCD_SPU_IRQ_ISR);
	}
}

static uint32_t armada_drm_crtc_calculate_csc(struct armada_crtc *dcrtc)
{
	struct drm_display_mode *adj = &dcrtc->crtc.mode;
	uint32_t val = 0;

	if (dcrtc->csc_yuv_mode == CSC_YUV_CCIR709)
		val |= CFG_CSC_YUV_CCIR709;
	if (dcrtc->csc_rgb_mode == CSC_RGB_STUDIO)
		val |= CFG_CSC_RGB_STUDIO;

	/*
	 * In auto mode, set the colorimetry, based upon the HDMI spec.
	 * 1280x720p, 1920x1080p and 1920x1080i use ITU709, others use
	 * ITU601. It may be more appropriate to set this depending on
	 * the source - but what if the graphic frame is YUV and the
	 * video frame is RGB?
	 */
	if ((adj->hdisplay == 1280 && adj->vdisplay == 720 &&
	     !(adj->flags & DRM_MODE_FLAG_INTERLACE)) ||
	    (adj->hdisplay == 1920 && adj->vdisplay == 1080)) {
		if (dcrtc->csc_yuv_mode == CSC_AUTO)
			val |= CFG_CSC_YUV_CCIR709;
	}

	/*
	 * We assume we're connected to a TV-like device, so the YUV->RGB
	 * conversion should produce a limited range. We should set this
	 * depending on the connectors attached to this CRTC, and what
	 * kind of device they report being connected to.
	 */
	if (dcrtc->csc_rgb_mode == CSC_AUTO)
		val |= CFG_CSC_RGB_STUDIO;

	return val;
}

/* The mode_config.mutex will be held for this call */
static int armada_drm_crtc_mode_set(struct drm_crtc *crtc,
	struct drm_display_mode *mode, struct drm_display_mode *adj,
	int x, int y, struct drm_framebuffer *old_fb)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[17];
	uint32_t lm, rm, tm, bm, val, sclk;
	unsigned long flags;
	unsigned i;
	bool interlaced;

	drm_framebuffer_reference(crtc->primary->fb);

	interlaced = !!(adj->flags & DRM_MODE_FLAG_INTERLACE);

	i = armada_drm_crtc_calc_fb(dcrtc->crtc.primary->fb,
				    x, y, regs, interlaced);

	rm = adj->crtc_hsync_start - adj->crtc_hdisplay;
	lm = adj->crtc_htotal - adj->crtc_hsync_end;
	bm = adj->crtc_vsync_start - adj->crtc_vdisplay;
	tm = adj->crtc_vtotal - adj->crtc_vsync_end;

	DRM_DEBUG_DRIVER("H: %d %d %d %d lm %d rm %d\n",
		adj->crtc_hdisplay,
		adj->crtc_hsync_start,
		adj->crtc_hsync_end,
		adj->crtc_htotal, lm, rm);
	DRM_DEBUG_DRIVER("V: %d %d %d %d tm %d bm %d\n",
		adj->crtc_vdisplay,
		adj->crtc_vsync_start,
		adj->crtc_vsync_end,
		adj->crtc_vtotal, tm, bm);

	/* Wait for pending flips to complete */
	wait_event(dcrtc->frame_wait, !dcrtc->frame_work);

	drm_crtc_vblank_off(crtc);

	val = dcrtc->dumb_ctrl & ~CFG_DUMB_ENA;
	if (val != dcrtc->dumb_ctrl) {
		dcrtc->dumb_ctrl = val;
		writel_relaxed(val, dcrtc->base + LCD_SPU_DUMB_CTRL);
	}

	/*
	 * If we are blanked, we would have disabled the clock. Re-enable
	 * it so that compute_clock() does the right thing.
	 */
	if (!IS_ERR(dcrtc->clk) && dpms_blanked(dcrtc->dpms))
		WARN_ON(clk_prepare_enable(dcrtc->clk));

	/* Now compute the divider for real */
	dcrtc->variant->compute_clock(dcrtc, adj, &sclk);

	/* Ensure graphic fifo is enabled */
	armada_reg_queue_mod(regs, i, 0, CFG_PDWN64x66, LCD_SPU_SRAM_PARA1);
	armada_reg_queue_set(regs, i, sclk, LCD_CFG_SCLK_DIV);

	if (interlaced ^ dcrtc->interlaced) {
		if (adj->flags & DRM_MODE_FLAG_INTERLACE)
			drm_vblank_get(dcrtc->crtc.dev, dcrtc->num);
		else
			drm_vblank_put(dcrtc->crtc.dev, dcrtc->num);
		dcrtc->interlaced = interlaced;
	}

	spin_lock_irqsave(&dcrtc->irq_lock, flags);

	/* Even interlaced/progressive frame */
	dcrtc->v[1].spu_v_h_total = adj->crtc_vtotal << 16 |
				    adj->crtc_htotal;
	dcrtc->v[1].spu_v_porch = tm << 16 | bm;
	val = adj->crtc_hsync_start;
	dcrtc->v[1].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN |
		dcrtc->variant->spu_adv_reg;

	if (interlaced) {
		/* Odd interlaced frame */
		dcrtc->v[0].spu_v_h_total = dcrtc->v[1].spu_v_h_total +
					    (1 << 16);
		dcrtc->v[0].spu_v_porch = dcrtc->v[1].spu_v_porch + 1;
		val = adj->crtc_hsync_start - adj->crtc_htotal / 2;
		dcrtc->v[0].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN |
			dcrtc->variant->spu_adv_reg;
	} else {
		dcrtc->v[0] = dcrtc->v[1];
	}

	val = adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;

	armada_reg_queue_set(regs, i, val, LCD_SPU_V_H_ACTIVE);
	armada_reg_queue_set(regs, i, val, LCD_SPU_GRA_HPXL_VLN);
	armada_reg_queue_set(regs, i, val, LCD_SPU_GZM_HPXL_VLN);
	armada_reg_queue_set(regs, i, (lm << 16) | rm, LCD_SPU_H_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_porch, LCD_SPU_V_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_h_total,
			     LCD_SPUT_V_H_TOTAL);

	if (dcrtc->variant->has_spu_adv_reg) {
		armada_reg_queue_mod(regs, i, dcrtc->v[0].spu_adv_reg,
				     ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF |
				     ADV_VSYNCOFFEN, LCD_SPU_ADV_REG);
	}

	val = CFG_GRA_ENA | CFG_GRA_HSMOOTH;
	val |= CFG_GRA_FMT(drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->fmt);
	val |= CFG_GRA_MOD(drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->mod);

	if (drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->fmt > CFG_420)
		val |= CFG_PALETTE_ENA;

	if (interlaced)
		val |= CFG_GRA_FTOGGLE;

	armada_reg_queue_mod(regs, i, val, CFG_GRAFORMAT |
			     CFG_GRA_MOD(CFG_SWAPRB | CFG_SWAPUV |
					 CFG_SWAPYU | CFG_YUV2RGB) |
			     CFG_PALETTE_ENA | CFG_GRA_FTOGGLE,
			     LCD_SPU_DMA_CTRL0);

	val = adj->flags & DRM_MODE_FLAG_NVSYNC ? CFG_VSYNC_INV : 0;
	armada_reg_queue_mod(regs, i, val, CFG_VSYNC_INV, LCD_SPU_DMA_CTRL1);

	val = dcrtc->spu_iopad_ctrl | armada_drm_crtc_calculate_csc(dcrtc);
	armada_reg_queue_set(regs, i, val, LCD_SPU_IOPAD_CONTROL);
	armada_reg_queue_end(regs, i);

	armada_drm_crtc_update_regs(dcrtc, regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);

	armada_drm_crtc_update(dcrtc);

	drm_crtc_vblank_on(crtc);
	armada_drm_crtc_finish_fb(dcrtc, old_fb, dpms_blanked(dcrtc->dpms));

	return 0;
}

/* The mode_config.mutex will be held for this call */
static int armada_drm_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
	struct drm_framebuffer *old_fb)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[4];
	unsigned i;

	i = armada_drm_crtc_calc_fb(crtc->primary->fb, crtc->x, crtc->y, regs,
		dcrtc->interlaced);
	armada_reg_queue_end(regs, i);

	/* Wait for pending flips to complete */
	wait_event(dcrtc->frame_wait, !dcrtc->frame_work);

	/* Take a reference to the new fb as we're using it */
	drm_framebuffer_reference(crtc->primary->fb);

	/* Update the base in the CRTC */
	armada_drm_crtc_update_regs(dcrtc, regs);

	/* Drop our previously held reference */
	armada_drm_crtc_finish_fb(dcrtc, old_fb, dpms_blanked(dcrtc->dpms));

	return 0;
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_disable(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	armada_drm_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
	armada_drm_crtc_finish_fb(dcrtc, crtc->primary->fb, true);

	/* Power down most RAMs and FIFOs */
	writel_relaxed(CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
		       CFG_PDWN32x32 | CFG_PDWN16x66 | CFG_PDWN32x66 |
		       CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
}

static const struct drm_crtc_helper_funcs armada_crtc_helper_funcs = {
	.dpms = armada_drm_crtc_dpms,
	.prepare = armada_drm_crtc_prepare,
	.commit = armada_drm_crtc_commit,
	.mode_fixup = armada_drm_crtc_mode_fixup,
	.mode_set = armada_drm_crtc_mode_set,
	.mode_set_base = armada_drm_crtc_mode_set_base,
	.disable = armada_drm_crtc_disable,
};

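/*
 * Load an ARGB8888 cursor image into the hardware cursor SRAM, swapping
 * the red and blue channels of each pixel and stepping the write address
 * across the two cursor RAM banks.
 */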
static void armada_load_cursor_argb(void __iomem *base, uint32_t *pix,
	unsigned stride, unsigned width, unsigned height)
{
	uint32_t addr;
	unsigned y;

	addr = SRAM_HWC32_RAM1;
	for (y = 0; y < height; y++) {
		uint32_t *p = &pix[y * stride];
		unsigned x;

		for (x = 0; x < width; x++, p++) {
			uint32_t val = *p;

			val = (val & 0xff00ff00) |
			      (val & 0x000000ff) << 16 |
			      (val & 0x00ff0000) >> 16;

			writel_relaxed(val,
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(addr | SRAM_WRITE,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			addr += 1;
			if ((addr & 0x00ff) == 0)
				addr += 0xf00;
			if ((addr & 0x30ff) == 0)
				addr = SRAM_HWC32_RAM2;
		}
	}
}

static void armada_drm_crtc_cursor_tran(void __iomem *base)
{
	unsigned addr;

	for (addr = 0; addr < 256; addr++) {
		/* write the default value */
		writel_relaxed(0x55555555, base + LCD_SPU_SRAM_WRDAT);
		writel_relaxed(addr | SRAM_WRITE | SRAM_HWC32_TRAN,
			       base + LCD_SPU_SRAM_CTRL);
	}
}

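/*
 * Clip the cursor against the active display area, reload the cursor image
 * and transparency SRAM when necessary, and defer the position/size/enable
 * update to the DUMB_FRAMEDONE interrupt handler.
 */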
static int armada_drm_crtc_cursor_update(struct armada_crtc *dcrtc, bool reload)
{
	uint32_t xoff, xscr, w = dcrtc->cursor_w, s;
	uint32_t yoff, yscr, h = dcrtc->cursor_h;
	uint32_t para1;

	/*
	 * Calculate the visible width and height of the cursor,
	 * screen position, and the position in the cursor bitmap.
	 */
	if (dcrtc->cursor_x < 0) {
		xoff = -dcrtc->cursor_x;
		xscr = 0;
		w -= min(xoff, w);
	} else if (dcrtc->cursor_x + w > dcrtc->crtc.mode.hdisplay) {
		xoff = 0;
		xscr = dcrtc->cursor_x;
		w = max_t(int, dcrtc->crtc.mode.hdisplay - dcrtc->cursor_x, 0);
	} else {
		xoff = 0;
		xscr = dcrtc->cursor_x;
	}

	if (dcrtc->cursor_y < 0) {
		yoff = -dcrtc->cursor_y;
		yscr = 0;
		h -= min(yoff, h);
	} else if (dcrtc->cursor_y + h > dcrtc->crtc.mode.vdisplay) {
		yoff = 0;
		yscr = dcrtc->cursor_y;
		h = max_t(int, dcrtc->crtc.mode.vdisplay - dcrtc->cursor_y, 0);
	} else {
		yoff = 0;
		yscr = dcrtc->cursor_y;
	}

	/* On interlaced modes, the vertical cursor size must be halved */
	s = dcrtc->cursor_w;
	if (dcrtc->interlaced) {
		s *= 2;
		yscr /= 2;
		h /= 2;
	}

	if (!dcrtc->cursor_obj || !h || !w) {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		return 0;
	}

	para1 = readl_relaxed(dcrtc->base + LCD_SPU_SRAM_PARA1);
	armada_updatel(CFG_CSB_256x32, CFG_CSB_256x32 | CFG_PDWN256x32,
		       dcrtc->base + LCD_SPU_SRAM_PARA1);

	/*
	 * Initialize the transparency if the SRAM was powered down.
	 * We must also reload the cursor data as well.
	 */
	if (!(para1 & CFG_CSB_256x32)) {
		armada_drm_crtc_cursor_tran(dcrtc->base);
		reload = true;
	}

	if (dcrtc->cursor_hw_sz != (h << 16 | w)) {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		reload = true;
	}
	if (reload) {
		struct armada_gem_object *obj = dcrtc->cursor_obj;
		uint32_t *pix;
		/* Set the top-left corner of the cursor image */
		pix = obj->addr;
		pix += yoff * s + xoff;
		armada_load_cursor_argb(dcrtc->base, pix, s, w, h);
	}

	/* Reload the cursor position, size and enable in the IRQ handler */
	spin_lock_irq(&dcrtc->irq_lock);
	dcrtc->cursor_hw_pos = yscr << 16 | xscr;
	dcrtc->cursor_hw_sz = h << 16 | w;
	dcrtc->cursor_update = true;
	armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	spin_unlock_irq(&dcrtc->irq_lock);

	return 0;
}

static void cursor_update(void *data)
{
	armada_drm_crtc_cursor_update(data, true);
}

static int armada_drm_crtc_cursor_set(struct drm_crtc *crtc,
	struct drm_file *file, uint32_t handle, uint32_t w, uint32_t h)
{
	struct drm_device *dev = crtc->dev;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_gem_object *obj = NULL;
	int ret;

	/* If no cursor support, replicate drm's return value */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -ENXIO;

	if (handle && w > 0 && h > 0) {
		/* maximum size is 64x32 or 32x64 */
		if (w > 64 || h > 64 || (w > 32 && h > 32))
			return -ENOMEM;

		obj = armada_gem_object_lookup(dev, file, handle);
		if (!obj)
			return -ENOENT;

		/* Must be a kernel-mapped object */
		if (!obj->addr) {
			drm_gem_object_unreference_unlocked(&obj->obj);
			return -EINVAL;
		}

		if (obj->obj.size < w * h * 4) {
			DRM_ERROR("buffer is too small\n");
			drm_gem_object_unreference_unlocked(&obj->obj);
			return -ENOMEM;
		}
	}

	mutex_lock(&dev->struct_mutex);
	if (dcrtc->cursor_obj) {
		dcrtc->cursor_obj->update = NULL;
		dcrtc->cursor_obj->update_data = NULL;
		drm_gem_object_unreference(&dcrtc->cursor_obj->obj);
	}
	dcrtc->cursor_obj = obj;
	dcrtc->cursor_w = w;
	dcrtc->cursor_h = h;
	ret = armada_drm_crtc_cursor_update(dcrtc, true);
	if (obj) {
		obj->update_data = dcrtc;
		obj->update = cursor_update;
	}
	mutex_unlock(&dev->struct_mutex);

	return ret;
}

static int armada_drm_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct drm_device *dev = crtc->dev;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/* If no cursor support, replicate drm's return value */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -EFAULT;

	mutex_lock(&dev->struct_mutex);
	dcrtc->cursor_x = x;
	dcrtc->cursor_y = y;
	ret = armada_drm_crtc_cursor_update(dcrtc, false);
	mutex_unlock(&dev->struct_mutex);

	return ret;
}

static void armada_drm_crtc_destroy(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_private *priv = crtc->dev->dev_private;

	if (dcrtc->cursor_obj)
		drm_gem_object_unreference(&dcrtc->cursor_obj->obj);

	priv->dcrtc[dcrtc->num] = NULL;
	drm_crtc_cleanup(&dcrtc->crtc);

	if (!IS_ERR(dcrtc->clk))
		clk_disable_unprepare(dcrtc->clk);

	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ENA);

	of_node_put(dcrtc->crtc.port);

	kfree(dcrtc);
}

/*
 * The mode_config lock is held here, to prevent races between this
 * and a mode_set.
 */
static int armada_drm_crtc_page_flip(struct drm_crtc *crtc,
	struct drm_framebuffer *fb, struct drm_pending_vblank_event *event,
	uint32_t page_flip_flags)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_frame_work *work;
	struct drm_device *dev = crtc->dev;
	unsigned long flags;
	unsigned i;
	int ret;

	/* We don't support changing the pixel format */
	if (fb->pixel_format != crtc->primary->fb->pixel_format)
		return -EINVAL;

	work = kmalloc(sizeof(*work), GFP_KERNEL);
	if (!work)
		return -ENOMEM;

	work->event = event;
	work->old_fb = dcrtc->crtc.primary->fb;

	i = armada_drm_crtc_calc_fb(fb, crtc->x, crtc->y, work->regs,
		dcrtc->interlaced);
	armada_reg_queue_end(work->regs, i);

	/*
	 * Ensure that we hold a reference on the new framebuffer.
	 * This has to match the behaviour in mode_set.
	 */
	drm_framebuffer_reference(fb);

	ret = armada_drm_crtc_queue_frame_work(dcrtc, work);
	if (ret) {
		/* Undo our reference above */
		drm_framebuffer_unreference(fb);
		kfree(work);
		return ret;
	}

	/*
	 * Don't take a reference on the new framebuffer;
	 * drm_mode_page_flip_ioctl() has already grabbed a reference and
	 * will _not_ drop that reference on successful return from this
	 * function. Simply mark this new framebuffer as the current one.
	 */
	dcrtc->crtc.primary->fb = fb;

	/*
	 * Finally, if the display is blanked, we won't receive an
	 * interrupt, so complete it now.
	 */
	if (dpms_blanked(dcrtc->dpms)) {
		spin_lock_irqsave(&dev->event_lock, flags);
		if (dcrtc->frame_work)
			armada_drm_crtc_complete_frame_work(dcrtc);
		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	return 0;
}

static int
armada_drm_crtc_set_property(struct drm_crtc *crtc,
	struct drm_property *property, uint64_t val)
{
	struct armada_private *priv = crtc->dev->dev_private;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	bool update_csc = false;

	if (property == priv->csc_yuv_prop) {
		dcrtc->csc_yuv_mode = val;
		update_csc = true;
	} else if (property == priv->csc_rgb_prop) {
		dcrtc->csc_rgb_mode = val;
		update_csc = true;
	}

	if (update_csc) {
		uint32_t val;

		val = dcrtc->spu_iopad_ctrl |
		      armada_drm_crtc_calculate_csc(dcrtc);
		writel_relaxed(val, dcrtc->base + LCD_SPU_IOPAD_CONTROL);
	}

	return 0;
}

static struct drm_crtc_funcs armada_crtc_funcs = {
	.cursor_set = armada_drm_crtc_cursor_set,
	.cursor_move = armada_drm_crtc_cursor_move,
	.destroy = armada_drm_crtc_destroy,
	.set_config = drm_crtc_helper_set_config,
	.page_flip = armada_drm_crtc_page_flip,
	.set_property = armada_drm_crtc_set_property,
};

static struct drm_prop_enum_list armada_drm_csc_yuv_enum_list[] = {
	{ CSC_AUTO, "Auto" },
	{ CSC_YUV_CCIR601, "CCIR601" },
	{ CSC_YUV_CCIR709, "CCIR709" },
};

static struct drm_prop_enum_list armada_drm_csc_rgb_enum_list[] = {
	{ CSC_AUTO, "Auto" },
	{ CSC_RGB_COMPUTER, "Computer system" },
	{ CSC_RGB_STUDIO, "Studio" },
};

static int armada_drm_crtc_create_properties(struct drm_device *dev)
{
	struct armada_private *priv = dev->dev_private;

	if (priv->csc_yuv_prop)
		return 0;

	priv->csc_yuv_prop = drm_property_create_enum(dev, 0,
		"CSC_YUV", armada_drm_csc_yuv_enum_list,
		ARRAY_SIZE(armada_drm_csc_yuv_enum_list));
	priv->csc_rgb_prop = drm_property_create_enum(dev, 0,
		"CSC_RGB", armada_drm_csc_rgb_enum_list,
		ARRAY_SIZE(armada_drm_csc_rgb_enum_list));

	if (!priv->csc_yuv_prop || !priv->csc_rgb_prop)
		return -ENOMEM;

	return 0;
}

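/*
 * Create a CRTC: map its registers, program safe power-down defaults,
 * claim the interrupt, run any variant-specific initialisation, register
 * the DRM CRTC with its CSC properties, and create the associated overlay
 * plane.
 */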
static int armada_drm_crtc_create(struct drm_device *drm, struct device *dev,
	struct resource *res, int irq, const struct armada_variant *variant,
	struct device_node *port)
{
	struct armada_private *priv = drm->dev_private;
	struct armada_crtc *dcrtc;
	void __iomem *base;
	int ret;

	ret = armada_drm_crtc_create_properties(drm);
	if (ret)
		return ret;

	base = devm_ioremap_resource(dev, res);
	if (IS_ERR(base))
		return PTR_ERR(base);

	dcrtc = kzalloc(sizeof(*dcrtc), GFP_KERNEL);
	if (!dcrtc) {
		DRM_ERROR("failed to allocate Armada crtc\n");
		return -ENOMEM;
	}

	if (dev != drm->dev)
		dev_set_drvdata(dev, dcrtc);

	dcrtc->variant = variant;
	dcrtc->base = base;
	dcrtc->num = drm->mode_config.num_crtc;
	dcrtc->clk = ERR_PTR(-EINVAL);
	dcrtc->csc_yuv_mode = CSC_AUTO;
	dcrtc->csc_rgb_mode = CSC_AUTO;
	dcrtc->cfg_dumb_ctrl = DUMB24_RGB888_0;
	dcrtc->spu_iopad_ctrl = CFG_VSCALE_LN_EN | CFG_IOPAD_DUMB24;
	spin_lock_init(&dcrtc->irq_lock);
	dcrtc->irq_ena = CLEAN_SPU_IRQ_ISR;
	INIT_LIST_HEAD(&dcrtc->vbl_list);
	init_waitqueue_head(&dcrtc->frame_wait);

	/* Initialize some registers which we don't otherwise set */
	writel_relaxed(0x00000001, dcrtc->base + LCD_CFG_SCLK_DIV);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_BLANKCOLOR);
	writel_relaxed(dcrtc->spu_iopad_ctrl,
		       dcrtc->base + LCD_SPU_IOPAD_CONTROL);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_SRAM_PARA0);
	writel_relaxed(CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
		       CFG_PDWN32x32 | CFG_PDWN16x66 | CFG_PDWN32x66 |
		       CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
	writel_relaxed(0x2032ff81, dcrtc->base + LCD_SPU_DMA_CTRL1);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_GRA_OVSA_HPXL_VLN);
	writel_relaxed(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	ret = devm_request_irq(dev, irq, armada_drm_irq, 0, "armada_drm_crtc",
			       dcrtc);
	if (ret < 0) {
		kfree(dcrtc);
		return ret;
	}

	if (dcrtc->variant->init) {
		ret = dcrtc->variant->init(dcrtc, dev);
		if (ret) {
			kfree(dcrtc);
			return ret;
		}
	}

	/* Ensure AXI pipeline is enabled */
	armada_updatel(CFG_ARBFAST_ENA, 0, dcrtc->base + LCD_SPU_DMA_CTRL0);

	priv->dcrtc[dcrtc->num] = dcrtc;

	dcrtc->crtc.port = port;
	drm_crtc_init(drm, &dcrtc->crtc, &armada_crtc_funcs);
	drm_crtc_helper_add(&dcrtc->crtc, &armada_crtc_helper_funcs);

	drm_object_attach_property(&dcrtc->crtc.base, priv->csc_yuv_prop,
				   dcrtc->csc_yuv_mode);
	drm_object_attach_property(&dcrtc->crtc.base, priv->csc_rgb_prop,
				   dcrtc->csc_rgb_mode);

	return armada_overlay_plane_create(drm, 1 << dcrtc->num);
}

static int
armada_lcd_bind(struct device *dev, struct device *master, void *data)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct drm_device *drm = data;
	struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	int irq = platform_get_irq(pdev, 0);
	const struct armada_variant *variant;
	struct device_node *port = NULL;

	if (irq < 0)
		return irq;

	if (!dev->of_node) {
		const struct platform_device_id *id;

		id = platform_get_device_id(pdev);
		if (!id)
			return -ENXIO;

		variant = (const struct armada_variant *)id->driver_data;
	} else {
		const struct of_device_id *match;
		struct device_node *np, *parent = dev->of_node;

		match = of_match_device(dev->driver->of_match_table, dev);
		if (!match)
			return -ENXIO;

		np = of_get_child_by_name(parent, "ports");
		if (np)
			parent = np;
		port = of_get_child_by_name(parent, "port");
		of_node_put(np);
		if (!port) {
			dev_err(dev, "no port node found in %s\n",
				parent->full_name);
			return -ENXIO;
		}

		variant = match->data;
	}

	return armada_drm_crtc_create(drm, dev, res, irq, variant, port);
}

static void
armada_lcd_unbind(struct device *dev, struct device *master, void *data)
{
	struct armada_crtc *dcrtc = dev_get_drvdata(dev);

	armada_drm_crtc_destroy(&dcrtc->crtc);
}

static const struct component_ops armada_lcd_ops = {
	.bind = armada_lcd_bind,
	.unbind = armada_lcd_unbind,
};

static int armada_lcd_probe(struct platform_device *pdev)
{
	return component_add(&pdev->dev, &armada_lcd_ops);
}

static int armada_lcd_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &armada_lcd_ops);
	return 0;
}

static struct of_device_id armada_lcd_of_match[] = {
	{
		.compatible = "marvell,dove-lcd",
		.data = &armada510_ops,
	},
	{}
};
MODULE_DEVICE_TABLE(of, armada_lcd_of_match);

static const struct platform_device_id armada_lcd_platform_ids[] = {
	{
		.name = "armada-lcd",
		.driver_data = (unsigned long)&armada510_ops,
	}, {
		.name = "armada-510-lcd",
		.driver_data = (unsigned long)&armada510_ops,
	},
	{ },
};
MODULE_DEVICE_TABLE(platform, armada_lcd_platform_ids);

struct platform_driver armada_lcd_platform_driver = {
	.probe = armada_lcd_probe,
	.remove = armada_lcd_remove,
	.driver = {
		.name = "armada-lcd",
		.owner = THIS_MODULE,
		.of_match_table = armada_lcd_of_match,
	},
	.id_table = armada_lcd_platform_ids,
};