blob: 77b22bfec7294981a533a5186c09f0ed7c46f5c6 [file] [log] [blame]
Sachin Bhayaree78ce6e2018-01-23 11:22:54 +05301/* Copyright (c) 2013-2014, 2016-2018, The Linux Foundation. All rights reserved.
Sachin Bhayareeeb88892018-01-02 16:36:01 +05302 * This program is free software; you can redistribute it and/or modify
3 * it under the terms of the GNU General Public License version 2 and
4 * only version 2 as published by the Free Software Foundation.
5 *
6 * This program is distributed in the hope that it will be useful,
7 * but WITHOUT ANY WARRANTY; without even the implied warranty of
8 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 * GNU General Public License for more details.
10 *
11 */
12#include <linux/bitops.h>
13#include <linux/iopoll.h>
14
15#include "mdp3.h"
16#include "mdp3_dma.h"
17#include "mdp3_hwio.h"
18#include "mdss_debug.h"
Animesh Kishore3650a562018-11-16 00:26:26 +053019#include "mdp3_ctrl.h"
Sachin Bhayareeeb88892018-01-02 16:36:01 +053020
21#define DMA_STOP_POLL_SLEEP_US 1000
22#define DMA_STOP_POLL_TIMEOUT_US 200000
23#define DMA_HISTO_RESET_TIMEOUT_MS 40
24#define DMA_LUT_CONFIG_MASK 0xfffffbe8
25#define DMA_CCS_CONFIG_MASK 0xfffffc17
26#define HIST_WAIT_TIMEOUT(frame) ((75 * HZ * (frame)) / 1000)
27
28#define VSYNC_SELECT 0x024
29#define VSYNC_TOTAL_LINES_SHIFT 21
30#define VSYNC_COUNT_MASK 0x7ffff
31#define VSYNC_THRESH_CONT_SHIFT 16
32
/*
 * mdp3_vsync_intr_handler() - vsync ISR for an MDP3 DMA pipe.
 * @type: MDP3 interrupt id that fired (used to disable it again).
 * @arg:  struct mdp3_dma registered with the interrupt.
 *
 * Snapshots the vsync/retire notification clients under dma_lock,
 * completes vsync_comp when a waiter armed it, then invokes the client
 * callbacks outside the lock.  When no vsync client is registered the
 * interrupt is disabled again so a one-shot wait does not leave it on.
 */
static void mdp3_vsync_intr_handler(int type, void *arg)
{
	struct mdp3_dma *dma = (struct mdp3_dma *)arg;
	struct mdp3_notification vsync_client;
	struct mdp3_notification retire_client;
	unsigned int wait_for_next_vs;

	if (!dma) {
		pr_err("dma is null\n");
		return;
	}

	pr_debug("mdp3_vsync_intr_handler\n");
	MDSS_XLOG(0x111, dma->vsync_period);
	spin_lock(&dma->dma_lock);
	/* copy the clients so they can be called without holding the lock */
	vsync_client = dma->vsync_client;
	retire_client = dma->retire_client;
	/*
	 * vsync_status == 0 presumably means a waiter armed vsync_comp for
	 * this vsync (see mdp3_dmap_update) — TODO confirm with caller.
	 */
	wait_for_next_vs = !dma->vsync_status;
	dma->vsync_status = 0;
	if (wait_for_next_vs)
		complete(&dma->vsync_comp);
	spin_unlock(&dma->dma_lock);
	if (vsync_client.handler) {
		vsync_client.handler(vsync_client.arg);
	} else {
		/* nobody listening: one-shot wait, turn the irq back off */
		if (wait_for_next_vs)
			mdp3_irq_disable_nosync(type);
	}

	if (retire_client.handler)
		retire_client.handler(retire_client.arg);
}
65
/*
 * mdp3_dma_done_intr_handler() - DMA-done ISR.
 * @type: MDP3 interrupt id that fired; disabled again before notifying.
 * @arg:  struct mdp3_dma registered with the interrupt.
 *
 * Completes dma_comp so a frame-update waiter can proceed, disables the
 * done interrupt and notifies the registered client (if any) outside the
 * spinlock.
 */
static void mdp3_dma_done_intr_handler(int type, void *arg)
{
	struct mdp3_dma *dma = (struct mdp3_dma *)arg;
	struct mdp3_notification dma_client;

	if (!dma) {
		pr_err("dma is null\n");
		return;
	}

	pr_debug("mdp3_dma_done_intr_handler\n");
	spin_lock(&dma->dma_lock);
	/* snapshot client so the callback runs without the lock held */
	dma_client = dma->dma_notifier_client;
	complete(&dma->dma_comp);
	spin_unlock(&dma->dma_lock);
	mdp3_irq_disable_nosync(type);
	if (dma_client.handler)
		dma_client.handler(dma_client.arg);
}
85
/*
 * mdp3_hist_done_intr_handler() - DMA_P histogram ISR.
 * @type: MDP3 interrupt id that fired (unused here).
 * @arg:  struct mdp3_dma registered with the interrupt.
 *
 * Reads and acknowledges the histogram interrupt status, then advances
 * the histogram state machine: HIST_DONE marks the sample READY and
 * calls mdp3_hist_intr_notify(); RESET_DONE returns the engine to IDLE.
 * Both paths complete histo_comp for waiters in mdp3_dmap_histo_get()
 * and mdp3_dmap_histo_reset().
 */
static void mdp3_hist_done_intr_handler(int type, void *arg)
{
	struct mdp3_dma *dma = (struct mdp3_dma *)arg;
	u32 isr, mask;

	if (!dma) {
		pr_err("dma is null\n");
		return;
	}

	isr = MDP3_REG_READ(MDP3_REG_DMA_P_HIST_INTR_STATUS);
	mask = MDP3_REG_READ(MDP3_REG_DMA_P_HIST_INTR_ENABLE);
	/* ack everything pending, even sources we do not handle */
	MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_INTR_CLEAR, isr);

	/* react only to sources that are actually enabled */
	isr &= mask;
	if (isr == 0)
		return;

	if (isr & MDP3_DMA_P_HIST_INTR_HIST_DONE_BIT) {
		spin_lock(&dma->histo_lock);
		dma->histo_state = MDP3_DMA_HISTO_STATE_READY;
		complete(&dma->histo_comp);
		spin_unlock(&dma->histo_lock);
		mdp3_hist_intr_notify(dma);
	}
	if (isr & MDP3_DMA_P_HIST_INTR_RESET_DONE_BIT) {
		spin_lock(&dma->histo_lock);
		dma->histo_state = MDP3_DMA_HISTO_STATE_IDLE;
		complete(&dma->histo_comp);
		spin_unlock(&dma->histo_lock);
	}
}
118
119void mdp3_dma_callback_enable(struct mdp3_dma *dma, int type)
120{
121 int irq_bit;
122
123 pr_debug("mdp3_dma_callback_enable type=%d\n", type);
124
125 if (dma->dma_sel == MDP3_DMA_P) {
126 if (type & MDP3_DMA_CALLBACK_TYPE_HIST_RESET_DONE)
127 mdp3_irq_enable(MDP3_INTR_DMA_P_HISTO);
128
129 if (type & MDP3_DMA_CALLBACK_TYPE_HIST_DONE)
130 mdp3_irq_enable(MDP3_INTR_DMA_P_HISTO);
131 }
132
133 if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_VIDEO ||
Arun kumardb962812018-05-30 16:31:52 +0530134 dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_LCDC ||
135 dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_SPI_CMD) {
Sachin Bhayareeeb88892018-01-02 16:36:01 +0530136 if (type & MDP3_DMA_CALLBACK_TYPE_VSYNC)
137 mdp3_irq_enable(MDP3_INTR_LCDC_START_OF_FRAME);
138 } else if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_CMD) {
139 if (type & MDP3_DMA_CALLBACK_TYPE_VSYNC) {
140 irq_bit = MDP3_INTR_SYNC_PRIMARY_LINE;
141 irq_bit += dma->dma_sel;
142 mdp3_irq_enable(irq_bit);
143 }
144
145 if (type & MDP3_DMA_CALLBACK_TYPE_DMA_DONE) {
146 irq_bit = MDP3_INTR_DMA_P_DONE;
147 if (dma->dma_sel == MDP3_DMA_S)
148 irq_bit = MDP3_INTR_DMA_S_DONE;
149 mdp3_irq_enable(irq_bit);
150 }
151 } else {
152 pr_err("mdp3_dma_callback_enable not supported interface\n");
153 }
154}
155
156void mdp3_dma_callback_disable(struct mdp3_dma *dma, int type)
157{
158 int irq_bit;
159
160 pr_debug("mdp3_dma_callback_disable type=%d\n", type);
161
162 if (dma->dma_sel == MDP3_DMA_P) {
163 if (type & MDP3_DMA_CALLBACK_TYPE_HIST_RESET_DONE)
164 mdp3_irq_disable(MDP3_INTR_DMA_P_HISTO);
165
166 if (type & MDP3_DMA_CALLBACK_TYPE_HIST_DONE)
167 mdp3_irq_disable(MDP3_INTR_DMA_P_HISTO);
168 }
169
170 if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_VIDEO ||
Arun kumardb962812018-05-30 16:31:52 +0530171 dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_LCDC ||
172 dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_SPI_CMD) {
Sachin Bhayareeeb88892018-01-02 16:36:01 +0530173 if (type & MDP3_DMA_CALLBACK_TYPE_VSYNC)
174 mdp3_irq_disable(MDP3_INTR_LCDC_START_OF_FRAME);
175 } else if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_CMD) {
176 if (type & MDP3_DMA_CALLBACK_TYPE_VSYNC) {
177 irq_bit = MDP3_INTR_SYNC_PRIMARY_LINE;
178 irq_bit += dma->dma_sel;
179 mdp3_irq_disable(irq_bit);
180 /*
181 * Clear read pointer interrupt before disabling clocks.
182 * Else pending ISR handling will result in NOC error
183 * since the clock will be disable after this point.
184 */
185 mdp3_clear_irq(irq_bit);
186 }
187
188 if (type & MDP3_DMA_CALLBACK_TYPE_DMA_DONE) {
189 irq_bit = MDP3_INTR_DMA_P_DONE;
190 if (dma->dma_sel == MDP3_DMA_S)
191 irq_bit = MDP3_INTR_DMA_S_DONE;
192 mdp3_irq_disable(irq_bit);
193 }
194 }
195}
196
197static int mdp3_dma_callback_setup(struct mdp3_dma *dma)
198{
199 int rc = 0;
200 struct mdp3_intr_cb vsync_cb = {
201 .cb = mdp3_vsync_intr_handler,
202 .data = dma,
203 };
204
205 struct mdp3_intr_cb dma_cb = {
206 .cb = mdp3_dma_done_intr_handler,
207 .data = dma,
208 };
209
210
211 struct mdp3_intr_cb hist_cb = {
212 .cb = mdp3_hist_done_intr_handler,
213 .data = dma,
214 };
215
216 if (dma->dma_sel == MDP3_DMA_P)
217 rc = mdp3_set_intr_callback(MDP3_INTR_DMA_P_HISTO, &hist_cb);
218
219 if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_VIDEO ||
220 dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_LCDC)
221 rc |= mdp3_set_intr_callback(MDP3_INTR_LCDC_START_OF_FRAME,
222 &vsync_cb);
223 else if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_CMD) {
224 int irq_bit = MDP3_INTR_SYNC_PRIMARY_LINE;
225
226 irq_bit += dma->dma_sel;
227 rc |= mdp3_set_intr_callback(irq_bit, &vsync_cb);
228 irq_bit = MDP3_INTR_DMA_P_DONE;
229 if (dma->dma_sel == MDP3_DMA_S)
230 irq_bit = MDP3_INTR_DMA_S_DONE;
231 rc |= mdp3_set_intr_callback(irq_bit, &dma_cb);
232 } else {
233 pr_err("mdp3_dma_callback_setup not supported interface\n");
234 rc = -ENODEV;
235 }
236
237 return rc;
238}
239
240static void mdp3_dma_vsync_enable(struct mdp3_dma *dma,
241 struct mdp3_notification *vsync_client)
242{
243 unsigned long flag;
244 int updated = 0;
245 int cb_type = MDP3_DMA_CALLBACK_TYPE_VSYNC;
246
247 pr_debug("mdp3_dma_vsync_enable\n");
248
249 spin_lock_irqsave(&dma->dma_lock, flag);
250 if (vsync_client) {
251 if (dma->vsync_client.handler != vsync_client->handler) {
252 dma->vsync_client = *vsync_client;
253 updated = 1;
254 }
255 } else {
256 if (dma->vsync_client.handler) {
257 dma->vsync_client.handler = NULL;
258 dma->vsync_client.arg = NULL;
259 updated = 1;
260 }
261 }
262 spin_unlock_irqrestore(&dma->dma_lock, flag);
263
264 if (updated) {
265 if (vsync_client && vsync_client->handler)
266 mdp3_dma_callback_enable(dma, cb_type);
267 else
268 mdp3_dma_callback_disable(dma, cb_type);
269 }
270}
271
272static void mdp3_dma_done_notifier(struct mdp3_dma *dma,
273 struct mdp3_notification *dma_client)
274{
275 unsigned long flag;
276
277 spin_lock_irqsave(&dma->dma_lock, flag);
278 if (dma_client) {
279 dma->dma_notifier_client = *dma_client;
280 } else {
281 dma->dma_notifier_client.handler = NULL;
282 dma->dma_notifier_client.arg = NULL;
283 }
284 spin_unlock_irqrestore(&dma->dma_lock, flag);
285}
286
/*
 * mdp3_dma_sync_config() - program the hardware tear-check/vsync counter.
 * @dma:           dma pipe to configure (register offsets keyed by dma_sel).
 * @source_config: supplies panel height and vertical porch.
 * @te:            tear-check parameters from the panel driver.
 *
 * Derives the vsync counter divider from the panel's total line count,
 * frame rate and the refresh-rate correction factor (refx100), then
 * programs the SYNC_CONFIG, VSYNC_SEL, threshold, start-position and
 * TEAR_CHECK_EN registers.  Returns 0.
 */
int mdp3_dma_sync_config(struct mdp3_dma *dma,
	struct mdp3_dma_source *source_config, struct mdp3_tear_check *te)
{
	u32 vsync_clk_speed_hz, vclks_line, cfg;
	int porch = source_config->vporch;
	int height = source_config->height;
	int total_lines = height + porch;
	int dma_sel = dma->dma_sel;

	vsync_clk_speed_hz = MDP_VSYNC_CLK_RATE;

	cfg = te->sync_cfg_height << VSYNC_TOTAL_LINES_SHIFT;
	total_lines *= te->frame_rate;

	/* vsync clock ticks per display line; guard divide-by-zero */
	vclks_line = (total_lines) ? vsync_clk_speed_hz / total_lines : 0;

	/*
	 * NOTE(review): BIT(19)/BIT(20) meanings are not visible here —
	 * presumably counter enable and hw-vsync select; confirm against
	 * the MDP3 register spec.
	 */
	cfg |= BIT(19);
	if (te->hw_vsync_mode)
		cfg |= BIT(20);

	if (te->refx100) {
		vclks_line = vclks_line * te->frame_rate *
			100 / te->refx100;
	} else {
		pr_warn("refx100 cannot be zero! Use 6000 as default\n");
		vclks_line = vclks_line * te->frame_rate *
			100 / 6000;
	}

	cfg |= (vclks_line & VSYNC_COUNT_MASK);

	MDP3_REG_WRITE(MDP3_REG_SYNC_CONFIG_0 + dma_sel, cfg);
	MDP3_REG_WRITE(MDP3_REG_VSYNC_SEL, VSYNC_SELECT);
	MDP3_REG_WRITE(MDP3_REG_PRIMARY_VSYNC_INIT_VAL + dma_sel,
				te->vsync_init_val);
	MDP3_REG_WRITE(MDP3_REG_PRIMARY_RD_PTR_IRQ, te->rd_ptr_irq);
	MDP3_REG_WRITE(MDP3_REG_SYNC_THRESH_0 + dma_sel,
			((te->sync_threshold_continue <<
				VSYNC_THRESH_CONT_SHIFT) |
			te->sync_threshold_start));
	MDP3_REG_WRITE(MDP3_REG_PRIMARY_START_P0S + dma_sel, te->start_pos);
	MDP3_REG_WRITE(MDP3_REG_TEAR_CHECK_EN, te->tear_check_en);
	return 0;
}
330
/*
 * mdp3_dmap_config() - full DMA_P pipe configuration.
 * @dma:                  dma pipe state to update.
 * @source_config:        input buffer geometry and format.
 * @output_config:        output packing and destination interface.
 * @splash_screen_active: when true, skip the register writes so the
 *                        bootloader splash frame is not disturbed; only
 *                        the cached state is updated.
 *
 * Packs the parameters into DMA_P_CONFIG (format[25+], dither[24],
 * out_sel[19+], mask polarity[18], component flip[14+], pack
 * pattern[8+], pack align[7], out bits[0+]), programs geometry and
 * fetch registers, caches both configs, enables the underflow irq for
 * non-DSI-command outputs and re-registers interrupt callbacks.
 * Returns 0.
 */
static int mdp3_dmap_config(struct mdp3_dma *dma,
			struct mdp3_dma_source *source_config,
			struct mdp3_dma_output_config *output_config,
			bool splash_screen_active)
{
	u32 dma_p_cfg_reg, dma_p_size, dma_p_out_xy;

	dma_p_cfg_reg = source_config->format << 25;
	if (output_config->dither_en)
		dma_p_cfg_reg |= BIT(24);
	dma_p_cfg_reg |= output_config->out_sel << 19;
	dma_p_cfg_reg |= output_config->bit_mask_polarity << 18;
	dma_p_cfg_reg |= output_config->color_components_flip << 14;
	dma_p_cfg_reg |= output_config->pack_pattern << 8;
	dma_p_cfg_reg |= output_config->pack_align << 7;
	dma_p_cfg_reg |= output_config->color_comp_out_bits;

	dma_p_size = source_config->width | (source_config->height << 16);
	dma_p_out_xy = source_config->x | (source_config->y << 16);
	if (!splash_screen_active) {
		MDP3_REG_WRITE(MDP3_REG_DMA_P_CONFIG, dma_p_cfg_reg);
		MDP3_REG_WRITE(MDP3_REG_DMA_P_SIZE, dma_p_size);
		MDP3_REG_WRITE(MDP3_REG_DMA_P_IBUF_ADDR,
				(u32)source_config->buf);
		MDP3_REG_WRITE(MDP3_REG_DMA_P_IBUF_Y_STRIDE,
				source_config->stride);
		MDP3_REG_WRITE(MDP3_REG_DMA_P_OUT_XY, dma_p_out_xy);
		/* 0x40: fetch config magic — meaning not visible here */
		MDP3_REG_WRITE(MDP3_REG_DMA_P_FETCH_CFG, 0x40);
	}

	dma->source_config = *source_config;
	dma->output_config = *output_config;

	if (dma->output_config.out_sel != MDP3_DMA_OUTPUT_SEL_DSI_CMD)
		mdp3_irq_enable(MDP3_INTR_LCDC_UNDERFLOW);

	mdp3_dma_callback_setup(dma);
	return 0;
}
370
371static void mdp3_dmap_config_source(struct mdp3_dma *dma)
372{
373 struct mdp3_dma_source *source_config = &dma->source_config;
374 u32 dma_p_cfg_reg, dma_p_size;
375
376 dma_p_cfg_reg = MDP3_REG_READ(MDP3_REG_DMA_P_CONFIG);
377 dma_p_cfg_reg &= ~MDP3_DMA_IBUF_FORMAT_MASK;
378 dma_p_cfg_reg |= source_config->format << 25;
379 dma_p_cfg_reg &= ~MDP3_DMA_PACK_PATTERN_MASK;
380 dma_p_cfg_reg |= dma->output_config.pack_pattern << 8;
381
382 dma_p_size = dma->roi.w | (dma->roi.h << 16);
383
384 MDP3_REG_WRITE(MDP3_REG_DMA_P_CONFIG, dma_p_cfg_reg);
385 MDP3_REG_WRITE(MDP3_REG_DMA_P_SIZE, dma_p_size);
386 MDP3_REG_WRITE(MDP3_REG_DMA_P_IBUF_Y_STRIDE, source_config->stride);
387}
388
/*
 * mdp3_dmas_config() - full DMA_S pipe configuration.
 * Mirror of mdp3_dmap_config() for the secondary dma: packs format and
 * output parameters into DMA_S_CONFIG, programs the geometry registers
 * (skipped while the splash screen still owns the pipe), caches the
 * configs and re-registers interrupt callbacks.  Returns 0.
 */
static int mdp3_dmas_config(struct mdp3_dma *dma,
			struct mdp3_dma_source *source_config,
			struct mdp3_dma_output_config *output_config,
			bool splash_screen_active)
{
	u32 dma_s_cfg_reg, dma_s_size, dma_s_out_xy;

	dma_s_cfg_reg = source_config->format << 25;
	if (output_config->dither_en)
		dma_s_cfg_reg |= BIT(24);
	dma_s_cfg_reg |= output_config->out_sel << 19;
	dma_s_cfg_reg |= output_config->bit_mask_polarity << 18;
	dma_s_cfg_reg |= output_config->color_components_flip << 14;
	dma_s_cfg_reg |= output_config->pack_pattern << 8;
	dma_s_cfg_reg |= output_config->pack_align << 7;
	dma_s_cfg_reg |= output_config->color_comp_out_bits;

	dma_s_size = source_config->width | (source_config->height << 16);
	dma_s_out_xy = source_config->x | (source_config->y << 16);

	if (!splash_screen_active) {
		MDP3_REG_WRITE(MDP3_REG_DMA_S_CONFIG, dma_s_cfg_reg);
		MDP3_REG_WRITE(MDP3_REG_DMA_S_SIZE, dma_s_size);
		MDP3_REG_WRITE(MDP3_REG_DMA_S_IBUF_ADDR,
				(u32)source_config->buf);
		MDP3_REG_WRITE(MDP3_REG_DMA_S_IBUF_Y_STRIDE,
				source_config->stride);
		MDP3_REG_WRITE(MDP3_REG_DMA_S_OUT_XY, dma_s_out_xy);
		/* 0x10: read-pointer irq line — value meaning not visible here */
		MDP3_REG_WRITE(MDP3_REG_SECONDARY_RD_PTR_IRQ, 0x10);
	}
	dma->source_config = *source_config;
	dma->output_config = *output_config;

	mdp3_dma_callback_setup(dma);
	return 0;
}
425
426static void mdp3_dmas_config_source(struct mdp3_dma *dma)
427{
428 struct mdp3_dma_source *source_config = &dma->source_config;
429 u32 dma_s_cfg_reg, dma_s_size;
430
431 dma_s_cfg_reg = MDP3_REG_READ(MDP3_REG_DMA_S_CONFIG);
432 dma_s_cfg_reg &= ~MDP3_DMA_IBUF_FORMAT_MASK;
433 dma_s_cfg_reg |= source_config->format << 25;
434
435 dma_s_size = source_config->width | (source_config->height << 16);
436
437 MDP3_REG_WRITE(MDP3_REG_DMA_S_CONFIG, dma_s_cfg_reg);
438 MDP3_REG_WRITE(MDP3_REG_DMA_S_SIZE, dma_s_size);
439 MDP3_REG_WRITE(MDP3_REG_DMA_S_IBUF_Y_STRIDE, source_config->stride);
440}
441
442static int mdp3_dmap_cursor_config(struct mdp3_dma *dma,
443 struct mdp3_dma_cursor *cursor)
444{
445 u32 cursor_size, cursor_pos, blend_param, trans_mask;
446
447 cursor_size = cursor->width | (cursor->height << 16);
448 cursor_pos = cursor->x | (cursor->y << 16);
449 trans_mask = 0;
450 if (cursor->blend_config.mode == MDP3_DMA_CURSOR_BLEND_CONSTANT_ALPHA) {
451 blend_param = cursor->blend_config.constant_alpha << 24;
452 } else if (cursor->blend_config.mode ==
453 MDP3_DMA_CURSOR_BLEND_COLOR_KEYING) {
454 blend_param = cursor->blend_config.transparent_color;
455 trans_mask = cursor->blend_config.transparency_mask;
456 } else {
457 blend_param = 0;
458 }
459
460 MDP3_REG_WRITE(MDP3_REG_DMA_P_CURSOR_FORMAT, cursor->format);
461 MDP3_REG_WRITE(MDP3_REG_DMA_P_CURSOR_SIZE, cursor_size);
462 MDP3_REG_WRITE(MDP3_REG_DMA_P_CURSOR_BUF_ADDR, (u32)cursor->buf);
463 MDP3_REG_WRITE(MDP3_REG_DMA_P_CURSOR_POS, cursor_pos);
464 MDP3_REG_WRITE(MDP3_REG_DMA_P_CURSOR_BLEND_CONFIG,
465 cursor->blend_config.mode);
466 MDP3_REG_WRITE(MDP3_REG_DMA_P_CURSOR_BLEND_PARAM, blend_param);
467 MDP3_REG_WRITE(MDP3_REG_DMA_P_CURSOR_BLEND_TRANS_MASK, trans_mask);
468 dma->cursor = *cursor;
469 return 0;
470}
471
472static int mdp3_dmap_ccs_config_internal(struct mdp3_dma *dma,
473 struct mdp3_dma_color_correct_config *config,
474 struct mdp3_dma_ccs *ccs)
475{
476 int i;
477 u32 addr;
478
479 if (!ccs)
480 return -EINVAL;
481
482 if (config->ccs_enable) {
483 addr = MDP3_REG_DMA_P_CSC_MV1;
484 if (config->ccs_sel)
485 addr = MDP3_REG_DMA_P_CSC_MV2;
486 for (i = 0; i < 9; i++) {
487 MDP3_REG_WRITE(addr, ccs->mv[i]);
488 addr += 4;
489 }
490
491 addr = MDP3_REG_DMA_P_CSC_PRE_BV1;
492 if (config->pre_bias_sel)
493 addr = MDP3_REG_DMA_P_CSC_PRE_BV2;
494 for (i = 0; i < 3; i++) {
495 MDP3_REG_WRITE(addr, ccs->pre_bv[i]);
496 addr += 4;
497 }
498
499 addr = MDP3_REG_DMA_P_CSC_POST_BV1;
500 if (config->post_bias_sel)
501 addr = MDP3_REG_DMA_P_CSC_POST_BV2;
502 for (i = 0; i < 3; i++) {
503 MDP3_REG_WRITE(addr, ccs->post_bv[i]);
504 addr += 4;
505 }
506
507 addr = MDP3_REG_DMA_P_CSC_PRE_LV1;
508 if (config->pre_limit_sel)
509 addr = MDP3_REG_DMA_P_CSC_PRE_LV2;
510 for (i = 0; i < 6; i++) {
511 MDP3_REG_WRITE(addr, ccs->pre_lv[i]);
512 addr += 4;
513 }
514
515 addr = MDP3_REG_DMA_P_CSC_POST_LV1;
516 if (config->post_limit_sel)
517 addr = MDP3_REG_DMA_P_CSC_POST_LV2;
518 for (i = 0; i < 6; i++) {
519 MDP3_REG_WRITE(addr, ccs->post_lv[i]);
520 addr += 4;
521 }
522 }
523 return 0;
524}
525
/*
 * mdp3_ccs_update() - flush dirty CCS/LUT settings to DMA_P.
 * @dma:          dma pipe whose cached configuration is examined.
 * @from_kickoff: true on the frame-kickoff/underrun paths (callers there
 *                hold pp_lock), false from the config entry points.
 *
 * Rebuilds DMA_P_COLOR_CORRECT_CONFIG from the dirty ccs/lut caches
 * and, on the kickoff path, also reprograms the CSC vectors from
 * dma->ccs_cache.  The register is written only when something changed.
 */
static void mdp3_ccs_update(struct mdp3_dma *dma, bool from_kickoff)
{
	u32 cc_config;
	bool ccs_updated = false, lut_updated = false;
	struct mdp3_dma_ccs ccs;

	cc_config = MDP3_REG_READ(MDP3_REG_DMA_P_COLOR_CORRECT_CONFIG);

	if (dma->ccs_config.ccs_dirty) {
		/* clear the CCS-owned fields, keep everything else */
		cc_config &= DMA_CCS_CONFIG_MASK;
		if (dma->ccs_config.ccs_enable)
			cc_config |= BIT(3);
		else
			cc_config &= ~BIT(3);
		cc_config |= dma->ccs_config.ccs_sel << 5;
		cc_config |= dma->ccs_config.pre_bias_sel << 6;
		cc_config |= dma->ccs_config.post_bias_sel << 7;
		cc_config |= dma->ccs_config.pre_limit_sel << 8;
		cc_config |= dma->ccs_config.post_limit_sel << 9;
		/*
		 * CCS dirty flag should be reset when call is made from frame
		 * kickoff, or else upon resume the flag would be dirty and LUT
		 * config could call this function thereby causing no register
		 * programming for CCS, which will cause screen to go dark
		 */
		if (from_kickoff)
			dma->ccs_config.ccs_dirty = false;
		ccs_updated = true;
	}

	if (dma->lut_config.lut_dirty) {
		/* clear the LUT-owned fields, keep everything else */
		cc_config &= DMA_LUT_CONFIG_MASK;
		cc_config |= dma->lut_config.lut_enable;
		cc_config |= dma->lut_config.lut_position << 4;
		cc_config |= dma->lut_config.lut_sel << 10;
		dma->lut_config.lut_dirty = false;
		lut_updated = true;
	}

	if (ccs_updated && from_kickoff) {
		/* reprogram the full vector set from the cached CSC data */
		ccs.mv = dma->ccs_cache.csc_data.csc_mv;
		ccs.pre_bv = dma->ccs_cache.csc_data.csc_pre_bv;
		ccs.post_bv = dma->ccs_cache.csc_data.csc_post_bv;
		ccs.pre_lv = dma->ccs_cache.csc_data.csc_pre_lv;
		ccs.post_lv = dma->ccs_cache.csc_data.csc_post_lv;
		mdp3_dmap_ccs_config_internal(dma, &dma->ccs_config, &ccs);
	}

	if (lut_updated || ccs_updated) {
		MDP3_REG_WRITE(MDP3_REG_DMA_P_COLOR_CORRECT_CONFIG, cc_config);
		/*
		 * Make sure ccs configuration update is done before continuing
		 * with the DMA transfer
		 */
		wmb(); /* ensure write is finished before progressing */
	}
}
583
584static int mdp3_dmap_ccs_config(struct mdp3_dma *dma,
585 struct mdp3_dma_color_correct_config *config,
586 struct mdp3_dma_ccs *ccs)
587{
588 mdp3_dmap_ccs_config_internal(dma, config, ccs);
589
590 dma->ccs_config = *config;
591
592 if (dma->output_config.out_sel != MDP3_DMA_OUTPUT_SEL_DSI_CMD)
593 mdp3_ccs_update(dma, false);
594
595 return 0;
596}
597
598static int mdp3_dmap_lut_config(struct mdp3_dma *dma,
599 struct mdp3_dma_lut_config *config,
600 struct fb_cmap *cmap)
601{
602 u32 addr, color;
603 int i;
604
605 if (config->lut_enable && cmap) {
606 addr = MDP3_REG_DMA_P_CSC_LUT1;
607 if (config->lut_sel)
608 addr = MDP3_REG_DMA_P_CSC_LUT2;
609
610 for (i = 0; i < MDP_LUT_SIZE; i++) {
611 color = cmap->green[i] & 0xff;
612 color |= (cmap->red[i] & 0xff) << 8;
613 color |= (cmap->blue[i] & 0xff) << 16;
614 MDP3_REG_WRITE(addr, color);
615 addr += 4;
616 }
617 }
618
619 dma->lut_config = *config;
620
621 if (dma->output_config.out_sel != MDP3_DMA_OUTPUT_SEL_DSI_CMD)
622 mdp3_ccs_update(dma, false);
623
624 return 0;
625}
626
627static int mdp3_dmap_histo_config(struct mdp3_dma *dma,
628 struct mdp3_dma_histogram_config *histo_config)
629{
630 unsigned long flag;
631 u32 histo_bit_mask = 0, histo_control = 0;
632 u32 histo_isr_mask = MDP3_DMA_P_HIST_INTR_HIST_DONE_BIT |
633 MDP3_DMA_P_HIST_INTR_RESET_DONE_BIT;
634
635 spin_lock_irqsave(&dma->histo_lock, flag);
636
637 if (histo_config->bit_mask_polarity)
638 histo_bit_mask = BIT(31);
639 histo_bit_mask |= histo_config->bit_mask;
640
641 if (histo_config->auto_clear_en)
642 histo_control = BIT(0);
643 MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_FRAME_CNT,
644 histo_config->frame_count);
645 MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_BIT_MASK, histo_bit_mask);
646 MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_CONTROL, histo_control);
647 MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_INTR_ENABLE, histo_isr_mask);
648
649 spin_unlock_irqrestore(&dma->histo_lock, flag);
650
651 dma->histogram_config = *histo_config;
652 return 0;
653}
654
655int dma_bpp(int format)
656{
657 int bpp;
658
659 switch (format) {
660 case MDP3_DMA_IBUF_FORMAT_RGB888:
661 bpp = 3;
662 break;
663 case MDP3_DMA_IBUF_FORMAT_RGB565:
664 bpp = 2;
665 break;
666 case MDP3_DMA_IBUF_FORMAT_XRGB8888:
667 bpp = 4;
668 break;
669 default:
670 bpp = 0;
671 }
672 return bpp;
673}
674
/*
 * mdp3_dmap_update() - push one frame out through DMA_P.
 * @dma:  dma pipe state.
 * @buf:  device address of the new front buffer.
 * @intf: output interface (started here if not yet active).
 * @data: optional struct mdss_panel_data; used when the source config
 *        changed and the panel must be told the new ROI/stream size.
 *
 * Command-mode: first waits (with one retry while the done irq has not
 * fired) for the previous frame's DMA-done, then kicks DMA_P_START.
 * Video-mode: programs the buffer and waits for vsync at the end so the
 * flip is latched.  Pending source-config and dirty CCS updates are
 * applied before the new address is written.
 * Returns the (remaining-jiffies) completion result, -1 on a stuck
 * completion, or 0 when no wait was needed.
 */
static int mdp3_dmap_update(struct mdp3_dma *dma, void *buf,
				struct mdp3_intf *intf, void *data)
{
	unsigned long flag;
	int cb_type = MDP3_DMA_CALLBACK_TYPE_VSYNC;
	struct mdss_panel_data *panel;
	int rc = 0;
	int retry_count = 2;

	ATRACE_BEGIN(__func__);
	pr_debug("mdp3_dmap_update\n");

	MDSS_XLOG(XLOG_FUNC_ENTRY, __LINE__);
	if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_CMD) {
		cb_type = MDP3_DMA_CALLBACK_TYPE_DMA_DONE;
		if (intf->active) {
			ATRACE_BEGIN("mdp3_wait_for_dma_comp");
retry_dma_done:
			rc = wait_for_completion_timeout(&dma->dma_comp,
				dma_timeout_value(dma));
			if (rc <= 0 && --retry_count) {
				int vsync_status;

				/*
				 * If the done irq has not even fired yet,
				 * wait once more; if it is pending but the
				 * completion never came, give up with -1.
				 */
				vsync_status = (1 << MDP3_INTR_DMA_P_DONE) &
					MDP3_REG_READ(MDP3_REG_INTR_STATUS);
				if (!vsync_status) {
					pr_err("%s: cmd timeout retry cnt %d\n",
						__func__, retry_count);
					goto retry_dma_done;
				}
				rc = -1;
			}
			ATRACE_END("mdp3_wait_for_dma_comp");
			if (rc <= 0 && retry_count == 0) {
				MDSS_XLOG_TOUT_HANDLER("mdp", "vbif",
					"dsi0_ctrl", "dsi0_phy");
			}
		}
	}
	if (dma->update_src_cfg) {
		if (dma->output_config.out_sel ==
			MDP3_DMA_OUTPUT_SEL_DSI_VIDEO && intf->active)
			pr_err("configuring dma source while it is active\n");
		dma->dma_config_source(dma);
		if (data) {
			/* tell the panel about the new ROI/stream size */
			panel = (struct mdss_panel_data *)data;
			if (panel->event_handler) {
				panel->event_handler(panel,
					MDSS_EVENT_ENABLE_PARTIAL_ROI, NULL);
				panel->event_handler(panel,
					MDSS_EVENT_DSI_STREAM_SIZE, NULL);
			}
		}
		dma->update_src_cfg = false;
	}
	mutex_lock(&dma->pp_lock);
	if (dma->ccs_config.ccs_dirty)
		mdp3_ccs_update(dma, true);
	mutex_unlock(&dma->pp_lock);
	spin_lock_irqsave(&dma->dma_lock, flag);
	/* point DMA_P at the ROI offset inside the new buffer */
	MDP3_REG_WRITE(MDP3_REG_DMA_P_IBUF_ADDR, (u32)(buf +
			dma->roi.y * dma->source_config.stride +
			dma->roi.x * dma_bpp(dma->source_config.format)));
	dma->source_config.buf = (int)buf;
	if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_CMD)
		MDP3_REG_WRITE(MDP3_REG_DMA_P_START, 1);

	if (!intf->active) {
		pr_debug("%s start interface\n", __func__);
		intf->start(intf);
	}

	mb(); /* make sure everything is written before enable */
	/* remember whether a start-of-frame irq is already pending */
	dma->vsync_status = MDP3_REG_READ(MDP3_REG_INTR_STATUS) &
		(1 << MDP3_INTR_LCDC_START_OF_FRAME);
	init_completion(&dma->vsync_comp);
	spin_unlock_irqrestore(&dma->dma_lock, flag);

	mdp3_dma_callback_enable(dma, cb_type);
	pr_debug("%s wait for vsync_comp\n", __func__);
	if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_VIDEO) {
		ATRACE_BEGIN("mdp3_wait_for_vsync_comp");
retry_vsync:
		rc = wait_for_completion_timeout(&dma->vsync_comp,
			dma_timeout_value(dma));
		if (rc <= 0 && --retry_count) {
			int vsync = MDP3_REG_READ(MDP3_REG_INTR_STATUS) &
				(1 << MDP3_INTR_LCDC_START_OF_FRAME);

			if (!vsync) {
				pr_err("%s trying again count = %d\n",
					__func__, retry_count);
				goto retry_vsync;
			}
			rc = -1;
		}
		ATRACE_END("mdp3_wait_for_vsync_comp");
	}
	pr_debug("$%s wait for vsync_comp out\n", __func__);
	ATRACE_END(__func__);
	return rc;
}
777
/*
 * mdp3_dmas_update() - push one frame out through DMA_S.
 * Mirror of mdp3_dmap_update() for the secondary pipe, without the
 * ROI/CCS handling and using unbounded killable waits instead of the
 * timeout+retry scheme.  @data is unused.  Returns 0.
 */
static int mdp3_dmas_update(struct mdp3_dma *dma, void *buf,
				struct mdp3_intf *intf, void *data)
{
	unsigned long flag;
	int cb_type = MDP3_DMA_CALLBACK_TYPE_VSYNC;

	MDSS_XLOG(XLOG_FUNC_ENTRY, __LINE__);
	if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_CMD) {
		cb_type = MDP3_DMA_CALLBACK_TYPE_DMA_DONE;
		/* wait for the previous frame's DMA-done first */
		if (intf->active)
			wait_for_completion_killable(&dma->dma_comp);
	}

	spin_lock_irqsave(&dma->dma_lock, flag);
	MDP3_REG_WRITE(MDP3_REG_DMA_S_IBUF_ADDR, (u32)buf);
	dma->source_config.buf = (int)buf;
	if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_CMD)
		MDP3_REG_WRITE(MDP3_REG_DMA_S_START, 1);

	if (!intf->active) {
		pr_debug("mdp3_dmap_update start interface\n");
		intf->start(intf);
	}

	wmb(); /* ensure write is finished before progressing */
	init_completion(&dma->vsync_comp);
	spin_unlock_irqrestore(&dma->dma_lock, flag);

	mdp3_dma_callback_enable(dma, cb_type);
	/* video mode: block until the flip is latched at vsync */
	if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_VIDEO)
		wait_for_completion_killable(&dma->vsync_comp);
	return 0;
}
811
812static int mdp3_dmap_cursor_update(struct mdp3_dma *dma, int x, int y)
813{
814 u32 cursor_pos;
815
816 cursor_pos = x | (y << 16);
817 MDP3_REG_WRITE(MDP3_REG_DMA_P_CURSOR_POS, cursor_pos);
818 dma->cursor.x = x;
819 dma->cursor.y = y;
820 return 0;
821}
822
/*
 * mdp3_dmap_histo_get() - wait for and read back one histogram sample.
 * @dma: dma pipe; bins land in dma->histo_data.
 *
 * Valid only while collection is in progress (START) or a sample is
 * READY.  Waits up to HIST_WAIT_TIMEOUT(frame_count) for HIST_DONE,
 * copies the R/G/B bins and the two extra-info registers, then
 * immediately re-arms the engine for the next sample.
 * Returns 0 on success, -EINVAL on bad state, -ETIMEDOUT on timeout,
 * a negative value if the wait was interrupted, or -EPERM when the
 * engine was shut down while waiting.
 */
static int mdp3_dmap_histo_get(struct mdp3_dma *dma)
{
	int i, state, timeout, ret;
	u32 addr;
	unsigned long flag;

	spin_lock_irqsave(&dma->histo_lock, flag);
	state = dma->histo_state;
	spin_unlock_irqrestore(&dma->histo_lock, flag);

	if (state != MDP3_DMA_HISTO_STATE_START &&
		state != MDP3_DMA_HISTO_STATE_READY) {
		pr_err("mdp3_dmap_histo_get invalid state %d\n", state);
		return -EINVAL;
	}

	timeout = HIST_WAIT_TIMEOUT(dma->histogram_config.frame_count);
	ret = wait_for_completion_killable_timeout(&dma->histo_comp, timeout);

	if (ret == 0) {
		pr_debug("mdp3_dmap_histo_get time out\n");
		ret = -ETIMEDOUT;
	} else if (ret < 0) {
		pr_err("mdp3_dmap_histo_get interrupted\n");
	}

	if (ret < 0)
		return ret;

	/* histo_stop/reset may have raced us past the completion */
	if (dma->histo_state != MDP3_DMA_HISTO_STATE_READY) {
		pr_debug("mdp3_dmap_histo_get after dma shut down\n");
		return -EPERM;
	}

	/* drain the per-channel bin registers */
	addr = MDP3_REG_DMA_P_HIST_R_DATA;
	for (i = 0; i < MDP_HISTOGRAM_BIN_NUM; i++) {
		dma->histo_data.r_data[i] = MDP3_REG_READ(addr);
		addr += 4;
	}

	addr = MDP3_REG_DMA_P_HIST_G_DATA;
	for (i = 0; i < MDP_HISTOGRAM_BIN_NUM; i++) {
		dma->histo_data.g_data[i] = MDP3_REG_READ(addr);
		addr += 4;
	}

	addr = MDP3_REG_DMA_P_HIST_B_DATA;
	for (i = 0; i < MDP_HISTOGRAM_BIN_NUM; i++) {
		dma->histo_data.b_data[i] = MDP3_REG_READ(addr);
		addr += 4;
	}

	dma->histo_data.extra[0] =
		MDP3_REG_READ(MDP3_REG_DMA_P_HIST_EXTRA_INFO_0);
	dma->histo_data.extra[1] =
		MDP3_REG_READ(MDP3_REG_DMA_P_HIST_EXTRA_INFO_1);

	/* kick off collection of the next sample right away */
	spin_lock_irqsave(&dma->histo_lock, flag);
	init_completion(&dma->histo_comp);
	MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_START, 1);
	wmb(); /* ensure write is finished before progressing */
	dma->histo_state = MDP3_DMA_HISTO_STATE_START;
	spin_unlock_irqrestore(&dma->histo_lock, flag);

	return 0;
}
889
890static int mdp3_dmap_histo_start(struct mdp3_dma *dma)
891{
892 unsigned long flag;
893
894 if (dma->histo_state != MDP3_DMA_HISTO_STATE_IDLE)
895 return -EINVAL;
896
897 spin_lock_irqsave(&dma->histo_lock, flag);
898
899 init_completion(&dma->histo_comp);
900 MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_START, 1);
901 wmb(); /* ensure write is finished before progressing */
902 dma->histo_state = MDP3_DMA_HISTO_STATE_START;
903
904 spin_unlock_irqrestore(&dma->histo_lock, flag);
905
906 mdp3_dma_callback_enable(dma, MDP3_DMA_CALLBACK_TYPE_HIST_DONE);
907 return 0;
908
909}
910
/*
 * mdp3_dmap_histo_reset() - reset the histogram engine.
 * Enables the histogram irq sources, starts the hardware reset sequence
 * and waits up to DMA_HISTO_RESET_TIMEOUT_MS for the RESET_DONE irq to
 * move the state machine to IDLE.
 * Returns 0 on success, -ETIMEDOUT on timeout, or a negative value if
 * the wait was interrupted.
 */
static int mdp3_dmap_histo_reset(struct mdp3_dma *dma)
{
	unsigned long flag;
	int ret;

	spin_lock_irqsave(&dma->histo_lock, flag);

	init_completion(&dma->histo_comp);

	/*
	 * NOTE(review): BIT(0)|BIT(1) presumably matches the
	 * MDP3_DMA_P_HIST_INTR_{HIST,RESET}_DONE bits — confirm against
	 * the register definitions and use the named constants.
	 */
	MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_INTR_ENABLE, BIT(0)|BIT(1));
	MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_RESET_SEQ_START, 1);
	wmb(); /* ensure write is finished before progressing */
	dma->histo_state = MDP3_DMA_HISTO_STATE_RESET;

	spin_unlock_irqrestore(&dma->histo_lock, flag);

	mdp3_dma_callback_enable(dma, MDP3_DMA_CALLBACK_TYPE_HIST_RESET_DONE);
	ret = wait_for_completion_killable_timeout(&dma->histo_comp,
		msecs_to_jiffies(DMA_HISTO_RESET_TIMEOUT_MS));

	if (ret == 0) {
		pr_err("mdp3_dmap_histo_reset time out\n");
		ret = -ETIMEDOUT;
	} else if (ret < 0) {
		pr_err("mdp3_dmap_histo_reset interrupted\n");
	} else {
		ret = 0;
	}
	mdp3_dma_callback_disable(dma, MDP3_DMA_CALLBACK_TYPE_HIST_RESET_DONE);

	return ret;
}
944
945static int mdp3_dmap_histo_stop(struct mdp3_dma *dma)
946{
947 unsigned long flag;
948 int cb_type = MDP3_DMA_CALLBACK_TYPE_HIST_RESET_DONE |
949 MDP3_DMA_CALLBACK_TYPE_HIST_DONE;
950
951 spin_lock_irqsave(&dma->histo_lock, flag);
952
953 MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_CANCEL_REQ, 1);
954 MDP3_REG_WRITE(MDP3_REG_DMA_P_HIST_INTR_ENABLE, 0);
955 wmb(); /* ensure write is finished before progressing */
956 dma->histo_state = MDP3_DMA_HISTO_STATE_IDLE;
957 complete(&dma->histo_comp);
958
959 spin_unlock_irqrestore(&dma->histo_lock, flag);
960
961 mdp3_dma_callback_disable(dma, cb_type);
962 return 0;
963}
964
965static int mdp3_dmap_histo_op(struct mdp3_dma *dma, u32 op)
966{
967 int ret;
968
969 switch (op) {
970 case MDP3_DMA_HISTO_OP_START:
971 ret = mdp3_dmap_histo_start(dma);
972 break;
973 case MDP3_DMA_HISTO_OP_STOP:
974 case MDP3_DMA_HISTO_OP_CANCEL:
975 ret = mdp3_dmap_histo_stop(dma);
976 break;
977 case MDP3_DMA_HISTO_OP_RESET:
978 ret = mdp3_dmap_histo_reset(dma);
979 break;
980 default:
981 ret = -EINVAL;
982 }
983 return ret;
984}
985
986bool mdp3_dmap_busy(void)
987{
988 u32 val;
989
990 val = MDP3_REG_READ(MDP3_REG_DISPLAY_STATUS);
991 pr_err("%s DMAP Status %s\n", __func__,
992 (val & MDP3_DMA_P_BUSY_BIT) ? "BUSY":"IDLE");
Arun kumar6b35a032018-05-23 15:38:00 +0530993 MDSS_XLOG(XLOG_FUNC_ENTRY, __LINE__,
994 (val & MDP3_DMA_P_BUSY_BIT) ? 1:0);
Sachin Bhayareeeb88892018-01-02 16:36:01 +0530995 return val & MDP3_DMA_P_BUSY_BIT;
996}
997
/*
 * During underrun DMA_P registers are reset. Reprogramming CSC to prevent
 * black screen
 */
static void mdp3_dmap_underrun_worker(struct work_struct *work)
{
	struct mdp3_dma *dma;

	dma = container_of(work, struct mdp3_dma, underrun_work);
	mutex_lock(&dma->pp_lock);
	if (dma->ccs_config.ccs_enable && dma->ccs_config.ccs_dirty) {
		/*
		 * NOTE(review): flips to the other of the two hardware
		 * vector sets — presumably so the freshly reprogrammed
		 * set is the one selected; confirm against the HW spec.
		 */
		dma->cc_vect_sel = (dma->cc_vect_sel + 1) % 2;
		dma->ccs_config.ccs_sel = dma->cc_vect_sel;
		dma->ccs_config.pre_limit_sel = dma->cc_vect_sel;
		dma->ccs_config.post_limit_sel = dma->cc_vect_sel;
		dma->ccs_config.pre_bias_sel = dma->cc_vect_sel;
		dma->ccs_config.post_bias_sel = dma->cc_vect_sel;
		mdp3_ccs_update(dma, true);
	}
	mutex_unlock(&dma->pp_lock);
}
1019
/*
 * mdp3_dma_start() - kick the DMA engine and its output interface.
 *
 * For DSI command mode the DMA start register is poked explicitly and a
 * DMA-done callback is requested; video-mode interfaces start streaming
 * from intf->start() alone.  The function blocks (killably) until the
 * first vsync completion fires, confirming the pipe is running.
 *
 * Returns 0 on success, -EINVAL for an unknown DMA selection.
 * NOTE(review): a killed wait still returns 0 — callers cannot tell;
 * presumably acceptable here, but worth confirming.
 */
static int mdp3_dma_start(struct mdp3_dma *dma, struct mdp3_intf *intf)
{
	unsigned long flag;
	int cb_type = MDP3_DMA_CALLBACK_TYPE_VSYNC;
	u32 dma_start_offset = MDP3_REG_DMA_P_START;

	/* Pick the start register for the selected DMA pipe */
	if (dma->dma_sel == MDP3_DMA_P)
		dma_start_offset = MDP3_REG_DMA_P_START;
	else if (dma->dma_sel == MDP3_DMA_S)
		dma_start_offset = MDP3_REG_DMA_S_START;
	else
		return -EINVAL;

	spin_lock_irqsave(&dma->dma_lock, flag);
	if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_CMD) {
		/* Command mode: software triggers each frame transfer */
		cb_type |= MDP3_DMA_CALLBACK_TYPE_DMA_DONE;
		MDP3_REG_WRITE(dma_start_offset, 1);
	}

	intf->start(intf);
	wmb(); /* ensure write is finished before progressing */
	init_completion(&dma->vsync_comp);
	spin_unlock_irqrestore(&dma->dma_lock, flag);

	/* Re-arm panic/robust QoS control for DMA_P where supported */
	if (dma->dma_sel == MDP3_DMA_P && dma->has_panic_ctrl)
		MDP3_REG_WRITE(MDP3_PANIC_ROBUST_CTRL, BIT(0));

	mdp3_dma_callback_enable(dma, cb_type);
	pr_debug("mdp3_dma_start wait for vsync_comp in\n");
	/* Wait for the first vsync to prove the pipe came up */
	wait_for_completion_killable(&dma->vsync_comp);
	pr_debug("mdp3_dma_start wait for vsync_comp out\n");
	return 0;
}
1053
/*
 * mdp3_dma_stop() - quiesce the DMA engine and its output interface.
 *
 * Drops panic control, stops the interface, then polls the display
 * status register until the engine's busy bit clears (or the poll times
 * out).  All DMA interrupts are disabled and cleared, and any clients
 * left blocked on dma-done or retire notifications are flushed so
 * nothing waits on an interrupt that can no longer arrive.
 *
 * Returns 0 on success or the readl_poll_timeout() error (-ETIMEDOUT).
 */
static int mdp3_dma_stop(struct mdp3_dma *dma, struct mdp3_intf *intf)
{
	int ret = 0;
	u32 status, display_status_bit;

	/* Engine-busy status bit differs per DMA pipe */
	if (dma->dma_sel == MDP3_DMA_P)
		display_status_bit = BIT(6);
	else if (dma->dma_sel == MDP3_DMA_S)
		display_status_bit = BIT(7);
	else
		return -EINVAL;

	if (dma->dma_sel == MDP3_DMA_P && dma->has_panic_ctrl)
		MDP3_REG_WRITE(MDP3_PANIC_ROBUST_CTRL, 0);

	/* DSI video mode exposes an extra engine-busy bit */
	if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_VIDEO)
		display_status_bit |= BIT(11);

	intf->stop(intf);
	/* Busy-wait (with sleeps) for the engine to drain */
	ret = readl_poll_timeout((mdp3_res->mdp_base + MDP3_REG_DISPLAY_STATUS),
				status,
				((status & display_status_bit) == 0),
				DMA_STOP_POLL_SLEEP_US,
				DMA_STOP_POLL_TIMEOUT_US);

	mdp3_dma_callback_disable(dma, MDP3_DMA_CALLBACK_TYPE_VSYNC |
					MDP3_DMA_CALLBACK_TYPE_DMA_DONE);
	mdp3_irq_disable(MDP3_INTR_LCDC_UNDERFLOW);

	/* Mask and clear every MDP interrupt source */
	MDP3_REG_WRITE(MDP3_REG_INTR_ENABLE, 0);
	MDP3_REG_WRITE(MDP3_REG_INTR_CLEAR, 0xfffffff);

	reinit_completion(&dma->dma_comp);
	dma->vsync_client.handler = NULL;

	/*
	 * Interrupts are disabled.
	 * Check for blocked dma done and vsync interrupt.
	 * Flush items waiting for interrupts.
	 */
	if (dma->output_config.out_sel == MDP3_DMA_OUTPUT_SEL_DSI_CMD) {
		if (atomic_read(&dma->session->dma_done_cnt))
			mdp3_flush_dma_done(dma->session);
		if (dma->session->retire_cnt) {
			mdp3_vsync_retire_signal(dma->session->mfd,
				dma->session->retire_cnt);
		}
	}

	return ret;
}
1105
1106int mdp3_dma_init(struct mdp3_dma *dma)
1107{
1108 int ret = 0;
1109
1110 pr_debug("mdp3_dma_init\n");
1111 switch (dma->dma_sel) {
1112 case MDP3_DMA_P:
1113 dma->dma_config = mdp3_dmap_config;
1114 dma->dma_sync_config = mdp3_dma_sync_config;
1115 dma->dma_config_source = mdp3_dmap_config_source;
1116 dma->config_cursor = mdp3_dmap_cursor_config;
1117 dma->config_ccs = mdp3_dmap_ccs_config;
1118 dma->config_histo = mdp3_dmap_histo_config;
1119 dma->config_lut = mdp3_dmap_lut_config;
1120 dma->update = mdp3_dmap_update;
1121 dma->update_cursor = mdp3_dmap_cursor_update;
1122 dma->get_histo = mdp3_dmap_histo_get;
1123 dma->histo_op = mdp3_dmap_histo_op;
1124 dma->vsync_enable = mdp3_dma_vsync_enable;
1125 dma->dma_done_notifier = mdp3_dma_done_notifier;
1126 dma->start = mdp3_dma_start;
1127 dma->stop = mdp3_dma_stop;
1128 dma->busy = mdp3_dmap_busy;
1129 INIT_WORK(&dma->underrun_work, mdp3_dmap_underrun_worker);
1130 break;
1131 case MDP3_DMA_S:
1132 dma->dma_config = mdp3_dmas_config;
1133 dma->dma_sync_config = mdp3_dma_sync_config;
1134 dma->dma_config_source = mdp3_dmas_config_source;
1135 dma->config_cursor = NULL;
1136 dma->config_ccs = NULL;
1137 dma->config_histo = NULL;
1138 dma->config_lut = NULL;
1139 dma->update = mdp3_dmas_update;
1140 dma->update_cursor = NULL;
1141 dma->get_histo = NULL;
1142 dma->histo_op = NULL;
1143 dma->vsync_enable = mdp3_dma_vsync_enable;
1144 dma->start = mdp3_dma_start;
1145 dma->stop = mdp3_dma_stop;
1146 break;
1147 case MDP3_DMA_E:
1148 default:
1149 ret = -ENODEV;
1150 break;
1151 }
1152
1153 spin_lock_init(&dma->dma_lock);
1154 spin_lock_init(&dma->histo_lock);
1155 init_completion(&dma->vsync_comp);
1156 init_completion(&dma->dma_comp);
1157 init_completion(&dma->histo_comp);
1158 dma->vsync_client.handler = NULL;
1159 dma->vsync_client.arg = NULL;
1160 dma->histo_state = MDP3_DMA_HISTO_STATE_IDLE;
1161 dma->update_src_cfg = false;
1162
1163 memset(&dma->cursor, 0, sizeof(dma->cursor));
1164 memset(&dma->ccs_config, 0, sizeof(dma->ccs_config));
1165 memset(&dma->histogram_config, 0, sizeof(dma->histogram_config));
1166
1167 return ret;
1168}
1169
1170int lcdc_config(struct mdp3_intf *intf, struct mdp3_intf_cfg *cfg)
1171{
1172 u32 temp;
1173 struct mdp3_video_intf_cfg *v = &cfg->video;
1174
1175 temp = v->hsync_pulse_width | (v->hsync_period << 16);
1176 MDP3_REG_WRITE(MDP3_REG_LCDC_HSYNC_CTL, temp);
1177 MDP3_REG_WRITE(MDP3_REG_LCDC_VSYNC_PERIOD, v->vsync_period);
1178 MDP3_REG_WRITE(MDP3_REG_LCDC_VSYNC_PULSE_WIDTH, v->vsync_pulse_width);
1179 temp = v->display_start_x | (v->display_end_x << 16);
1180 MDP3_REG_WRITE(MDP3_REG_LCDC_DISPLAY_HCTL, temp);
1181 MDP3_REG_WRITE(MDP3_REG_LCDC_DISPLAY_V_START, v->display_start_y);
1182 MDP3_REG_WRITE(MDP3_REG_LCDC_DISPLAY_V_END, v->display_end_y);
1183 temp = v->active_start_x | (v->active_end_x);
1184 if (v->active_h_enable)
1185 temp |= BIT(31);
1186 MDP3_REG_WRITE(MDP3_REG_LCDC_ACTIVE_HCTL, temp);
1187 MDP3_REG_WRITE(MDP3_REG_LCDC_ACTIVE_V_START, v->active_start_y);
1188 MDP3_REG_WRITE(MDP3_REG_LCDC_ACTIVE_V_END, v->active_end_y);
1189 MDP3_REG_WRITE(MDP3_REG_LCDC_HSYNC_SKEW, v->hsync_skew);
1190 temp = 0;
1191 if (!v->hsync_polarity)
1192 temp = BIT(0);
1193 if (!v->vsync_polarity)
1194 temp = BIT(1);
1195 if (!v->de_polarity)
1196 temp = BIT(2);
1197 MDP3_REG_WRITE(MDP3_REG_LCDC_CTL_POLARITY, temp);
1198
1199 return 0;
1200}
1201
/* Enable the LCDC timing engine and mark the interface active. */
int lcdc_start(struct mdp3_intf *intf)
{
	MDP3_REG_WRITE(MDP3_REG_LCDC_EN, BIT(0));
	wmb(); /* ensure write is finished before progressing */
	intf->active = true;
	return 0;
}
1209
/* Disable the LCDC timing engine and mark the interface inactive. */
int lcdc_stop(struct mdp3_intf *intf)
{
	MDP3_REG_WRITE(MDP3_REG_LCDC_EN, 0);
	wmb(); /* ensure write is finished before progressing */
	intf->active = false;
	return 0;
}
1217
1218int dsi_video_config(struct mdp3_intf *intf, struct mdp3_intf_cfg *cfg)
1219{
1220 u32 temp;
1221 struct mdp3_video_intf_cfg *v = &cfg->video;
1222
1223 pr_debug("dsi_video_config\n");
1224
1225 temp = v->hsync_pulse_width | (v->hsync_period << 16);
1226 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_HSYNC_CTL, temp);
1227 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_VSYNC_PERIOD, v->vsync_period);
1228 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_VSYNC_PULSE_WIDTH,
1229 v->vsync_pulse_width);
1230 temp = v->display_start_x | (v->display_end_x << 16);
1231 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_DISPLAY_HCTL, temp);
1232 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_DISPLAY_V_START, v->display_start_y);
1233 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_DISPLAY_V_END, v->display_end_y);
1234 temp = v->active_start_x | (v->active_end_x << 16);
1235 if (v->active_h_enable)
1236 temp |= BIT(31);
1237 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_ACTIVE_HCTL, temp);
1238
1239 temp = v->active_start_y;
1240 if (v->active_v_enable)
1241 temp |= BIT(31);
1242 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_ACTIVE_V_START, temp);
1243 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_ACTIVE_V_END, v->active_end_y);
1244 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_HSYNC_SKEW, v->hsync_skew);
1245 temp = 0;
1246 if (!v->hsync_polarity)
1247 temp |= BIT(0);
1248 if (!v->vsync_polarity)
1249 temp |= BIT(1);
1250 if (!v->de_polarity)
1251 temp |= BIT(2);
1252 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_CTL_POLARITY, temp);
1253
1254 v->underflow_color |= 0x80000000;
1255 MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_UNDERFLOW_CTL, v->underflow_color);
1256
1257 return 0;
1258}
1259
/* Enable the DSI video-mode timing engine and mark the interface active. */
int dsi_video_start(struct mdp3_intf *intf)
{
	pr_debug("dsi_video_start\n");
	MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_EN, BIT(0));
	wmb(); /* ensure write is finished before progressing */
	intf->active = true;
	return 0;
}
1268
/* Disable the DSI video-mode timing engine and mark the interface inactive. */
int dsi_video_stop(struct mdp3_intf *intf)
{
	pr_debug("dsi_video_stop\n");
	MDP3_REG_WRITE(MDP3_REG_DSI_VIDEO_EN, 0);
	wmb(); /* ensure write is finished before progressing */
	intf->active = false;
	return 0;
}
1277
1278int dsi_cmd_config(struct mdp3_intf *intf, struct mdp3_intf_cfg *cfg)
1279{
1280 u32 id_map = 0;
1281 u32 trigger_en = 0;
1282
1283 if (cfg->dsi_cmd.primary_dsi_cmd_id)
1284 id_map = BIT(0);
1285 if (cfg->dsi_cmd.secondary_dsi_cmd_id)
1286 id_map = BIT(4);
1287
1288 if (cfg->dsi_cmd.dsi_cmd_tg_intf_sel)
1289 trigger_en = BIT(4);
1290
1291 MDP3_REG_WRITE(MDP3_REG_DSI_CMD_MODE_ID_MAP, id_map);
1292 MDP3_REG_WRITE(MDP3_REG_DSI_CMD_MODE_TRIGGER_EN, trigger_en);
1293
1294 return 0;
1295}
1296
/* Command mode needs no hardware kick here; just flag the interface live. */
int dsi_cmd_start(struct mdp3_intf *intf)
{
	intf->active = true;
	return 0;
}
1302
/* Command mode needs no hardware teardown here; just clear the flag. */
int dsi_cmd_stop(struct mdp3_intf *intf)
{
	intf->active = false;
	return 0;
}
1308
/* SPI command mode: no MDP-side configuration required. */
static int spi_cmd_config(struct mdp3_intf *intf, struct mdp3_intf_cfg *cfg)
{
	return 0;
}
1313
/* SPI command mode: nothing to enable in MDP; mark interface active. */
static int spi_cmd_start(struct mdp3_intf *intf)
{
	intf->active = true;
	return 0;
}
1319
/* SPI command mode: nothing to disable in MDP; mark interface inactive. */
static int spi_cmd_stop(struct mdp3_intf *intf)
{
	intf->active = false;
	return 0;
}
Sachin Bhayareeeb88892018-01-02 16:36:01 +05301325int mdp3_intf_init(struct mdp3_intf *intf)
1326{
1327 switch (intf->cfg.type) {
1328 case MDP3_DMA_OUTPUT_SEL_LCDC:
1329 intf->config = lcdc_config;
1330 intf->start = lcdc_start;
1331 intf->stop = lcdc_stop;
1332 break;
1333 case MDP3_DMA_OUTPUT_SEL_DSI_VIDEO:
1334 intf->config = dsi_video_config;
1335 intf->start = dsi_video_start;
1336 intf->stop = dsi_video_stop;
1337 break;
1338 case MDP3_DMA_OUTPUT_SEL_DSI_CMD:
1339 intf->config = dsi_cmd_config;
1340 intf->start = dsi_cmd_start;
1341 intf->stop = dsi_cmd_stop;
1342 break;
Arun kumardb962812018-05-30 16:31:52 +05301343 case MDP3_DMA_OUTPUT_SEL_SPI_CMD:
1344 intf->config = spi_cmd_config;
1345 intf->start = spi_cmd_start;
1346 intf->stop = spi_cmd_stop;
1347 break;
Sachin Bhayareeeb88892018-01-02 16:36:01 +05301348
1349 default:
1350 return -EINVAL;
1351 }
1352 return 0;
1353}