blob: 096cb34c085b340e86318a3f88c098163788cc36 [file] [log] [blame]
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07001/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
2 *
3 * This program is free software; you can redistribute it and/or modify
4 * it under the terms of the GNU General Public License version 2 and
5 * only version 2 as published by the Free Software Foundation.
6 *
7 * This program is distributed in the hope that it will be useful,
8 * but WITHOUT ANY WARRANTY; without even the implied warranty of
9 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 * GNU General Public License for more details.
11 */
12
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040013#include <linux/sort.h>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070014#include <drm/drm_mode.h>
15#include <drm/drm_crtc.h>
16#include <drm/drm_crtc_helper.h>
17#include <drm/drm_flip_work.h>
18
19#include "sde_kms.h"
20#include "sde_hw_lm.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040021#include "sde_hw_mdp_ctl.h"
Abhijit Kulkarni40e38162016-06-26 22:12:09 -040022#include "sde_crtc.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040023
/* index helpers: map a zero-based instance number to the corresponding
 * hardware-block enum value (CTL_0/LM_0/INTF_0 based)
 */
#define CTL(i)       (CTL_0 + (i))
#define LM(i)        (LM_0 + (i))
#define INTF(i)      (INTF_0 + (i))

/* uncomment to enable higher level IRQ msg's */
/*#define DBG_IRQ	DBG*/
#define DBG_IRQ(fmt, ...)
31
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040032static struct sde_kms *get_kms(struct drm_crtc *crtc)
33{
34 struct msm_drm_private *priv = crtc->dev->dev_private;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -040035
Ben Chan78647cd2016-06-26 22:02:47 -040036 return to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040037}
38
/*
 * sde_crtc_reserve_hw_resources - acquire the CTL paths, layer mixers
 * and interface info this CRTC needs, based on the resources the
 * attached encoder reports and the platform resource map.
 * @crtc: DRM CRTC
 * @encoder: encoder attached to this CRTC
 *
 * Return: 0 on success (including the "encoder has no CTL path" case),
 * -EINVAL on invalid kms state, -EACCES when a CTL path or mixer
 * cannot be acquired.
 */
static int sde_crtc_reserve_hw_resources(struct drm_crtc *crtc,
		struct drm_encoder *encoder)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_kms *sde_kms = get_kms(crtc);
	struct sde_encoder_hw_resources enc_hw_res;
	const struct sde_hw_res_map *plat_hw_res_map;
	enum sde_lm unused_lm_id[CRTC_DUAL_MIXERS] = {0};
	enum sde_lm lm_idx;
	int i, count = 0;

	if (!sde_kms) {
		DBG("[%s] invalid kms", __func__);
		return -EINVAL;
	}

	if (!sde_kms->mmio)
		return -EINVAL;

	/* Get unused LMs: collect up to CRTC_DUAL_MIXERS free mixers */
	for (i = 0; i < sde_kms->catalog->mixer_count; i++) {
		if (!sde_rm_get_mixer(sde_kms, LM(i))) {
			unused_lm_id[count++] = LM(i);
			if (count == CRTC_DUAL_MIXERS)
				break;
		}
	}

	/* query encoder resources */
	sde_encoder_get_hw_resources(sde_crtc->encoder, &enc_hw_res);

	/*
	 * parse encoder hw resources, find CTL paths
	 * NOTE(review): the loop runs from CTL_0 to ctl_count *inclusive*
	 * and indexes enc_hw_res.ctls[] with the enum value directly —
	 * this assumes CTL_0 == 0 and looks off-by-one; verify against
	 * the catalog/enum definitions.
	 */
	for (i = CTL_0; i <= sde_kms->catalog->ctl_count; i++) {
		WARN_ON(sde_crtc->num_ctls > CRTC_DUAL_MIXERS);
		if (enc_hw_res.ctls[i]) {
			struct sde_crtc_mixer *mixer =
				&sde_crtc->mixer[sde_crtc->num_ctls];
			mixer->hw_ctl = sde_rm_get_ctl_path(sde_kms, i);
			if (IS_ERR_OR_NULL(mixer->hw_ctl)) {
				DBG("[%s], Invalid ctl_path", __func__);
				return -EACCES;
			}
			sde_crtc->num_ctls++;
		}
	}

	/* shortcut this process if encoder has no ctl paths */
	if (!sde_crtc->num_ctls)
		return 0;

	/*
	 * Get default LMs if specified in platform config
	 * otherwise acquire the free LMs collected above.
	 * NOTE(review): same inclusive bound / INTF_0 == 0 assumption as
	 * the CTL loop above — confirm.
	 */
	for (i = INTF_0; i <= sde_kms->catalog->intf_count; i++) {
		if (enc_hw_res.intfs[i]) {
			struct sde_crtc_mixer *mixer =
				&sde_crtc->mixer[sde_crtc->num_mixers];
			plat_hw_res_map = sde_rm_get_res_map(sde_kms, i);

			lm_idx = plat_hw_res_map->lm;
			/* no platform default: fall back to a free mixer */
			if (!lm_idx)
				lm_idx = unused_lm_id[sde_crtc->num_mixers];

			DBG("Acquiring LM %d", lm_idx);
			mixer->hw_lm = sde_rm_acquire_mixer(sde_kms, lm_idx);
			if (IS_ERR_OR_NULL(mixer->hw_lm)) {
				DBG("[%s], Invalid mixer", __func__);
				return -EACCES;
			}
			/* interface info */
			mixer->intf_idx = i;
			mixer->mode = enc_hw_res.intfs[i];
			sde_crtc->num_mixers++;
		}
	}

	DBG("control paths %d, num_mixers %d, lm[0] %d, ctl[0] %d ",
			sde_crtc->num_ctls, sde_crtc->num_mixers,
			sde_crtc->mixer[0].hw_lm->idx,
			sde_crtc->mixer[0].hw_ctl->idx);
	if (sde_crtc->num_mixers == CRTC_DUAL_MIXERS)
		DBG("lm[1] %d, ctl[1], %d",
				sde_crtc->mixer[1].hw_lm->idx,
				sde_crtc->mixer[1].hw_ctl->idx);
	return 0;
}
126
/* Tear down the CRTC and free the containing sde_crtc allocation. */
static void sde_crtc_destroy(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);

	DBG("");
	drm_crtc_cleanup(crtc);
	kfree(sde_crtc);
}
135
/* No mode adjustment is performed; every proposed mode is accepted. */
static bool sde_crtc_mode_fixup(struct drm_crtc *crtc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adjusted_mode)
{
	DBG("");
	return true;
}
143
/*
 * sde_crtc_mode_set_nofb - program the adjusted display mode into the
 * layer mixer(s) owned by this CRTC (no framebuffer involved).
 *
 * Reserves HW resources on first use, then writes the output size to
 * each mixer under the lm_lock spinlock. In dual-mixer mode each mixer
 * drives half of the horizontal resolution.
 */
static void sde_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_crtc_mixer *mixer = sde_crtc->mixer;
	struct sde_hw_mixer *lm;
	unsigned long flags;
	struct drm_display_mode *mode;
	/* NOTE(review): cfg is only partially assigned below — confirm the
	 * remaining sde_hw_mixer_cfg fields are don't-care for setup_mixer_out
	 */
	struct sde_hw_mixer_cfg cfg;
	u32 mixer_width;
	int i;
	int rc;

	DBG("");
	if (WARN_ON(!crtc->state))
		return;

	mode = &crtc->state->adjusted_mode;

	drm_mode_debug_printmodeline(mode);

	/*
	 * reserve mixer(s) if not already available
	 * if dual mode, mixer_width = half mode width
	 * program mode configuration on mixer(s)
	 */
	if ((sde_crtc->num_ctls == 0) ||
		(sde_crtc->num_mixers == 0)) {
		rc = sde_crtc_reserve_hw_resources(crtc, sde_crtc->encoder);
		if (rc) {
			DRM_ERROR("error reserving HW resource for CRTC\n");
			return;
		}
	}

	/* dual mixers split the mode horizontally, one half each */
	if (sde_crtc->num_mixers == CRTC_DUAL_MIXERS)
		mixer_width = mode->hdisplay >> 1;
	else
		mixer_width = mode->hdisplay;

	spin_lock_irqsave(&sde_crtc->lm_lock, flags);

	for (i = 0; i < sde_crtc->num_mixers; i++) {
		lm = mixer[i].hw_lm;
		cfg.out_width = mixer_width;
		cfg.out_height = mode->vdisplay;
		/* index 0 is the left mixer; any other index is the right */
		cfg.right_mixer = (i == 0) ? false : true;
		cfg.flags = 0;
		lm->ops.setup_mixer_out(lm, &cfg);
	}

	spin_unlock_irqrestore(&sde_crtc->lm_lock, flags);
}
196
/*
 * sde_crtc_get_blend_cfg - derive the mixer foreground/background alpha
 * blend configuration for one plane from its format and properties.
 * @cfg: blend config to fill (zeroed here first)
 * @pstate: plane state carrying the ALPHA / PREMULTIPLIED property values
 *
 * Three cases:
 *  - premultiplied alpha format: FG constant alpha, BG follows FG pixel
 *    alpha (modulated/inverted when constant alpha != 0xff);
 *  - straight pixel alpha format: both stages follow FG pixel alpha,
 *    modulated by the constant alpha when it is not fully opaque;
 *  - no alpha in format: opaque blend, FG=0xFF over BG=0x00.
 */
static void sde_crtc_get_blend_cfg(struct sde_hw_blend_cfg *cfg,
		struct sde_plane_state *pstate)
{
	struct drm_plane *plane;
	const struct mdp_format *format;

	format = to_mdp_format(
			msm_framebuffer_format(pstate->base.fb));
	plane = pstate->base.plane;

	memset(cfg, 0, sizeof(*cfg));
	/* default: constant plane alpha, BG gets the complement */
	cfg->fg.const_alpha = pstate->property_values[PLANE_PROP_ALPHA];
	cfg->bg.const_alpha = 0xFF - cfg->fg.const_alpha;

	if (format->alpha_enable &&
			pstate->property_values[PLANE_PROP_PREMULTIPLIED]) {
		/* premultiplied: BG keys off the FG pixel alpha */
		cfg->fg.alpha_sel = ALPHA_FG_CONST;
		cfg->bg.alpha_sel = ALPHA_FG_PIXEL;
		if (pstate->property_values[PLANE_PROP_ALPHA] != 0xff) {
			cfg->bg.const_alpha =
				(u32)pstate->property_values[PLANE_PROP_ALPHA];
			cfg->bg.inv_alpha_sel = 1;
			cfg->bg.mod_alpha = 1;
		} else {
			cfg->bg.inv_mode_alpha = 1;
		}
	} else if (format->alpha_enable) {
		/* straight alpha: both stages track FG pixel alpha */
		cfg->fg.alpha_sel = ALPHA_FG_PIXEL;
		cfg->bg.alpha_sel = ALPHA_FG_PIXEL;
		if (pstate->property_values[PLANE_PROP_ALPHA] != 0xff) {
			cfg->bg.const_alpha =
				(u32)pstate->property_values[PLANE_PROP_ALPHA];
			cfg->fg.mod_alpha = 1;
			cfg->bg.inv_alpha_sel = 1;
			cfg->bg.mod_alpha = 1;
			cfg->bg.inv_mode_alpha = 1;
		} else {
			cfg->bg.inv_mode_alpha = 1;
		}
	} else {
		/* opaque blending */
		cfg->fg.alpha_sel = ALPHA_FG_CONST;
		cfg->bg.alpha_sel = ALPHA_BG_CONST;
		cfg->bg.inv_alpha_sel = 1;
		cfg->fg.const_alpha = 0xFF;
		cfg->bg.const_alpha = 0x00;
	}

	DBG("format 0x%x, alpha_enable %u premultiplied %llu",
			format->base.pixel_format, format->alpha_enable,
			pstate->property_values[PLANE_PROP_PREMULTIPLIED]);
	DBG("fg alpha config %d %d %d %d %d",
			cfg->fg.alpha_sel, cfg->fg.const_alpha, cfg->fg.mod_alpha,
			cfg->fg.inv_alpha_sel, cfg->fg.inv_mode_alpha);
	DBG("bg alpha config %d %d %d %d %d",
			cfg->bg.alpha_sel, cfg->bg.const_alpha, cfg->bg.mod_alpha,
			cfg->bg.inv_alpha_sel, cfg->bg.inv_mode_alpha);
}
255
/*
 * blend_setup - program stage (z-order), blend and alpha-out
 * configuration for every mixer/CTL pair of this CRTC from the planes
 * attached to the CRTC state, then write the stage config to each CTL.
 *
 * Runs entirely under the lm_lock spinlock. Also caches the per-mixer
 * flush mask (SSPP bits + mixer bit) for the later commit flush.
 */
static void blend_setup(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_crtc_mixer *mixer = sde_crtc->mixer;
	struct drm_plane *plane;
	struct sde_plane_state *pstate;
	struct sde_hw_stage_cfg stage_cfg;
	struct sde_hw_blend_cfg blend;
	struct sde_hw_ctl *ctl;
	struct sde_hw_mixer *lm;
	struct sde_hw_color3_cfg alpha_out;
	u32 flush_mask = 0;
	unsigned long flags;
	int i, plane_cnt = 0;

	DBG("");
	spin_lock_irqsave(&sde_crtc->lm_lock, flags);

	/* ctl could be reserved already */
	if (!sde_crtc->num_ctls)
		goto out;

	/* initialize stage cfg */
	memset(&stage_cfg, 0, sizeof(stage_cfg));

	for (i = 0; i < sde_crtc->num_mixers; i++) {
		/* skip mixers that were not fully reserved */
		if ((!mixer[i].hw_lm) || (!mixer[i].hw_ctl))
			continue;

		ctl = mixer[i].hw_ctl;
		lm = mixer[i].hw_lm;
		memset(&alpha_out, 0, sizeof(alpha_out));

		drm_atomic_crtc_for_each_plane(plane, crtc) {
			pstate = to_sde_plane_state(plane->state);
			/* place the plane's pipe at its assigned stage */
			stage_cfg.stage[pstate->stage][i] =
				sde_plane_pipe(plane);
			DBG(" crtc_id %d, layer %d, at stage %d\n",
					sde_crtc->id,
					sde_plane_pipe(plane),
					pstate->stage);
			plane_cnt++;

			/* Cache the flushmask for this layer
			 * sourcesplit is always enabled, so this layer will
			 * be staged on both the mixers
			 */
			ctl = mixer[i].hw_ctl;
			ctl->ops.get_bitmask_sspp(ctl, &flush_mask,
					sde_plane_pipe(plane));

			/* blend config */
			sde_crtc_get_blend_cfg(&blend, pstate);
			lm->ops.setup_blend_config(lm, pstate->stage, &blend);
			alpha_out.keep_fg[pstate->stage] = 1;
		}
		lm->ops.setup_alpha_out(lm, &alpha_out);

		/* stage config flush mask */
		mixer[i].flush_mask = flush_mask;
		/* get the flush mask for mixer */
		ctl->ops.get_bitmask_mixer(ctl, &mixer[i].flush_mask,
				mixer[i].hw_lm->idx);
	}

	/*
	 * If there is no base layer, enable border color.
	 * currently border color is always black
	 */
	if ((stage_cfg.stage[SDE_STAGE_BASE][0] == SSPP_NONE) && plane_cnt) {
		stage_cfg.border_enable = 1;
		DBG("Border Color is enabled");
	}

	/* Program ctl_paths */
	for (i = 0; i < sde_crtc->num_ctls; i++) {
		if ((!mixer[i].hw_lm) || (!mixer[i].hw_ctl))
			continue;

		ctl = mixer[i].hw_ctl;
		lm = mixer[i].hw_lm;

		/* stage config */
		ctl->ops.setup_blendstage(ctl, mixer[i].hw_lm->idx,
				&stage_cfg);
	}
out:
	spin_unlock_irqrestore(&sde_crtc->lm_lock, flags);
}
345
/* if file!=NULL, this is preclose potential cancel-flip path
 *
 * Sends the stashed pageflip completion event (if any) to userspace and
 * clears it. Serialized against the vblank IRQ path via dev->event_lock.
 */
static void complete_flip(struct drm_crtc *crtc, struct drm_file *file)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_pending_vblank_event *event;
	unsigned long flags;

	spin_lock_irqsave(&dev->event_lock, flags);
	event = sde_crtc->event;
	if (event) {
		/* if regular vblank case (!file) or if cancel-flip from
		 * preclose on file that requested flip, then send the
		 * event:
		 */
		if (!file || (event->base.file_priv == file)) {
			sde_crtc->event = NULL;
			DBG("%s: send event: %pK", sde_crtc->name, event);
			drm_send_vblank_event(dev, sde_crtc->id, event);
		}
	}
	spin_unlock_irqrestore(&dev->event_lock, flags);
}
369
/*
 * sde_crtc_vblank_cb - vblank callback registered with the encoder.
 * @data: the drm_crtc this callback was registered for
 *
 * Completes a pending page flip (if flagged) and forwards the vblank to
 * the DRM core when userspace has requested vblank events.
 */
static void sde_crtc_vblank_cb(void *data)
{
	struct drm_crtc *crtc = (struct drm_crtc *)data;
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_kms *sde_kms = get_kms(crtc);
	struct drm_device *dev = sde_kms->dev;
	unsigned int pending;

	/* atomically consume all pending flags */
	pending = atomic_xchg(&sde_crtc->pending, 0);

	if (pending & PENDING_FLIP)
		complete_flip(crtc, NULL);

	if (sde_crtc->drm_requested_vblank) {
		drm_handle_vblank(dev, sde_crtc->id);
		DBG_IRQ("");
	}
}
388
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -0400389static bool frame_flushed(struct sde_crtc *sde_crtc)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400390{
391 struct vsync_info vsync;
392
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400393 /*
394 * encoder get vsync_info
395 * if frame_count does not match
396 * frame is flushed
397 */
398 sde_encoder_get_vblank_status(sde_crtc->encoder, &vsync);
399
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -0400400 return (vsync.frame_count != sde_crtc->vsync_count) ? true : false;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400401}
402
403void sde_crtc_wait_for_commit_done(struct drm_crtc *crtc)
404{
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400405 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -0400406 struct drm_device *dev = crtc->dev;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400407 int i, ret;
408
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -0400409 if (!sde_crtc->num_ctls)
410 return;
411
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400412 /* ref count the vblank event */
413 ret = drm_crtc_vblank_get(crtc);
414 if (ret)
415 return;
416
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400417 /* wait */
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -0400418 wait_event_timeout(dev->vblank[drm_crtc_index(crtc)].queue,
419 frame_flushed(sde_crtc),
420 msecs_to_jiffies(50));
421 if (ret <= 0)
422 dev_warn(dev->dev, "vblank time out, crtc=%d, ret %u\n",
423 sde_crtc->id, ret);
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400424
425 for (i = 0; i < sde_crtc->num_ctls; i++)
426 sde_crtc->mixer[i].flush_mask = 0;
427
428 /* release */
429 drm_crtc_vblank_put(crtc);
430}
431
/*
 * request_pending - flag work to be completed at the next vblank.
 * @crtc: DRM CRTC
 * @pending: PENDING_* bits to set
 *
 * The current encoder frame count is cached *before* the pending bit is
 * set, so frame_flushed() can later detect the counter advancing.
 */
static void request_pending(struct drm_crtc *crtc, u32 pending)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct vsync_info vsync;

	/* request vsync info, cache the current frame count */
	sde_encoder_get_vblank_status(sde_crtc->encoder, &vsync);
	sde_crtc->vsync_count = vsync.frame_count;

	atomic_or(pending, &sde_crtc->pending);
}
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400443
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400444/**
445 * Flush the CTL PATH
446 */
447static u32 crtc_flush_all(struct drm_crtc *crtc)
448{
449 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
450 struct sde_hw_ctl *ctl;
451 int i;
452
453 DBG("");
454
455 for (i = 0; i < sde_crtc->num_ctls; i++) {
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400456 ctl = sde_crtc->mixer[i].hw_ctl;
457 ctl->ops.get_bitmask_intf(ctl,
458 &(sde_crtc->mixer[i].flush_mask),
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400459 sde_crtc->mixer[i].intf_idx);
460 DBG("Flushing CTL_ID %d, flush_mask %x", ctl->idx,
461 sde_crtc->mixer[i].flush_mask);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400462 ctl->ops.setup_flush(ctl,
463 sde_crtc->mixer[i].flush_mask);
464 }
465
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700466 return 0;
467}
468
/*
 * sde_crtc_atomic_begin - prepare an atomic commit: stash the pageflip
 * completion event and program blend/stage configuration.
 */
static void sde_crtc_atomic_begin(struct drm_crtc *crtc,
		struct drm_crtc_state *old_crtc_state)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	unsigned long flags;

	DBG("");

	/* any previous event should have been consumed by now */
	WARN_ON(sde_crtc->event);

	spin_lock_irqsave(&dev->event_lock, flags);
	sde_crtc->event = crtc->state->event;
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/*
	 * If no CTL has been allocated in sde_crtc_atomic_check(),
	 * it means we are trying to flush a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!sde_crtc->num_ctls))
		return;

	blend_setup(crtc);

	/*
	 * PP_DONE irq is only used by command mode for now.
	 * It is better to request pending before FLUSH and START trigger
	 * to make sure no pp_done irq missed.
	 * This is safe because no pp_done will happen before SW trigger
	 * in command mode.
	 */
}
502
/*
 * sde_crtc_atomic_flush - finish an atomic commit: write the flush
 * registers for all CTL paths and arm the pending-flip completion.
 */
static void sde_crtc_atomic_flush(struct drm_crtc *crtc,
		struct drm_crtc_state *old_crtc_state)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	unsigned long flags;

	DBG("%s: event: %pK", sde_crtc->name, crtc->state->event);

	WARN_ON(sde_crtc->event);

	spin_lock_irqsave(&dev->event_lock, flags);
	sde_crtc->event = crtc->state->event;
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/*
	 * If no CTL has been allocated in sde_crtc_atomic_check(),
	 * it means we are trying to flush a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!sde_crtc->num_ctls))
		return;

	crtc_flush_all(crtc);

	request_pending(crtc, PENDING_FLIP);
}
530
/* No CRTC properties are supported yet; reject every request. */
static int sde_crtc_set_property(struct drm_crtc *crtc,
		struct drm_property *property, uint64_t val)
{
	return -EINVAL;
}
536
/* Stub: cursor buffer updates are accepted but ignored. */
static int sde_crtc_cursor_set(struct drm_crtc *crtc,
		struct drm_file *file, uint32_t handle,
		uint32_t width, uint32_t height)
{
	return 0;
}
543
/* Stub: cursor movement is accepted but ignored. */
static int sde_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	return 0;
}
548
/* CRTC power-down hook; hardware teardown not implemented yet. */
static void sde_crtc_disable(struct drm_crtc *crtc)
{
	DBG("");
}
553
/* CRTC power-up hook; hardware bring-up not implemented yet. */
static void sde_crtc_enable(struct drm_crtc *crtc)
{
	DBG("");
}
558
/*
 * plane_state - pairs a DRM plane with its SDE-specific state so the
 * set can be sorted by the ZPOS property in sde_crtc_atomic_check().
 */
struct plane_state {
	struct drm_plane *plane;
	struct sde_plane_state *state;
};
563
564static int pstate_cmp(const void *a, const void *b)
565{
566 struct plane_state *pa = (struct plane_state *)a;
567 struct plane_state *pb = (struct plane_state *)b;
568
Clarence Ipe78efb72016-06-24 18:35:21 -0400569 return (int)pa->state->property_values[PLANE_PROP_ZPOS] -
570 (int)pb->state->property_values[PLANE_PROP_ZPOS];
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400571}
572
/*
 * sde_crtc_atomic_check - validate the planes attached to this CRTC and
 * assign mixer stages (z-order) based on the ZPOS property.
 *
 * Return: 0 on success, -EINVAL when more planes are attached than the
 * mixer supports.
 */
static int sde_crtc_atomic_check(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_kms *sde_kms = get_kms(crtc);
	struct drm_plane *plane;
	struct plane_state pstates[SDE_STAGE_MAX];
	int max_stages = CRTC_HW_MIXER_MAXSTAGES(sde_kms->catalog, 0);
	int cnt = 0, i;

	DBG("%s: check", sde_crtc->name);

	/* verify that there are not too many planes attached to crtc
	 * and that we don't have conflicting mixer stages:
	 */
	drm_atomic_crtc_state_for_each_plane(plane, state) {
		struct drm_plane_state *pstate;

		if (cnt >= (max_stages)) {
			DRM_ERROR("too many planes!\n");
			return -EINVAL;
		}

		pstate = state->state->plane_states[drm_plane_index(plane)];

		/* plane might not have changed, in which case take
		 * current state:
		 */
		if (!pstate)
			pstate = plane->state;
		pstates[cnt].plane = plane;
		pstates[cnt].state = to_sde_plane_state(pstate);

		cnt++;
	}

	/* assign a stage based on sorted zpos property */
	sort(pstates, cnt, sizeof(pstates[0]), pstate_cmp, NULL);

	for (i = 0; i < cnt; i++) {
		pstates[i].state->stage = SDE_STAGE_0 + i;
		DBG("%s: assign pipe %d on stage=%d", sde_crtc->name,
				sde_plane_pipe(pstates[i].plane),
				pstates[i].state->stage);
	}

	return 0;
}
621
/* CRTC core callbacks; config/flip/state handled by atomic helpers */
static const struct drm_crtc_funcs sde_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.destroy = sde_crtc_destroy,
	.page_flip = drm_atomic_helper_page_flip,
	.set_property = sde_crtc_set_property,
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.cursor_set = sde_crtc_cursor_set,
	.cursor_move = sde_crtc_cursor_move,
};
633
/* CRTC modeset/commit helper callbacks */
static const struct drm_crtc_helper_funcs sde_crtc_helper_funcs = {
	.mode_fixup = sde_crtc_mode_fixup,
	.mode_set_nofb = sde_crtc_mode_set_nofb,
	.disable = sde_crtc_disable,
	.enable = sde_crtc_enable,
	.atomic_check = sde_crtc_atomic_check,
	.atomic_begin = sde_crtc_atomic_begin,
	.atomic_flush = sde_crtc_atomic_flush,
};
643
/*
 * sde_crtc_vblank - enable/disable vblank delivery for this CRTC on
 * behalf of the DRM framework.
 * @crtc: DRM CRTC
 * @en: true to enable, false to disable
 *
 * Registers (or unregisters) the vblank callback with the encoder.
 *
 * Return: always 0
 */
int sde_crtc_vblank(struct drm_crtc *crtc, bool en)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);

	DBG("%d", en);

	/*
	 * Mark that framework requested vblank,
	 * as opposed to enabling vblank only for our internal purposes
	 * Currently this variable isn't required, but may be useful for future
	 * features
	 */
	sde_crtc->drm_requested_vblank = en;

	if (en)
		sde_encoder_register_vblank_callback(sde_crtc->encoder,
				sde_crtc_vblank_cb, (void *)crtc);
	else
		sde_encoder_register_vblank_callback(sde_crtc->encoder,
				NULL, NULL);

	return 0;
}
667
/* NOTE(review): stub — pending flips are not cancelled on file close yet */
void sde_crtc_cancel_pending_flip(struct drm_crtc *crtc, struct drm_file *file)
{
}
671
/* NOTE(review): stub — no CRTC-specific DRM properties are installed yet */
static void sde_crtc_install_properties(struct drm_crtc *crtc,
		struct drm_mode_object *obj)
{
}
676
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700677
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400678/* initialize crtc */
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700679struct drm_crtc *sde_crtc_init(struct drm_device *dev,
680 struct drm_encoder *encoder,
681 struct drm_plane *plane, int id)
682{
683 struct drm_crtc *crtc = NULL;
684 struct sde_crtc *sde_crtc;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400685 int rc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700686
687 sde_crtc = kzalloc(sizeof(*sde_crtc), GFP_KERNEL);
688 if (!sde_crtc)
689 return ERR_PTR(-ENOMEM);
690
691 crtc = &sde_crtc->base;
692
693 sde_crtc->id = id;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400694 sde_crtc->encoder = encoder;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700695
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400696 sde_crtc_install_properties(crtc, &crtc->base);
697
698 drm_crtc_init_with_planes(dev, crtc, plane, NULL, &sde_crtc_funcs);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700699
700 drm_crtc_helper_add(crtc, &sde_crtc_helper_funcs);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400701 plane->crtc = crtc;
702
703 rc = sde_crtc_reserve_hw_resources(crtc, encoder);
704 if (rc) {
Lloyd Atkinsond49de562016-05-30 13:23:48 -0400705 DRM_ERROR(" error reserving HW resource for this CRTC\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400706 return ERR_PTR(-EINVAL);
707 }
708
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400709 DBG("%s: Successfully initialized crtc", __func__);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700710 return crtc;
711}