/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <linux/sort.h>
#include <linux/debugfs.h>
#include <linux/ktime.h>
#include <uapi/drm/sde_drm.h>
#include <drm/drm_mode.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_flip_work.h>

#include "sde_kms.h"
#include "sde_hw_lm.h"
#include "sde_hw_ctl.h"
#include "sde_crtc.h"

#define CTL(i)       (CTL_0 + (i))
#define LM(i)        (LM_0 + (i))
#define INTF(i)      (INTF_0 + (i))

/* uncomment to enable higher level IRQ msg's */
/*#define DBG_IRQ DBG*/
#define DBG_IRQ(fmt, ...)

/* default input fence timeout, in ms */
#define SDE_CRTC_INPUT_FENCE_TIMEOUT    2000

static struct sde_kms *get_kms(struct drm_crtc *crtc)
{
	struct msm_drm_private *priv = crtc->dev->dev_private;

	return to_sde_kms(priv->kms);
}

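/*
 * Reserve the CTL paths and layer mixers needed by this CRTC's encoder,
 * using the platform resource map when one specifies a mixer and falling
 * back to any unused mixers otherwise.
 */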
static int sde_crtc_reserve_hw_resources(struct drm_crtc *crtc,
		struct drm_encoder *encoder)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_kms *sde_kms = get_kms(crtc);
	struct sde_encoder_hw_resources enc_hw_res;
	const struct sde_hw_res_map *plat_hw_res_map;
	enum sde_lm unused_lm_id[CRTC_DUAL_MIXERS] = {0};
	enum sde_lm lm_idx;
	int i, unused_lm_count = 0;

	if (!sde_kms) {
		DBG("[%s] invalid kms", __func__);
		return -EINVAL;
	}

	if (!sde_kms->mmio)
		return -EINVAL;

	/* Get unused LMs */
	for (i = sde_kms->catalog->mixer_count - 1; i >= 0; --i) {
		if (!sde_rm_get_mixer(sde_kms, LM(i))) {
			unused_lm_id[unused_lm_count++] = LM(i);
			if (unused_lm_count == CRTC_DUAL_MIXERS)
				break;
		}
	}

	/* query encoder resources */
	sde_encoder_get_hw_resources(sde_crtc->encoder, &enc_hw_res);

	/* parse encoder hw resources, find CTL paths */
	for (i = CTL_0; i <= sde_kms->catalog->ctl_count; i++) {
		WARN_ON(sde_crtc->num_ctls > CRTC_DUAL_MIXERS);
		if (enc_hw_res.ctls[i]) {
			struct sde_crtc_mixer *mixer =
				&sde_crtc->mixer[sde_crtc->num_ctls];

			mixer->hw_ctl = sde_rm_get_ctl_path(sde_kms, i);
			if (IS_ERR_OR_NULL(mixer->hw_ctl)) {
				DRM_ERROR("Invalid ctl_path\n");
				return PTR_ERR(mixer->hw_ctl);
			}
			sde_crtc->num_ctls++;
		}
	}

	/* shortcut this process if encoder has no ctl paths */
	if (!sde_crtc->num_ctls)
		return 0;

	/*
	 * Get default LMs if specified in platform config,
	 * otherwise acquire the free LMs
	 */
	for (i = INTF_0; i <= sde_kms->catalog->intf_count; i++) {
		if (enc_hw_res.intfs[i]) {
			struct sde_crtc_mixer *mixer =
				&sde_crtc->mixer[sde_crtc->num_mixers];

			plat_hw_res_map = sde_rm_get_res_map(sde_kms, i);

			lm_idx = plat_hw_res_map->lm;
			if (!lm_idx && unused_lm_count)
				lm_idx = unused_lm_id[--unused_lm_count];

			DBG("Acquiring LM %d", lm_idx);
			mixer->hw_lm = sde_rm_acquire_mixer(sde_kms, lm_idx);
			if (IS_ERR_OR_NULL(mixer->hw_lm)) {
				DRM_ERROR("Invalid mixer\n");
				return -EACCES;
			}
			/* interface info */
			mixer->intf_idx = i;
			mixer->mode = enc_hw_res.intfs[i];
			sde_crtc->num_mixers++;
		}
	}

	DBG("control paths %d, num_mixers %d, lm[0] %d, ctl[0] %d",
			sde_crtc->num_ctls, sde_crtc->num_mixers,
			sde_crtc->mixer[0].hw_lm->idx,
			sde_crtc->mixer[0].hw_ctl->idx);
	if (sde_crtc->num_mixers > 1)
		DBG("lm[1] %d, ctl[1] %d",
				sde_crtc->mixer[1].hw_lm->idx,
				sde_crtc->mixer[1].hw_ctl->idx);
	return 0;
}

static void sde_crtc_destroy(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);

	DBG("");

	if (!crtc)
		return;

	msm_property_destroy(&sde_crtc->property_info);
	debugfs_remove_recursive(sde_crtc->debugfs_root);
	sde_fence_deinit(&sde_crtc->output_fence);

	drm_crtc_cleanup(crtc);
	kfree(sde_crtc);
}

static bool sde_crtc_mode_fixup(struct drm_crtc *crtc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adjusted_mode)
{
	DBG("");

	if (msm_is_mode_seamless(adjusted_mode)) {
		DBG("Seamless mode set requested");
		if (!crtc->enabled || crtc->state->active_changed) {
			DRM_ERROR("crtc state prevents seamless transition");
			return false;
		}
	}

	return true;
}

static void sde_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	DBG("");
}

static void sde_crtc_get_blend_cfg(struct sde_hw_blend_cfg *cfg,
		struct sde_plane_state *pstate)
{
	struct drm_plane *plane;
	const struct sde_format *format;
	uint32_t blend_op;

	format = to_sde_format(
			msm_framebuffer_format(pstate->base.fb));
	plane = pstate->base.plane;

	memset(cfg, 0, sizeof(*cfg));

	/* default to opaque blending */
	cfg->fg.alpha_sel = ALPHA_FG_CONST;
	cfg->bg.alpha_sel = ALPHA_BG_CONST;
	cfg->fg.const_alpha =
		sde_plane_get_property32(pstate, PLANE_PROP_ALPHA);
	cfg->bg.const_alpha = 0xFF - cfg->fg.const_alpha;

	blend_op = sde_plane_get_property32(pstate, PLANE_PROP_BLEND_OP);

	if (format->alpha_enable) {
		switch (blend_op) {
		case SDE_DRM_BLEND_OP_PREMULTIPLIED:
			cfg->fg.alpha_sel = ALPHA_FG_CONST;
			cfg->bg.alpha_sel = ALPHA_FG_PIXEL;
			if (cfg->fg.const_alpha != 0xff) {
				cfg->bg.const_alpha = cfg->fg.const_alpha;
				cfg->bg.mod_alpha = 1;
				cfg->bg.inv_alpha_sel = 1;
			} else {
				cfg->bg.inv_mode_alpha = 1;
			}
			break;
		case SDE_DRM_BLEND_OP_COVERAGE:
			cfg->fg.alpha_sel = ALPHA_FG_PIXEL;
			cfg->bg.alpha_sel = ALPHA_FG_PIXEL;
			if (cfg->fg.const_alpha != 0xff) {
				cfg->bg.const_alpha = cfg->fg.const_alpha;
				cfg->fg.mod_alpha = 1;
				cfg->bg.inv_alpha_sel = 1;
				cfg->bg.mod_alpha = 1;
				cfg->bg.inv_mode_alpha = 1;
			} else {
				cfg->bg.inv_mode_alpha = 1;
			}
			break;
		default:
			/* do nothing */
			break;
		}
	} else {
		cfg->bg.inv_alpha_sel = 1;
		/* force 100% alpha */
		cfg->fg.const_alpha = 0xFF;
		cfg->bg.const_alpha = 0x00;
	}

	DBG("format 0x%x, alpha_enable %u blend_op %u",
			format->base.pixel_format, format->alpha_enable,
			blend_op);
	DBG("fg alpha config %d %d %d %d %d",
			cfg->fg.alpha_sel, cfg->fg.const_alpha, cfg->fg.mod_alpha,
			cfg->fg.inv_alpha_sel, cfg->fg.inv_mode_alpha);
	DBG("bg alpha config %d %d %d %d %d",
			cfg->bg.alpha_sel, cfg->bg.const_alpha, cfg->bg.mod_alpha,
			cfg->bg.inv_alpha_sel, cfg->bg.inv_mode_alpha);
}

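/*
 * Program the blend configuration for every staged plane on each layer
 * mixer, cache the per-mixer flush masks, and enable border color when
 * no base layer is staged.
 */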
static void blend_setup(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_crtc_mixer *mixer = sde_crtc->mixer;
	struct drm_plane *plane;
	struct sde_plane_state *pstate;
	struct sde_hw_blend_cfg blend;
	struct sde_hw_ctl *ctl;
	struct sde_hw_mixer *lm;
	struct sde_hw_color3_cfg alpha_out;
	u32 flush_mask = 0;
	unsigned long flags;
	int i, plane_cnt = 0;

	DBG("");
	spin_lock_irqsave(&sde_crtc->lm_lock, flags);

	/* ctl could be reserved already */
	if (!sde_crtc->num_ctls)
		goto out;

	/* initialize stage cfg */
	memset(&sde_crtc->stage_cfg, 0, sizeof(struct sde_hw_stage_cfg));

	for (i = 0; i < sde_crtc->num_mixers; i++) {
		if ((!mixer[i].hw_lm) || (!mixer[i].hw_ctl))
			continue;

		ctl = mixer[i].hw_ctl;
		lm = mixer[i].hw_lm;
		memset(&alpha_out, 0, sizeof(alpha_out));

		drm_atomic_crtc_for_each_plane(plane, crtc) {
			pstate = to_sde_plane_state(plane->state);
			sde_crtc->stage_cfg.stage[pstate->stage][i] =
				sde_plane_pipe(plane);
			DBG("crtc_id %d - mixer %d pipe %d at stage %d",
					sde_crtc->id,
					i,
					sde_plane_pipe(plane),
					pstate->stage);
			plane_cnt++;

			/*
			 * Cache the flush mask for this layer.
			 * Source split is always enabled, so this layer will
			 * be staged on both the mixers.
			 */
			ctl = mixer[i].hw_ctl;
			ctl->ops.get_bitmask_sspp(ctl, &flush_mask,
					sde_plane_pipe(plane));

			/* blend config */
			sde_crtc_get_blend_cfg(&blend, pstate);
			lm->ops.setup_blend_config(lm, pstate->stage, &blend);
			alpha_out.keep_fg[pstate->stage] = 1;
		}
		lm->ops.setup_alpha_out(lm, &alpha_out);

		/* stage config flush mask */
		mixer[i].flush_mask = flush_mask;
		/* get the flush mask for mixer */
		ctl->ops.get_bitmask_mixer(ctl, &mixer[i].flush_mask,
				mixer[i].hw_lm->idx);
	}

	/*
	 * If there is no base layer, enable border color.
	 * Currently border color is always black.
	 */
	if ((sde_crtc->stage_cfg.stage[SDE_STAGE_BASE][0] == SSPP_NONE) &&
			plane_cnt) {
		sde_crtc->stage_cfg.border_enable = 1;
		DBG("Border Color is enabled");
	}

	/* Program ctl_paths */
	for (i = 0; i < sde_crtc->num_ctls; i++) {
		if ((!mixer[i].hw_lm) || (!mixer[i].hw_ctl))
			continue;

		ctl = mixer[i].hw_ctl;
		lm = mixer[i].hw_lm;

		/* same stage config to all mixers */
		ctl->ops.setup_blendstage(ctl, mixer[i].hw_lm->idx,
				&sde_crtc->stage_cfg);
	}
out:
	spin_unlock_irqrestore(&sde_crtc->lm_lock, flags);
}

void sde_crtc_prepare_fence(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;

	if (!crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}

	sde_crtc = to_sde_crtc(crtc);

	MSM_EVT(crtc->dev, sde_crtc->id, crtc->enabled);

	sde_fence_prepare(&sde_crtc->output_fence);
}

/* if file!=NULL, this is preclose potential cancel-flip path */
static void complete_flip(struct drm_crtc *crtc, struct drm_file *file)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_pending_vblank_event *event;
	unsigned long flags;

	spin_lock_irqsave(&dev->event_lock, flags);
	event = sde_crtc->event;
	if (event) {
		/* if regular vblank case (!file) or if cancel-flip from
		 * preclose on file that requested flip, then send the
		 * event:
		 */
		if (!file || (event->base.file_priv == file)) {
			sde_crtc->event = NULL;
			DBG("%s: send event: %pK", sde_crtc->name, event);
			drm_send_vblank_event(dev, sde_crtc->id, event);
		}
	}
	spin_unlock_irqrestore(&dev->event_lock, flags);
}

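/*
 * Vblank interrupt callback registered with the encoder: completes any
 * pending page flip and forwards the vblank to DRM when userspace has
 * requested vblank events.
 */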
static void sde_crtc_vblank_cb(void *data)
{
	struct drm_crtc *crtc = (struct drm_crtc *)data;
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_kms *sde_kms = get_kms(crtc);
	struct drm_device *dev = sde_kms->dev;
	unsigned int pending;

	pending = atomic_xchg(&sde_crtc->pending, 0);

	if (pending & PENDING_FLIP) {
		complete_flip(crtc, NULL);
		/* free ref count paired with the atomic_flush */
		drm_crtc_vblank_put(crtc);
	}

	if (sde_crtc->drm_requested_vblank) {
		drm_handle_vblank(dev, sde_crtc->id);
		DBG_IRQ("");
		MSM_EVT(crtc->dev, sde_crtc->id, 0);
	}
}

static int _sde_crtc_update_ctl_flush_mask(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_hw_ctl *ctl;
	struct sde_crtc_mixer *mixer;
	int i;

	if (!crtc) {
		DRM_ERROR("invalid argument\n");
		return -EINVAL;
	}

	MSM_EVT(crtc->dev, sde_crtc->id, 0);

	DBG("");

	for (i = 0; i < sde_crtc->num_ctls; i++) {
		mixer = &sde_crtc->mixer[i];
		ctl = mixer->hw_ctl;
		ctl->ops.get_bitmask_intf(ctl, &mixer->flush_mask,
				mixer->intf_idx);
		ctl->ops.update_pending_flush(ctl, mixer->flush_mask);
		DBG("added CTL_ID %d mask 0x%x to pending flush", ctl->idx,
				mixer->flush_mask);
	}

	return 0;
}

void sde_crtc_complete_commit(struct drm_crtc *crtc)
{
	if (!crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}

	/* signal out fence at end of commit */
	sde_fence_signal(&to_sde_crtc(crtc)->output_fence, 0);
}

/**
 * _sde_crtc_trigger_kickoff - Iterate through the control paths and trigger
 *	the hw_ctl object to flush any pending flush mask, and trigger
 *	control start if the interface types require it.
 *
 * This is currently designed to be called only once per crtc, per flush.
 * It should be called from the encoder, through the
 * sde_encoder_schedule_kickoff callflow, after all the encoders are ready
 * to have CTL_START triggered.
 *
 * It is called from the commit thread context.
 * @data: crtc pointer
 */
static void _sde_crtc_trigger_kickoff(void *data)
{
	struct drm_crtc *crtc = (struct drm_crtc *)data;
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_hw_ctl *ctl;
	u32 i;

	if (!data) {
		DRM_ERROR("invalid argument\n");
		return;
	}

	MSM_EVT(crtc->dev, sde_crtc->id, 0);

	/* Commit all pending flush masks to hardware */
	for (i = 0; i < sde_crtc->num_ctls; i++) {
		ctl = sde_crtc->mixer[i].hw_ctl;
		ctl->ops.trigger_flush(ctl);
	}

	/* Signal start to any interface types that require it */
	for (i = 0; i < sde_crtc->num_ctls; i++) {
		ctl = sde_crtc->mixer[i].hw_ctl;
		if (sde_crtc->mixer[i].mode != INTF_MODE_VIDEO) {
			ctl->ops.trigger_start(ctl);
			DBG("trigger start on ctl %d", ctl->idx);
		}
	}
}

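/*
 * Block until the encoder reports that the last commit has reached the
 * hardware, e.g. waiting for vsync on video mode panels; expected to be
 * a no-op for command mode panels.
 */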
void sde_crtc_wait_for_commit_done(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	int ret;

	/* ref count the vblank event and interrupts while we wait for it */
	if (drm_crtc_vblank_get(crtc))
		return;

	/*
	 * Wait post-flush if necessary to delay before plane_cleanup
	 * For example, wait for vsync in case of video mode panels
	 * This should be a no-op for command mode panels
	 */
	MSM_EVT(crtc->dev, sde_crtc->id, 0);
	ret = sde_encoder_wait_for_commit_done(sde_crtc->encoder);
	if (ret)
		DBG("sde_encoder_wait_for_commit_done returned %d", ret);

	/* release vblank event ref count */
	drm_crtc_vblank_put(crtc);
}

/**
 * _sde_crtc_set_input_fence_timeout - update ns version of in fence timeout
 * @cstate: Pointer to sde crtc state
 */
static void _sde_crtc_set_input_fence_timeout(struct sde_crtc_state *cstate)
{
	if (!cstate) {
		DRM_ERROR("invalid cstate\n");
		return;
	}
	cstate->input_fence_timeout_ns =
		sde_crtc_get_property(cstate, CRTC_PROP_INPUT_FENCE_TIMEOUT);
	cstate->input_fence_timeout_ns *= NSEC_PER_MSEC;
}

/**
 * _sde_crtc_wait_for_fences - wait for incoming framebuffer sync fences
 * @crtc: Pointer to CRTC object
 */
static void _sde_crtc_wait_for_fences(struct drm_crtc *crtc)
{
	struct drm_plane *plane = NULL;
	uint32_t wait_ms = 1;
	u64 ktime_end;
	s64 ktime_wait; /* need signed 64-bit type */

	DBG("");

	if (!crtc || !crtc->state) {
		DRM_ERROR("invalid crtc/state %pK\n", crtc);
		return;
	}

	/* use monotonic timer to limit total fence wait time */
	ktime_end = ktime_get_ns() +
		to_sde_crtc_state(crtc->state)->input_fence_timeout_ns;

	/*
	 * Wait for fences sequentially, as all of them need to be signalled
	 * before we can proceed.
	 *
	 * Limit total wait time to INPUT_FENCE_TIMEOUT, but still call
	 * sde_plane_wait_input_fence with wait_ms == 0 after the timeout so
	 * that each plane can check its fence status and react appropriately
	 * if its fence has timed out.
	 */
	drm_atomic_crtc_for_each_plane(plane, crtc) {
		if (wait_ms) {
			/* determine updated wait time */
			ktime_wait = ktime_end - ktime_get_ns();
			if (ktime_wait >= 0)
				wait_ms = ktime_wait / NSEC_PER_MSEC;
			else
				wait_ms = 0;
		}
		sde_plane_wait_input_fence(plane, wait_ms);
	}
}

static void sde_crtc_atomic_begin(struct drm_crtc *crtc,
		struct drm_crtc_state *old_crtc_state)
{
	struct sde_crtc *sde_crtc;
	struct drm_device *dev;
	unsigned long flags;
	u32 i;

	DBG("");

	if (!crtc) {
		DRM_ERROR("invalid crtc\n");
		return;
	}

	sde_crtc = to_sde_crtc(crtc);
	dev = crtc->dev;

	if (sde_crtc->event) {
		WARN_ON(sde_crtc->event);
	} else {
		spin_lock_irqsave(&dev->event_lock, flags);
		sde_crtc->event = crtc->state->event;
		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	/* Reset flush mask from previous commit */
	for (i = 0; i < sde_crtc->num_ctls; i++) {
		struct sde_hw_ctl *ctl = sde_crtc->mixer[i].hw_ctl;

		sde_crtc->mixer[i].flush_mask = 0;
		ctl->ops.clear_pending_flush(ctl);
	}

	/*
	 * If no CTL has been allocated in sde_crtc_atomic_check(),
	 * it means we are trying to flush a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!sde_crtc->num_ctls))
		return;

	blend_setup(crtc);

	/*
	 * PP_DONE irq is only used by command mode for now.
	 * It is better to request pending before FLUSH and START trigger
	 * to make sure no pp_done irq missed.
	 * This is safe because no pp_done will happen before SW trigger
	 * in command mode.
	 */
}

static void request_pending(struct drm_crtc *crtc, u32 pending)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);

	atomic_or(pending, &sde_crtc->pending);

	/* ref count the vblank event and interrupts over the atomic commit */
	if (drm_crtc_vblank_get(crtc))
		return;
}

static void sde_crtc_atomic_flush(struct drm_crtc *crtc,
		struct drm_crtc_state *old_crtc_state)
{
	struct sde_crtc *sde_crtc;
	struct drm_device *dev;
	struct drm_plane *plane;
	unsigned long flags;

	if (!crtc) {
		DRM_ERROR("invalid crtc\n");
		return;
	}

	DBG("");

	sde_crtc = to_sde_crtc(crtc);

	dev = crtc->dev;

	if (sde_crtc->event) {
		DBG("already received sde_crtc->event");
	} else {
		spin_lock_irqsave(&dev->event_lock, flags);
		sde_crtc->event = crtc->state->event;
		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	/*
	 * If no CTL has been allocated in sde_crtc_atomic_check(),
	 * it means we are trying to flush a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!sde_crtc->num_ctls))
		return;

	/* wait for acquire fences before anything else is done */
	_sde_crtc_wait_for_fences(crtc);

	/*
	 * Final plane updates: Give each plane a chance to complete all
	 * required writes/flushing before crtc's "flush
	 * everything" call below.
	 */
	drm_atomic_crtc_for_each_plane(plane, crtc)
		sde_plane_flush(plane);

	/* Add pending blocks to the flush mask */
	if (_sde_crtc_update_ctl_flush_mask(crtc))
		return;

	request_pending(crtc, PENDING_FLIP);

	/* Kickoff will be scheduled by outer layer */
}

/**
 * sde_crtc_destroy_state - state destroy hook
 * @crtc: drm CRTC
 * @state: CRTC state object to release
 */
static void sde_crtc_destroy_state(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *cstate;

	if (!crtc || !state) {
		DRM_ERROR("invalid argument(s)\n");
		return;
	}

	sde_crtc = to_sde_crtc(crtc);
	cstate = to_sde_crtc_state(state);

	DBG("");

	__drm_atomic_helper_crtc_destroy_state(crtc, state);

	/* destroy value helper */
	msm_property_destroy_state(&sde_crtc->property_info, cstate,
			cstate->property_values, cstate->property_blobs);
}

void sde_crtc_commit_kickoff(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);

	if (!crtc) {
		DRM_ERROR("invalid argument\n");
		return;
	}

	/*
	 * Encoder will flush/start now, unless it has a tx pending
	 * in which case it may delay and flush at an irq event (e.g. ppdone)
	 */
	sde_encoder_schedule_kickoff(sde_crtc->encoder,
			_sde_crtc_trigger_kickoff, crtc);
}

/**
 * sde_crtc_duplicate_state - state duplicate hook
 * @crtc: Pointer to drm crtc structure
 * @Returns: Pointer to new drm_crtc_state structure
 */
static struct drm_crtc_state *sde_crtc_duplicate_state(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *cstate, *old_cstate;

	if (!crtc || !crtc->state) {
		DRM_ERROR("invalid argument(s)\n");
		return NULL;
	}

	sde_crtc = to_sde_crtc(crtc);
	old_cstate = to_sde_crtc_state(crtc->state);
	cstate = msm_property_alloc_state(&sde_crtc->property_info);
	if (!cstate) {
		DRM_ERROR("failed to allocate state\n");
		return NULL;
	}

	/* duplicate value helper */
	msm_property_duplicate_state(&sde_crtc->property_info,
			old_cstate, cstate,
			cstate->property_values, cstate->property_blobs);

	/* duplicate base helper */
	__drm_atomic_helper_crtc_duplicate_state(crtc, &cstate->base);

	return &cstate->base;
}

/**
 * sde_crtc_reset - reset hook for CRTCs
 * Resets the atomic state for @crtc by freeing the state pointer (which might
 * be NULL, e.g. at driver load time) and allocating a new empty state object.
 * @crtc: Pointer to drm crtc structure
 */
static void sde_crtc_reset(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *cstate;

	if (!crtc) {
		DRM_ERROR("invalid crtc\n");
		return;
	}

	/* remove previous state, if present */
	if (crtc->state) {
		sde_crtc_destroy_state(crtc, crtc->state);
		crtc->state = NULL;
	}

	sde_crtc = to_sde_crtc(crtc);
	cstate = msm_property_alloc_state(&sde_crtc->property_info);
	if (!cstate) {
		DRM_ERROR("failed to allocate state\n");
		return;
	}

	/* reset value helper */
	msm_property_reset_state(&sde_crtc->property_info, cstate,
			cstate->property_values, cstate->property_blobs);

	_sde_crtc_set_input_fence_timeout(cstate);

	cstate->base.crtc = crtc;
	crtc->state = &cstate->base;
}

static int sde_crtc_cursor_set(struct drm_crtc *crtc,
		struct drm_file *file, uint32_t handle,
		uint32_t width, uint32_t height)
{
	return 0;
}

static int sde_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	return 0;
}

static void sde_crtc_disable(struct drm_crtc *crtc)
{
	DBG("");
}

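/*
 * Enable the CRTC: reserve mixers/CTL paths if they have not been reserved
 * yet and program the mixer output dimensions for the current adjusted
 * mode (halved per mixer when dual mixers are in use).
 */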
static void sde_crtc_enable(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_mixer *mixer;
	struct sde_hw_mixer *lm;
	unsigned long flags;
	struct drm_display_mode *mode;
	struct sde_hw_mixer_cfg cfg;
	u32 mixer_width;
	int i;
	int rc;

	if (!crtc) {
		DRM_ERROR("invalid crtc\n");
		return;
	}

	DBG("");

	sde_crtc = to_sde_crtc(crtc);
	mixer = sde_crtc->mixer;

	if (WARN_ON(!crtc->state))
		return;

	mode = &crtc->state->adjusted_mode;

	drm_mode_debug_printmodeline(mode);

	/*
	 * reserve mixer(s) if not already available
	 * if dual mode, mixer_width = half mode width
	 * program mode configuration on mixer(s)
	 */
	if ((sde_crtc->num_ctls == 0) ||
			(sde_crtc->num_mixers == 0)) {
		rc = sde_crtc_reserve_hw_resources(crtc, sde_crtc->encoder);
		if (rc) {
			DRM_ERROR("error reserving HW resource for CRTC\n");
			return;
		}
	}

	if (sde_crtc->num_mixers == CRTC_DUAL_MIXERS)
		mixer_width = mode->hdisplay >> 1;
	else
		mixer_width = mode->hdisplay;

	spin_lock_irqsave(&sde_crtc->lm_lock, flags);

	for (i = 0; i < sde_crtc->num_mixers; i++) {
		lm = mixer[i].hw_lm;
		cfg.out_width = mixer_width;
		cfg.out_height = mode->vdisplay;
		cfg.right_mixer = (i == 0) ? false : true;
		cfg.flags = 0;
		lm->ops.setup_mixer_out(lm, &cfg);
	}

	spin_unlock_irqrestore(&sde_crtc->lm_lock, flags);
}

struct plane_state {
	struct drm_plane *plane;
	struct sde_plane_state *state;
};

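/* sort() comparator: order plane states by their zpos property */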
static int pstate_cmp(const void *a, const void *b)
{
	struct plane_state *pa = (struct plane_state *)a;
	struct plane_state *pb = (struct plane_state *)b;

	return (int)sde_plane_get_property(pa->state, PLANE_PROP_ZPOS) -
		(int)sde_plane_get_property(pb->state, PLANE_PROP_ZPOS);
}

static int sde_crtc_atomic_check(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct sde_crtc *sde_crtc;
	struct sde_kms *sde_kms;
	struct drm_plane *plane;
	struct plane_state pstates[SDE_STAGE_MAX];
	int max_stages;
	int cnt = 0, i;

	if (!crtc) {
		DRM_ERROR("invalid crtc\n");
		return -EINVAL;
	}

	sde_crtc = to_sde_crtc(crtc);
	sde_kms = get_kms(crtc);
	if (!sde_kms) {
		DRM_ERROR("invalid kms\n");
		return -EINVAL;
	}
	max_stages = CRTC_HW_MIXER_MAXSTAGES(sde_kms->catalog, 0);

	DBG("%s: check", sde_crtc->name);

	/* verify that there are not too many planes attached to crtc
	 * and that we don't have conflicting mixer stages:
	 */
	drm_atomic_crtc_state_for_each_plane(plane, state) {
		struct drm_plane_state *pstate;

		if (cnt >= (max_stages)) {
			DRM_ERROR("too many planes!\n");
			return -EINVAL;
		}

		pstate = state->state->plane_states[drm_plane_index(plane)];

		/* plane might not have changed, in which case take
		 * current state:
		 */
		if (!pstate)
			pstate = plane->state;
		pstates[cnt].plane = plane;
		pstates[cnt].state = to_sde_plane_state(pstate);

		cnt++;
	}

	/* assign a stage based on sorted zpos property */
	sort(pstates, cnt, sizeof(pstates[0]), pstate_cmp, NULL);

	for (i = 0; i < cnt; i++) {
		pstates[i].state->stage = SDE_STAGE_0 + i;
		DBG("%s: assign pipe %d on stage=%d zpos %d", sde_crtc->name,
				sde_plane_pipe(pstates[i].plane),
				pstates[i].state->stage,
				sde_plane_get_property32(pstates[i].state,
						PLANE_PROP_ZPOS));
	}

	return 0;
}

int sde_crtc_vblank(struct drm_crtc *crtc, bool en)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);

	DBG("%d", en);

	MSM_EVT(crtc->dev, en, 0);

	/*
	 * Mark that framework requested vblank,
	 * as opposed to enabling vblank only for our internal purposes.
	 * Currently this variable isn't required, but may be useful for
	 * future features.
	 */
	sde_crtc->drm_requested_vblank = en;

	if (en)
		sde_encoder_register_vblank_callback(sde_crtc->encoder,
				sde_crtc_vblank_cb, (void *)crtc);
	else
		sde_encoder_register_vblank_callback(sde_crtc->encoder,
				NULL, NULL);

	return 0;
}

void sde_crtc_cancel_pending_flip(struct drm_crtc *crtc, struct drm_file *file)
{
}

/**
 * sde_crtc_install_properties - install all drm properties for crtc
 * @crtc: Pointer to drm crtc structure
 */
static void sde_crtc_install_properties(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;
	struct drm_device *dev;

	DBG("");

	if (!crtc) {
		DRM_ERROR("invalid crtc\n");
		return;
	}

	sde_crtc = to_sde_crtc(crtc);
	dev = crtc->dev;

	/* range properties */
	msm_property_install_range(&sde_crtc->property_info,
			"input_fence_timeout",
			0, ~0, SDE_CRTC_INPUT_FENCE_TIMEOUT,
			CRTC_PROP_INPUT_FENCE_TIMEOUT);
	msm_property_install_range(&sde_crtc->property_info,
			"output_fence",
			0, ~0, ~0,
			CRTC_PROP_OUTPUT_FENCE);
}

/**
 * sde_crtc_atomic_set_property - atomically set a crtc drm property
 * @crtc: Pointer to drm crtc structure
 * @state: Pointer to drm crtc state structure
 * @property: Pointer to targeted drm property
 * @val: Updated property value
 * @Returns: Zero on success
 */
static int sde_crtc_atomic_set_property(struct drm_crtc *crtc,
		struct drm_crtc_state *state,
		struct drm_property *property,
		uint64_t val)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *cstate;
	int idx, ret = -EINVAL;

	if (!crtc || !state || !property) {
		DRM_ERROR("invalid argument(s)\n");
	} else {
		sde_crtc = to_sde_crtc(crtc);
		cstate = to_sde_crtc_state(state);
		ret = msm_property_atomic_set(&sde_crtc->property_info,
				cstate->property_values,
				cstate->property_blobs, property, val);
		if (!ret) {
			idx = msm_property_index(&sde_crtc->property_info,
					property);
			if (idx == CRTC_PROP_INPUT_FENCE_TIMEOUT)
				_sde_crtc_set_input_fence_timeout(cstate);
		}
	}

	return ret;
}

/**
 * sde_crtc_set_property - set a crtc drm property
 * @crtc: Pointer to drm crtc structure
 * @property: Pointer to targeted drm property
 * @val: Updated property value
 * @Returns: Zero on success
 */
static int sde_crtc_set_property(struct drm_crtc *crtc,
		struct drm_property *property, uint64_t val)
{
	DBG("");

	return sde_crtc_atomic_set_property(crtc, crtc->state, property, val);
}

/**
 * sde_crtc_atomic_get_property - retrieve a crtc drm property
 * @crtc: Pointer to drm crtc structure
 * @state: Pointer to drm crtc state structure
 * @property: Pointer to targeted drm property
 * @val: Pointer to variable for receiving property value
 * @Returns: Zero on success
 */
static int sde_crtc_atomic_get_property(struct drm_crtc *crtc,
		const struct drm_crtc_state *state,
		struct drm_property *property,
		uint64_t *val)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *cstate;
	int i, ret = -EINVAL;

	if (!crtc || !state) {
		DRM_ERROR("invalid argument(s)\n");
	} else {
		sde_crtc = to_sde_crtc(crtc);
		cstate = to_sde_crtc_state(state);
		i = msm_property_index(&sde_crtc->property_info, property);
		if (i == CRTC_PROP_OUTPUT_FENCE) {
			ret = sde_fence_create(&sde_crtc->output_fence, val);
		} else {
			ret = msm_property_atomic_get(&sde_crtc->property_info,
					cstate->property_values,
					cstate->property_blobs, property, val);
		}
	}

	return ret;
}

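/*
 * debugfs read handler: dump the mixer/CTL/interface mapping and the
 * current stage configuration for this CRTC.
 */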
static int _sde_debugfs_mixer_read(struct seq_file *s, void *data)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_mixer *m;
	int i, j;

	if (!s || !s->private)
		return -EINVAL;

	sde_crtc = s->private;
	for (i = 0; i < sde_crtc->num_mixers; ++i) {
		m = &sde_crtc->mixer[i];
		if (!m->hw_lm) {
			seq_printf(s, "Mixer[%d] has no LM\n", i);
		} else if (!m->hw_ctl) {
			seq_printf(s, "Mixer[%d] has no CTL\n", i);
		} else {
			seq_printf(s, "LM_%d/CTL_%d -> INTF_%d\n",
					m->hw_lm->idx - LM_0,
					m->hw_ctl->idx - CTL_0,
					m->intf_idx - INTF_0);
		}
	}
	seq_printf(s, "Border: %d\n", sde_crtc->stage_cfg.border_enable);
	for (i = 0; i < SDE_STAGE_MAX; ++i) {
		if (i == SDE_STAGE_BASE)
			seq_puts(s, "Base Stage:");
		else
			seq_printf(s, "Stage %d:", i - SDE_STAGE_0);
		for (j = 0; j < PIPES_PER_STAGE; ++j)
			seq_printf(s, " % 2d", sde_crtc->stage_cfg.stage[i][j]);
		seq_puts(s, "\n");
	}
	return 0;
}

static int _sde_debugfs_mixer_open(struct inode *inode, struct file *file)
{
	return single_open(file, _sde_debugfs_mixer_read, inode->i_private);
}

static const struct drm_crtc_funcs sde_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.destroy = sde_crtc_destroy,
	.page_flip = drm_atomic_helper_page_flip,
	.set_property = sde_crtc_set_property,
	.atomic_set_property = sde_crtc_atomic_set_property,
	.atomic_get_property = sde_crtc_atomic_get_property,
	.reset = sde_crtc_reset,
	.atomic_duplicate_state = sde_crtc_duplicate_state,
	.atomic_destroy_state = sde_crtc_destroy_state,
	.cursor_set = sde_crtc_cursor_set,
	.cursor_move = sde_crtc_cursor_move,
};

static const struct drm_crtc_helper_funcs sde_crtc_helper_funcs = {
	.mode_fixup = sde_crtc_mode_fixup,
	.mode_set_nofb = sde_crtc_mode_set_nofb,
	.disable = sde_crtc_disable,
	.enable = sde_crtc_enable,
	.atomic_check = sde_crtc_atomic_check,
	.atomic_begin = sde_crtc_atomic_begin,
	.atomic_flush = sde_crtc_atomic_flush,
};

static void _sde_crtc_init_debugfs(struct sde_crtc *sde_crtc,
		struct sde_kms *sde_kms)
{
	static const struct file_operations debugfs_mixer_fops = {
		.open = _sde_debugfs_mixer_open,
		.read = seq_read,
		.llseek = seq_lseek,
		.release = single_release,
	};

	if (sde_crtc && sde_kms) {
		sde_crtc->debugfs_root = debugfs_create_dir(sde_crtc->name,
				sde_debugfs_get_root(sde_kms));
		if (sde_crtc->debugfs_root) {
			/* don't error check these */
			debugfs_create_file("mixers", 0444,
					sde_crtc->debugfs_root,
					sde_crtc, &debugfs_mixer_fops);
		}
	}
}

/* initialize crtc */
struct drm_crtc *sde_crtc_init(struct drm_device *dev,
		struct drm_encoder *encoder,
		struct drm_plane *plane, int id)
{
	struct drm_crtc *crtc = NULL;
	struct sde_crtc *sde_crtc = NULL;
	struct msm_drm_private *priv = NULL;
	struct sde_kms *kms = NULL;
	int rc;

	priv = dev->dev_private;
	kms = to_sde_kms(priv->kms);

	sde_crtc = kzalloc(sizeof(*sde_crtc), GFP_KERNEL);
	if (!sde_crtc)
		return ERR_PTR(-ENOMEM);

	crtc = &sde_crtc->base;

	sde_crtc->id = id;
	sde_crtc->encoder = encoder;
	spin_lock_init(&sde_crtc->lm_lock);

	drm_crtc_init_with_planes(dev, crtc, plane, NULL, &sde_crtc_funcs);

	drm_crtc_helper_add(crtc, &sde_crtc_helper_funcs);
	plane->crtc = crtc;

	rc = sde_crtc_reserve_hw_resources(crtc, encoder);
	if (rc) {
		DRM_ERROR("error reserving HW resource for this CRTC\n");
		drm_crtc_cleanup(crtc);
		kfree(sde_crtc);
		return ERR_PTR(-EINVAL);
	}

	/* save user friendly CRTC name for later */
	snprintf(sde_crtc->name, SDE_CRTC_NAME_SIZE, "crtc%u", crtc->base.id);

	/*
	 * Initialize output fence support. Set output fence offset to zero
	 * so that fences returned during a commit will signal at the end of
	 * the same commit.
	 */
	sde_fence_init(dev, &sde_crtc->output_fence, sde_crtc->name, 0);

	/* initialize debugfs support */
	_sde_crtc_init_debugfs(sde_crtc, kms);

	/* create CRTC properties */
	msm_property_init(&sde_crtc->property_info, &crtc->base, dev,
			priv->crtc_property, sde_crtc->property_data,
			CRTC_PROP_COUNT, CRTC_PROP_BLOBCOUNT,
			sizeof(struct sde_crtc_state));

	sde_crtc_install_properties(crtc);

	DBG("%s: Successfully initialized crtc", sde_crtc->name);
	return crtc;
}