Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
Clarence Ip4f339092018-01-05 13:29:04 -05002 * Copyright (c) 2014-2018 The Linux Foundation. All rights reserved.
Dhaval Patel14d46ce2017-01-17 16:28:12 -08003 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ipd9f9fa62016-09-09 13:42:32 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040020#include <linux/sort.h>
Clarence Ip8f7366c2016-07-05 12:15:26 -040021#include <linux/debugfs.h>
Clarence Ipcae1bb62016-07-07 12:07:13 -040022#include <linux/ktime.h>
Clarence Ip4c1d9772016-06-26 09:35:38 -040023#include <uapi/drm/sde_drm.h>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070024#include <drm/drm_mode.h>
25#include <drm/drm_crtc.h>
26#include <drm/drm_crtc_helper.h>
27#include <drm/drm_flip_work.h>
28
29#include "sde_kms.h"
30#include "sde_hw_lm.h"
Clarence Ipc475b082016-06-26 09:27:23 -040031#include "sde_hw_ctl.h"
Abhijit Kulkarni40e38162016-06-26 22:12:09 -040032#include "sde_crtc.h"
Alan Kwong83285fb2016-10-21 20:51:17 -040033#include "sde_plane.h"
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -070034#include "sde_color_processing.h"
Alan Kwong83285fb2016-10-21 20:51:17 -040035#include "sde_encoder.h"
36#include "sde_connector.h"
Clarence Ip980405d2017-08-08 18:33:44 -040037#include "sde_vbif.h"
Alan Kwong67a3f792016-11-01 23:16:53 -040038#include "sde_power_handle.h"
Alan Kwong9aa061c2016-11-06 21:17:12 -050039#include "sde_core_perf.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070040#include "sde_trace.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040041
Raviteja Tamatamf61d7c02017-11-01 19:01:32 +053042#define SDE_PSTATES_MAX (SDE_STAGE_MAX * 4)
43#define SDE_MULTIRECT_PLANE_MAX (SDE_STAGE_MAX * 2)
44
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -070045struct sde_crtc_custom_events {
46 u32 event;
47 int (*func)(struct drm_crtc *crtc, bool en,
48 struct sde_irq_callback *irq);
49};
50
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -070051static int sde_crtc_power_interrupt_handler(struct drm_crtc *crtc_drm,
52 bool en, struct sde_irq_callback *ad_irq);
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +053053static int sde_crtc_idle_interrupt_handler(struct drm_crtc *crtc_drm,
54 bool en, struct sde_irq_callback *idle_irq);
Dhaval Patelc9e213b2017-11-02 12:13:12 -070055static int sde_crtc_pm_event_handler(struct drm_crtc *crtc, bool en,
56 struct sde_irq_callback *noirq);
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -070057
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -070058static struct sde_crtc_custom_events custom_events[] = {
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -070059 {DRM_EVENT_AD_BACKLIGHT, sde_cp_ad_interrupt},
Benjamin Chan90139102017-06-21 16:00:39 -040060 {DRM_EVENT_CRTC_POWER, sde_crtc_power_interrupt_handler},
Xu Yang5e53c2e2017-07-11 16:46:28 +080061 {DRM_EVENT_IDLE_NOTIFY, sde_crtc_idle_interrupt_handler},
62 {DRM_EVENT_HISTOGRAM, sde_cp_hist_interrupt},
Dhaval Patelc9e213b2017-11-02 12:13:12 -070063 {DRM_EVENT_SDE_POWER, sde_crtc_pm_event_handler},
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -070064};
65
Clarence Ipcae1bb62016-07-07 12:07:13 -040066/* default input fence timeout, in ms */
Dhaval Patelb9850c02017-08-07 22:55:47 -070067#define SDE_CRTC_INPUT_FENCE_TIMEOUT 10000
Clarence Ipcae1bb62016-07-07 12:07:13 -040068
Dhaval Patel4e574842016-08-23 15:11:37 -070069/*
 70 * The default input fence timeout is 10 seconds, which is also the
 71 * maximum allowed value. Any value above 10 seconds adds glitches beyond
 72 * the tolerance limit.
73 */
74#define SDE_CRTC_MAX_INPUT_FENCE_TIMEOUT 10000
75
Dhaval Patel48c76022016-09-01 17:51:23 -070076/* layer mixer index on sde_crtc */
77#define LEFT_MIXER 0
78#define RIGHT_MIXER 1
79
Dhaval Patelf9245d62017-03-28 16:24:00 -070080#define MISR_BUFF_SIZE 256
81
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -040082static inline struct sde_kms *_sde_crtc_get_kms(struct drm_crtc *crtc)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040083{
Clarence Ip7f70ce42017-03-20 06:53:46 -070084 struct msm_drm_private *priv;
85
86 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
87 SDE_ERROR("invalid crtc\n");
88 return NULL;
89 }
90 priv = crtc->dev->dev_private;
91 if (!priv || !priv->kms) {
92 SDE_ERROR("invalid kms\n");
93 return NULL;
94 }
Abhijit Kulkarni40e38162016-06-26 22:12:09 -040095
Ben Chan78647cd2016-06-26 22:02:47 -040096 return to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040097}
98
Dhaval Patelf9245d62017-03-28 16:24:00 -070099static inline int _sde_crtc_power_enable(struct sde_crtc *sde_crtc, bool enable)
100{
101 struct drm_crtc *crtc;
102 struct msm_drm_private *priv;
103 struct sde_kms *sde_kms;
104
105 if (!sde_crtc) {
106 SDE_ERROR("invalid sde crtc\n");
107 return -EINVAL;
108 }
109
110 crtc = &sde_crtc->base;
111 if (!crtc->dev || !crtc->dev->dev_private) {
112 SDE_ERROR("invalid drm device\n");
113 return -EINVAL;
114 }
115
116 priv = crtc->dev->dev_private;
117 if (!priv->kms) {
118 SDE_ERROR("invalid kms\n");
119 return -EINVAL;
120 }
121
122 sde_kms = to_sde_kms(priv->kms);
123
124 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
125 enable);
126}
127
Alan Kwongcdb2f282017-03-18 13:42:06 -0700128/**
129 * _sde_crtc_rp_to_crtc - get crtc from resource pool object
130 * @rp: Pointer to resource pool
131 * return: Pointer to drm crtc if success; null otherwise
132 */
133static struct drm_crtc *_sde_crtc_rp_to_crtc(struct sde_crtc_respool *rp)
134{
135 if (!rp)
136 return NULL;
137
138 return container_of(rp, struct sde_crtc_state, rp)->base.crtc;
139}
140
141/**
142 * _sde_crtc_rp_reclaim - reclaim unused, or all if forced, resources in pool
143 * @rp: Pointer to resource pool
144 * @force: True to reclaim all resources; otherwise, reclaim only unused ones
145 * return: None
146 */
147static void _sde_crtc_rp_reclaim(struct sde_crtc_respool *rp, bool force)
148{
149 struct sde_crtc_res *res, *next;
150 struct drm_crtc *crtc;
151
152 crtc = _sde_crtc_rp_to_crtc(rp);
153 if (!crtc) {
154 SDE_ERROR("invalid crtc\n");
155 return;
156 }
157
158 SDE_DEBUG("crtc%d.%u %s\n", crtc->base.id, rp->sequence_id,
159 force ? "destroy" : "free_unused");
160
161 list_for_each_entry_safe(res, next, &rp->res_list, list) {
162 if (!force && !(res->flags & SDE_CRTC_RES_FLAG_FREE))
163 continue;
164 SDE_DEBUG("crtc%d.%u reclaim res:0x%x/0x%llx/%pK/%d\n",
165 crtc->base.id, rp->sequence_id,
166 res->type, res->tag, res->val,
167 atomic_read(&res->refcount));
168 list_del(&res->list);
169 if (res->ops.put)
170 res->ops.put(res->val);
171 kfree(res);
172 }
173}
174
175/**
176 * _sde_crtc_rp_free_unused - free unused resource in pool
177 * @rp: Pointer to resource pool
178 * return: none
179 */
180static void _sde_crtc_rp_free_unused(struct sde_crtc_respool *rp)
181{
Alan Kwong310e9b02017-08-03 02:04:07 -0400182 mutex_lock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700183 _sde_crtc_rp_reclaim(rp, false);
Alan Kwong310e9b02017-08-03 02:04:07 -0400184 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700185}
186
187/**
188 * _sde_crtc_rp_destroy - destroy resource pool
189 * @rp: Pointer to resource pool
190 * return: None
191 */
192static void _sde_crtc_rp_destroy(struct sde_crtc_respool *rp)
193{
Alan Kwong310e9b02017-08-03 02:04:07 -0400194 mutex_lock(rp->rp_lock);
195 list_del_init(&rp->rp_list);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700196 _sde_crtc_rp_reclaim(rp, true);
Alan Kwong310e9b02017-08-03 02:04:07 -0400197 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700198}
199
200/**
201 * _sde_crtc_hw_blk_get - get callback for hardware block
202 * @val: Resource handle
203 * @type: Resource type
204 * @tag: Search tag for given resource
205 * return: Resource handle
206 */
207static void *_sde_crtc_hw_blk_get(void *val, u32 type, u64 tag)
208{
209 SDE_DEBUG("res:%d/0x%llx/%pK\n", type, tag, val);
210 return sde_hw_blk_get(val, type, tag);
211}
212
213/**
214 * _sde_crtc_hw_blk_put - put callback for hardware block
215 * @val: Resource handle
216 * return: None
217 */
218static void _sde_crtc_hw_blk_put(void *val)
219{
220 SDE_DEBUG("res://%pK\n", val);
221 sde_hw_blk_put(val);
222}
223
224/**
225 * _sde_crtc_rp_duplicate - duplicate resource pool and reset reference count
226 * @rp: Pointer to original resource pool
227 * @dup_rp: Pointer to duplicated resource pool
228 * return: None
229 */
230static void _sde_crtc_rp_duplicate(struct sde_crtc_respool *rp,
231 struct sde_crtc_respool *dup_rp)
232{
233 struct sde_crtc_res *res, *dup_res;
234 struct drm_crtc *crtc;
235
Alan Kwong310e9b02017-08-03 02:04:07 -0400236 if (!rp || !dup_rp || !rp->rp_head) {
Alan Kwongcdb2f282017-03-18 13:42:06 -0700237 SDE_ERROR("invalid resource pool\n");
238 return;
239 }
240
241 crtc = _sde_crtc_rp_to_crtc(rp);
242 if (!crtc) {
243 SDE_ERROR("invalid crtc\n");
244 return;
245 }
246
247 SDE_DEBUG("crtc%d.%u duplicate\n", crtc->base.id, rp->sequence_id);
248
Alan Kwong310e9b02017-08-03 02:04:07 -0400249 mutex_lock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700250 dup_rp->sequence_id = rp->sequence_id + 1;
251 INIT_LIST_HEAD(&dup_rp->res_list);
252 dup_rp->ops = rp->ops;
253 list_for_each_entry(res, &rp->res_list, list) {
254 dup_res = kzalloc(sizeof(struct sde_crtc_res), GFP_KERNEL);
Alan Kwong310e9b02017-08-03 02:04:07 -0400255 if (!dup_res) {
256 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700257 return;
Alan Kwong310e9b02017-08-03 02:04:07 -0400258 }
Alan Kwongcdb2f282017-03-18 13:42:06 -0700259 INIT_LIST_HEAD(&dup_res->list);
260 atomic_set(&dup_res->refcount, 0);
261 dup_res->type = res->type;
262 dup_res->tag = res->tag;
263 dup_res->val = res->val;
264 dup_res->ops = res->ops;
265 dup_res->flags = SDE_CRTC_RES_FLAG_FREE;
266 SDE_DEBUG("crtc%d.%u dup res:0x%x/0x%llx/%pK/%d\n",
267 crtc->base.id, dup_rp->sequence_id,
268 dup_res->type, dup_res->tag, dup_res->val,
269 atomic_read(&dup_res->refcount));
270 list_add_tail(&dup_res->list, &dup_rp->res_list);
271 if (dup_res->ops.get)
272 dup_res->ops.get(dup_res->val, 0, -1);
273 }
Alan Kwong310e9b02017-08-03 02:04:07 -0400274
275 dup_rp->rp_lock = rp->rp_lock;
276 dup_rp->rp_head = rp->rp_head;
277 INIT_LIST_HEAD(&dup_rp->rp_list);
278 list_add_tail(&dup_rp->rp_list, rp->rp_head);
279 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700280}
281
282/**
283 * _sde_crtc_rp_reset - reset resource pool after allocation
284 * @rp: Pointer to original resource pool
Alan Kwong310e9b02017-08-03 02:04:07 -0400285 * @rp_lock: Pointer to serialization resource pool lock
286 * @rp_head: Pointer to crtc resource pool head
Alan Kwongcdb2f282017-03-18 13:42:06 -0700287 * return: None
288 */
Alan Kwong310e9b02017-08-03 02:04:07 -0400289static void _sde_crtc_rp_reset(struct sde_crtc_respool *rp,
290 struct mutex *rp_lock, struct list_head *rp_head)
Alan Kwongcdb2f282017-03-18 13:42:06 -0700291{
Alan Kwong310e9b02017-08-03 02:04:07 -0400292 if (!rp || !rp_lock || !rp_head) {
Alan Kwongcdb2f282017-03-18 13:42:06 -0700293 SDE_ERROR("invalid resource pool\n");
294 return;
295 }
296
Alan Kwong310e9b02017-08-03 02:04:07 -0400297 mutex_lock(rp_lock);
298 rp->rp_lock = rp_lock;
299 rp->rp_head = rp_head;
300 INIT_LIST_HEAD(&rp->rp_list);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700301 rp->sequence_id = 0;
302 INIT_LIST_HEAD(&rp->res_list);
303 rp->ops.get = _sde_crtc_hw_blk_get;
304 rp->ops.put = _sde_crtc_hw_blk_put;
Alan Kwong310e9b02017-08-03 02:04:07 -0400305 list_add_tail(&rp->rp_list, rp->rp_head);
306 mutex_unlock(rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700307}
308
309/**
Alan Kwong310e9b02017-08-03 02:04:07 -0400310 * _sde_crtc_rp_add_no_lock - add given resource to resource pool without lock
Alan Kwongcdb2f282017-03-18 13:42:06 -0700311 * @rp: Pointer to original resource pool
312 * @type: Resource type
313 * @tag: Search tag for given resource
314 * @val: Resource handle
315 * @ops: Resource callback operations
316 * return: 0 if success; error code otherwise
317 */
Alan Kwong310e9b02017-08-03 02:04:07 -0400318static int _sde_crtc_rp_add_no_lock(struct sde_crtc_respool *rp, u32 type,
319 u64 tag, void *val, struct sde_crtc_res_ops *ops)
Alan Kwongcdb2f282017-03-18 13:42:06 -0700320{
321 struct sde_crtc_res *res;
322 struct drm_crtc *crtc;
323
324 if (!rp || !ops) {
325 SDE_ERROR("invalid resource pool/ops\n");
326 return -EINVAL;
327 }
328
329 crtc = _sde_crtc_rp_to_crtc(rp);
330 if (!crtc) {
331 SDE_ERROR("invalid crtc\n");
332 return -EINVAL;
333 }
334
335 list_for_each_entry(res, &rp->res_list, list) {
336 if (res->type != type || res->tag != tag)
337 continue;
338 SDE_ERROR("crtc%d.%u already exist res:0x%x/0x%llx/%pK/%d\n",
339 crtc->base.id, rp->sequence_id,
340 res->type, res->tag, res->val,
341 atomic_read(&res->refcount));
342 return -EEXIST;
343 }
344 res = kzalloc(sizeof(struct sde_crtc_res), GFP_KERNEL);
345 if (!res)
346 return -ENOMEM;
347 INIT_LIST_HEAD(&res->list);
348 atomic_set(&res->refcount, 1);
349 res->type = type;
350 res->tag = tag;
351 res->val = val;
352 res->ops = *ops;
353 list_add_tail(&res->list, &rp->res_list);
354 SDE_DEBUG("crtc%d.%u added res:0x%x/0x%llx\n",
355 crtc->base.id, rp->sequence_id, type, tag);
356 return 0;
357}
358
359/**
Alan Kwong310e9b02017-08-03 02:04:07 -0400360 * _sde_crtc_rp_add - add given resource to resource pool
361 * @rp: Pointer to original resource pool
362 * @type: Resource type
363 * @tag: Search tag for given resource
364 * @val: Resource handle
365 * @ops: Resource callback operations
366 * return: 0 if success; error code otherwise
367 */
368static int _sde_crtc_rp_add(struct sde_crtc_respool *rp, u32 type, u64 tag,
369 void *val, struct sde_crtc_res_ops *ops)
370{
371 int rc;
372
373 if (!rp) {
374 SDE_ERROR("invalid resource pool\n");
375 return -EINVAL;
376 }
377
378 mutex_lock(rp->rp_lock);
379 rc = _sde_crtc_rp_add_no_lock(rp, type, tag, val, ops);
380 mutex_unlock(rp->rp_lock);
381 return rc;
382}
383
384/**
Alan Kwongcdb2f282017-03-18 13:42:06 -0700385 * _sde_crtc_rp_get - look up the resource in the given resource pool and
 386 * obtain it if available; otherwise, obtain the resource from the global pool
387 * @rp: Pointer to original resource pool
388 * @type: Resource type
389 * @tag: Search tag for given resource
390 * return: Resource handle if success; pointer error or null otherwise
391 */
392static void *_sde_crtc_rp_get(struct sde_crtc_respool *rp, u32 type, u64 tag)
393{
Alan Kwong310e9b02017-08-03 02:04:07 -0400394 struct sde_crtc_respool *old_rp;
Alan Kwongcdb2f282017-03-18 13:42:06 -0700395 struct sde_crtc_res *res;
396 void *val = NULL;
397 int rc;
398 struct drm_crtc *crtc;
399
400 if (!rp) {
401 SDE_ERROR("invalid resource pool\n");
402 return NULL;
403 }
404
405 crtc = _sde_crtc_rp_to_crtc(rp);
406 if (!crtc) {
407 SDE_ERROR("invalid crtc\n");
408 return NULL;
409 }
410
Alan Kwong310e9b02017-08-03 02:04:07 -0400411 mutex_lock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700412 list_for_each_entry(res, &rp->res_list, list) {
413 if (res->type != type || res->tag != tag)
414 continue;
415 SDE_DEBUG("crtc%d.%u found res:0x%x/0x%llx/%pK/%d\n",
416 crtc->base.id, rp->sequence_id,
417 res->type, res->tag, res->val,
418 atomic_read(&res->refcount));
419 atomic_inc(&res->refcount);
420 res->flags &= ~SDE_CRTC_RES_FLAG_FREE;
Alan Kwong310e9b02017-08-03 02:04:07 -0400421 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700422 return res->val;
423 }
424 list_for_each_entry(res, &rp->res_list, list) {
425 if (res->type != type || !(res->flags & SDE_CRTC_RES_FLAG_FREE))
426 continue;
427 SDE_DEBUG("crtc%d.%u retag res:0x%x/0x%llx/%pK/%d\n",
428 crtc->base.id, rp->sequence_id,
429 res->type, res->tag, res->val,
430 atomic_read(&res->refcount));
431 atomic_inc(&res->refcount);
432 res->tag = tag;
433 res->flags &= ~SDE_CRTC_RES_FLAG_FREE;
Alan Kwong310e9b02017-08-03 02:04:07 -0400434 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700435 return res->val;
436 }
Alan Kwong310e9b02017-08-03 02:04:07 -0400437 /* not in this rp, try to grab from global pool */
Alan Kwongcdb2f282017-03-18 13:42:06 -0700438 if (rp->ops.get)
439 val = rp->ops.get(NULL, type, -1);
Alan Kwong310e9b02017-08-03 02:04:07 -0400440 if (!IS_ERR_OR_NULL(val))
441 goto add_res;
442 /*
 443 * Search older resource pools for a hw blk with matching type;
 444 * this is necessary when the resource is still held by previous
 445 * states of this object that have not yet been cleaned up.
446 *
447 * This enables searching of all resources currently owned
448 * by this crtc even though the resource might not be used
449 * in the current atomic state. This allows those resources
450 * to be re-acquired by the new atomic state immediately
451 * without waiting for the resources to be fully released.
452 */
453 else if (IS_ERR_OR_NULL(val) && (type < SDE_HW_BLK_MAX)) {
454 list_for_each_entry(old_rp, rp->rp_head, rp_list) {
455 if (old_rp == rp)
456 continue;
457
458 list_for_each_entry(res, &old_rp->res_list, list) {
459 if (res->type != type)
460 continue;
461 SDE_DEBUG(
462 "crtc%d.%u found res:0x%x//%pK/ in crtc%d.%d\n",
463 crtc->base.id,
464 rp->sequence_id,
465 res->type, res->val,
466 crtc->base.id,
467 old_rp->sequence_id);
468 SDE_EVT32_VERBOSE(crtc->base.id,
469 rp->sequence_id,
470 res->type, res->val,
471 crtc->base.id,
472 old_rp->sequence_id);
473 if (res->ops.get)
474 res->ops.get(res->val, 0, -1);
475 val = res->val;
476 break;
477 }
478
479 if (!IS_ERR_OR_NULL(val))
480 break;
481 }
482 }
Alan Kwongcdb2f282017-03-18 13:42:06 -0700483 if (IS_ERR_OR_NULL(val)) {
Alan Kwong42e35052017-05-05 06:52:51 -0700484 SDE_DEBUG("crtc%d.%u failed to get res:0x%x//\n",
Alan Kwongcdb2f282017-03-18 13:42:06 -0700485 crtc->base.id, rp->sequence_id, type);
Alan Kwong310e9b02017-08-03 02:04:07 -0400486 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700487 return NULL;
488 }
Alan Kwong310e9b02017-08-03 02:04:07 -0400489add_res:
490 rc = _sde_crtc_rp_add_no_lock(rp, type, tag, val, &rp->ops);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700491 if (rc) {
492 SDE_ERROR("crtc%d.%u failed to add res:0x%x/0x%llx\n",
493 crtc->base.id, rp->sequence_id, type, tag);
494 if (rp->ops.put)
495 rp->ops.put(val);
496 val = NULL;
497 }
Alan Kwong310e9b02017-08-03 02:04:07 -0400498 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700499 return val;
500}
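
/*
 * Illustrative sketch (not part of the driver) of the lookup order that
 * _sde_crtc_rp_get() implements, for a hypothetical block type HW_BLK and
 * tag 0x10:
 *
 *	val = _sde_crtc_rp_get(rp, HW_BLK, 0x10);
 *
 *	1. exact match: a resource of type HW_BLK with tag 0x10 already in
 *	   rp->res_list is returned and its refcount incremented;
 *	2. retag: otherwise a FREE resource of type HW_BLK in rp->res_list
 *	   is re-tagged to 0x10 and returned;
 *	3. global pool: otherwise rp->ops.get(NULL, HW_BLK, -1) is tried;
 *	4. older states: otherwise the pools of previous, not yet released
 *	   states of this crtc (linked via rp->rp_head) are searched for any
 *	   resource of type HW_BLK.
 *
 * Whatever is found is then registered in this pool with the requested tag
 * via _sde_crtc_rp_add_no_lock().
 */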
501
502/**
503 * _sde_crtc_rp_put - return given resource to resource pool
504 * @rp: Pointer to original resource pool
505 * @type: Resource type
506 * @tag: Search tag for given resource
507 * return: None
508 */
509static void _sde_crtc_rp_put(struct sde_crtc_respool *rp, u32 type, u64 tag)
510{
511 struct sde_crtc_res *res, *next;
512 struct drm_crtc *crtc;
513
514 if (!rp) {
515 SDE_ERROR("invalid resource pool\n");
516 return;
517 }
518
519 crtc = _sde_crtc_rp_to_crtc(rp);
520 if (!crtc) {
521 SDE_ERROR("invalid crtc\n");
522 return;
523 }
524
Alan Kwong310e9b02017-08-03 02:04:07 -0400525 mutex_lock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700526 list_for_each_entry_safe(res, next, &rp->res_list, list) {
527 if (res->type != type || res->tag != tag)
528 continue;
529 SDE_DEBUG("crtc%d.%u found res:0x%x/0x%llx/%pK/%d\n",
530 crtc->base.id, rp->sequence_id,
531 res->type, res->tag, res->val,
532 atomic_read(&res->refcount));
533 if (res->flags & SDE_CRTC_RES_FLAG_FREE)
534 SDE_ERROR(
535 "crtc%d.%u already free res:0x%x/0x%llx/%pK/%d\n",
536 crtc->base.id, rp->sequence_id,
537 res->type, res->tag, res->val,
538 atomic_read(&res->refcount));
539 else if (atomic_dec_return(&res->refcount) == 0)
540 res->flags |= SDE_CRTC_RES_FLAG_FREE;
541
Alan Kwong310e9b02017-08-03 02:04:07 -0400542 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700543 return;
544 }
545 SDE_ERROR("crtc%d.%u not found res:0x%x/0x%llx\n",
546 crtc->base.id, rp->sequence_id, type, tag);
Alan Kwong310e9b02017-08-03 02:04:07 -0400547 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700548}
549
550int sde_crtc_res_add(struct drm_crtc_state *state, u32 type, u64 tag,
551 void *val, struct sde_crtc_res_ops *ops)
552{
553 struct sde_crtc_respool *rp;
554
555 if (!state) {
556 SDE_ERROR("invalid parameters\n");
557 return -EINVAL;
558 }
559
560 rp = &to_sde_crtc_state(state)->rp;
561 return _sde_crtc_rp_add(rp, type, tag, val, ops);
562}
563
564void *sde_crtc_res_get(struct drm_crtc_state *state, u32 type, u64 tag)
565{
566 struct sde_crtc_respool *rp;
567 void *val;
568
569 if (!state) {
570 SDE_ERROR("invalid parameters\n");
571 return NULL;
572 }
573
574 rp = &to_sde_crtc_state(state)->rp;
575 val = _sde_crtc_rp_get(rp, type, tag);
576 if (IS_ERR(val)) {
577 SDE_ERROR("failed to get res type:0x%x:0x%llx\n",
578 type, tag);
579 return NULL;
580 }
581
582 return val;
583}
584
585void sde_crtc_res_put(struct drm_crtc_state *state, u32 type, u64 tag)
586{
587 struct sde_crtc_respool *rp;
588
589 if (!state) {
590 SDE_ERROR("invalid parameters\n");
591 return;
592 }
593
594 rp = &to_sde_crtc_state(state)->rp;
595 _sde_crtc_rp_put(rp, type, tag);
596}
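
/*
 * Example (illustrative sketch, not part of the driver): typical life cycle
 * of a per-crtc-state resource using the helpers above. MY_BLK_TYPE, MY_TAG
 * and hw_blk are hypothetical; real callers pass the hardware block type,
 * a caller-chosen tag and the block's get/put callbacks.
 *
 *	// register an externally acquired block with this crtc state
 *	sde_crtc_res_add(crtc->state, MY_BLK_TYPE, MY_TAG, hw_blk, &ops);
 *
 *	// later users look it up by type/tag; the refcount is incremented
 *	blk = sde_crtc_res_get(crtc->state, MY_BLK_TYPE, MY_TAG);
 *
 *	// each get is balanced by a put; once the refcount hits zero the
 *	// entry is marked FREE and reclaimed when the pool frees unused
 *	// entries or is destroyed
 *	sde_crtc_res_put(crtc->state, MY_BLK_TYPE, MY_TAG);
 */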
597
Clarence Ipa18d4832017-03-13 12:35:44 -0700598static void _sde_crtc_deinit_events(struct sde_crtc *sde_crtc)
599{
600 if (!sde_crtc)
601 return;
Clarence Ipa18d4832017-03-13 12:35:44 -0700602}
603
Dhaval Patel91399a52017-11-27 22:21:27 -0800604static ssize_t vsync_event_show(struct device *device,
605 struct device_attribute *attr, char *buf)
606{
607 struct drm_crtc *crtc;
608 struct sde_crtc *sde_crtc;
609
610 if (!device || !buf) {
611 SDE_ERROR("invalid input param(s)\n");
612 return -EAGAIN;
613 }
614
615 crtc = dev_get_drvdata(device);
616 sde_crtc = to_sde_crtc(crtc);
617 return scnprintf(buf, PAGE_SIZE, "VSYNC=%llu\n",
618 ktime_to_ns(sde_crtc->vblank_last_cb_time));
619}
620
621static DEVICE_ATTR_RO(vsync_event);
622static struct attribute *sde_crtc_dev_attrs[] = {
623 &dev_attr_vsync_event.attr,
624 NULL
625};
626
627static const struct attribute_group sde_crtc_attr_group = {
628 .attrs = sde_crtc_dev_attrs,
629};
630
631static const struct attribute_group *sde_crtc_attr_groups[] = {
632 &sde_crtc_attr_group,
633 NULL,
634};
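
/*
 * Example (illustrative, user space): the timestamp exported by
 * vsync_event_show() can be read through sysfs once the crtc device is
 * registered. The path below is an assumption; only the "VSYNC=%llu"
 * payload format is defined here.
 *
 *	char buf[64] = {0};
 *	int fd = open(".../sde-crtc-0/vsync_event", O_RDONLY);
 *
 *	if (fd >= 0 && read(fd, buf, sizeof(buf) - 1) > 0)
 *		printf("%s", buf);	// e.g. "VSYNC=123456789012"
 */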
635
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700636static void sde_crtc_destroy(struct drm_crtc *crtc)
637{
638 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
639
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -0400640 SDE_DEBUG("\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -0400641
642 if (!crtc)
643 return;
644
Dhaval Patel91399a52017-11-27 22:21:27 -0800645 if (sde_crtc->vsync_event_sf)
646 sysfs_put(sde_crtc->vsync_event_sf);
647 if (sde_crtc->sysfs_dev)
648 device_unregister(sde_crtc->sysfs_dev);
649
Dhaval Patele4a5dda2016-10-13 19:29:30 -0700650 if (sde_crtc->blob_info)
651 drm_property_unreference_blob(sde_crtc->blob_info);
Clarence Ip7a753bb2016-07-07 11:47:44 -0400652 msm_property_destroy(&sde_crtc->property_info);
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -0700653 sde_cp_crtc_destroy_properties(crtc);
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -0700654
Clarence Ip24f80662016-06-13 19:05:32 -0400655 sde_fence_deinit(&sde_crtc->output_fence);
Clarence Ipa18d4832017-03-13 12:35:44 -0700656 _sde_crtc_deinit_events(sde_crtc);
Clarence Ip7a753bb2016-07-07 11:47:44 -0400657
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700658 drm_crtc_cleanup(crtc);
Clarence Ip7f70ce42017-03-20 06:53:46 -0700659 mutex_destroy(&sde_crtc->crtc_lock);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700660 kfree(sde_crtc);
661}
662
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700663static bool sde_crtc_mode_fixup(struct drm_crtc *crtc,
664 const struct drm_display_mode *mode,
665 struct drm_display_mode *adjusted_mode)
666{
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -0400667 SDE_DEBUG("\n");
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -0400668
Raviteja Tamatam68892de2017-06-20 04:47:19 +0530669 if ((msm_is_mode_seamless(adjusted_mode) ||
670 msm_is_mode_seamless_vrr(adjusted_mode)) &&
671 (!crtc->enabled)) {
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -0400672 SDE_ERROR("crtc state prevents seamless transition\n");
673 return false;
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -0400674 }
675
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700676 return true;
677}
678
Dhaval Patel48c76022016-09-01 17:51:23 -0700679static void _sde_crtc_setup_blend_cfg(struct sde_crtc_mixer *mixer,
680 struct sde_plane_state *pstate, struct sde_format *format)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400681{
Dhaval Patel48c76022016-09-01 17:51:23 -0700682 uint32_t blend_op, fg_alpha, bg_alpha;
683 uint32_t blend_type;
Dhaval Patel44f12472016-08-29 12:19:47 -0700684 struct sde_hw_mixer *lm = mixer->hw_lm;
685
Dhaval Patel48c76022016-09-01 17:51:23 -0700686 /* default to opaque blending */
687 fg_alpha = sde_plane_get_property(pstate, PLANE_PROP_ALPHA);
688 bg_alpha = 0xFF - fg_alpha;
689 blend_op = SDE_BLEND_FG_ALPHA_FG_CONST | SDE_BLEND_BG_ALPHA_BG_CONST;
690 blend_type = sde_plane_get_property(pstate, PLANE_PROP_BLEND_OP);
Dhaval Patel44f12472016-08-29 12:19:47 -0700691
Dhaval Patel48c76022016-09-01 17:51:23 -0700692 SDE_DEBUG("blend type:0x%x blend alpha:0x%x\n", blend_type, fg_alpha);
693
694 switch (blend_type) {
695
696 case SDE_DRM_BLEND_OP_OPAQUE:
697 blend_op = SDE_BLEND_FG_ALPHA_FG_CONST |
698 SDE_BLEND_BG_ALPHA_BG_CONST;
699 break;
700
701 case SDE_DRM_BLEND_OP_PREMULTIPLIED:
702 if (format->alpha_enable) {
703 blend_op = SDE_BLEND_FG_ALPHA_FG_CONST |
704 SDE_BLEND_BG_ALPHA_FG_PIXEL;
705 if (fg_alpha != 0xff) {
706 bg_alpha = fg_alpha;
707 blend_op |= SDE_BLEND_BG_MOD_ALPHA |
708 SDE_BLEND_BG_INV_MOD_ALPHA;
709 } else {
710 blend_op |= SDE_BLEND_BG_INV_ALPHA;
711 }
712 }
713 break;
714
715 case SDE_DRM_BLEND_OP_COVERAGE:
716 if (format->alpha_enable) {
717 blend_op = SDE_BLEND_FG_ALPHA_FG_PIXEL |
718 SDE_BLEND_BG_ALPHA_FG_PIXEL;
719 if (fg_alpha != 0xff) {
720 bg_alpha = fg_alpha;
721 blend_op |= SDE_BLEND_FG_MOD_ALPHA |
722 SDE_BLEND_FG_INV_MOD_ALPHA |
723 SDE_BLEND_BG_MOD_ALPHA |
724 SDE_BLEND_BG_INV_MOD_ALPHA;
725 } else {
726 blend_op |= SDE_BLEND_BG_INV_ALPHA;
727 }
728 }
729 break;
730 default:
731 /* do nothing */
732 break;
Clarence Ipd9f9fa62016-09-09 13:42:32 -0400733 }
Dhaval Patel48c76022016-09-01 17:51:23 -0700734
735 lm->ops.setup_blend_config(lm, pstate->stage, fg_alpha,
736 bg_alpha, blend_op);
Dhaval Patel6c666622017-03-21 23:02:59 -0700737 SDE_DEBUG(
738 "format: %4.4s, alpha_enable %u fg alpha:0x%x bg alpha:0x%x blend_op:0x%x\n",
739 (char *) &format->base.pixel_format,
Dhaval Patel48c76022016-09-01 17:51:23 -0700740 format->alpha_enable, fg_alpha, bg_alpha, blend_op);
741}
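
/*
 * For reference, the blend_op flags programmed above are intended to realize
 * the usual composition equations (fg = plane pixel, bg = layers below,
 * ca = constant plane alpha from PLANE_PROP_ALPHA, normalized to [0, 1]):
 *
 *	OPAQUE:		out = fg.rgb
 *	PREMULTIPLIED:	out = ca * fg.rgb + (1 - ca * fg.a) * bg.rgb
 *	COVERAGE:	out = ca * fg.a * fg.rgb + (1 - ca * fg.a) * bg.rgb
 *
 * This is an illustrative summary of standard DRM blend-mode semantics; the
 * authoritative behavior is whatever the SDE_BLEND_* encoding selects in the
 * mixer hardware.
 */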
742
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800743static void _sde_crtc_setup_dim_layer_cfg(struct drm_crtc *crtc,
744 struct sde_crtc *sde_crtc, struct sde_crtc_mixer *mixer,
745 struct sde_hw_dim_layer *dim_layer)
746{
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500747 struct sde_crtc_state *cstate;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800748 struct sde_hw_mixer *lm;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800749 struct sde_hw_dim_layer split_dim_layer;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800750 int i;
751
752 if (!dim_layer->rect.w || !dim_layer->rect.h) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -0700753 SDE_DEBUG("empty dim_layer\n");
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800754 return;
755 }
756
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500757 cstate = to_sde_crtc_state(crtc->state);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800758
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -0700759 SDE_DEBUG("dim_layer - flags:%d, stage:%d\n",
760 dim_layer->flags, dim_layer->stage);
761
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800762 split_dim_layer.stage = dim_layer->stage;
763 split_dim_layer.color_fill = dim_layer->color_fill;
764
765 /*
766 * traverse through the layer mixers attached to crtc and find the
767 * intersecting dim layer rect in each LM and program accordingly.
768 */
769 for (i = 0; i < sde_crtc->num_mixers; i++) {
770 split_dim_layer.flags = dim_layer->flags;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800771
Lloyd Atkinson9563bb12017-11-17 19:10:44 -0500772 sde_kms_rect_intersect(&cstate->lm_roi[i], &dim_layer->rect,
Lloyd Atkinsone0e11e22017-01-17 12:08:48 -0500773 &split_dim_layer.rect);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500774 if (sde_kms_rect_is_null(&split_dim_layer.rect)) {
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800775 /*
776 * no extra programming required for non-intersecting
777 * layer mixers with INCLUSIVE dim layer
778 */
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500779 if (split_dim_layer.flags & SDE_DRM_DIM_LAYER_INCLUSIVE)
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800780 continue;
781
782 /*
783 * program the other non-intersecting layer mixers with
784 * INCLUSIVE dim layer of full size for uniformity
785 * with EXCLUSIVE dim layer config.
786 */
787 split_dim_layer.flags &= ~SDE_DRM_DIM_LAYER_EXCLUSIVE;
788 split_dim_layer.flags |= SDE_DRM_DIM_LAYER_INCLUSIVE;
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500789 memcpy(&split_dim_layer.rect, &cstate->lm_bounds[i],
790 sizeof(split_dim_layer.rect));
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800791
792 } else {
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500793 split_dim_layer.rect.x =
794 split_dim_layer.rect.x -
Lloyd Atkinson9563bb12017-11-17 19:10:44 -0500795 cstate->lm_roi[i].x;
796 split_dim_layer.rect.y =
797 split_dim_layer.rect.y -
798 cstate->lm_roi[i].y;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800799 }
800
Lloyd Atkinson9563bb12017-11-17 19:10:44 -0500801 SDE_EVT32_VERBOSE(DRMID(crtc),
802 cstate->lm_roi[i].x,
803 cstate->lm_roi[i].y,
804 cstate->lm_roi[i].w,
805 cstate->lm_roi[i].h,
806 dim_layer->rect.x,
807 dim_layer->rect.y,
808 dim_layer->rect.w,
809 dim_layer->rect.h,
810 split_dim_layer.rect.x,
811 split_dim_layer.rect.y,
812 split_dim_layer.rect.w,
813 split_dim_layer.rect.h);
814
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -0700815 SDE_DEBUG("split_dim_layer - LM:%d, rect:{%d,%d,%d,%d}\n",
816 i, split_dim_layer.rect.x, split_dim_layer.rect.y,
817 split_dim_layer.rect.w, split_dim_layer.rect.h);
818
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800819 lm = mixer[i].hw_lm;
820 mixer[i].mixer_op_mode |= 1 << split_dim_layer.stage;
821 lm->ops.setup_dim_layer(lm, &split_dim_layer);
822 }
823}
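
/*
 * Worked example (illustrative numbers): with two mixers whose ROIs are
 * lm_roi[0] = {0, 0, 1080, 1920} and lm_roi[1] = {1080, 0, 1080, 1920}, an
 * INCLUSIVE dim layer rect of {900, 100, 400, 300} is split as:
 *
 *	LM0: intersection {900, 100, 180, 300}, made LM-relative by
 *	     subtracting lm_roi[0].x/y -> {900, 100, 180, 300}
 *	LM1: intersection {1080, 100, 220, 300} -> {0, 100, 220, 300}
 *
 * A mixer whose ROI does not intersect an EXCLUSIVE dim layer is instead
 * programmed with a full-size INCLUSIVE dim layer, as noted in the code
 * above.
 */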
824
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400825void sde_crtc_get_crtc_roi(struct drm_crtc_state *state,
826 const struct sde_rect **crtc_roi)
827{
828 struct sde_crtc_state *crtc_state;
829
830 if (!state || !crtc_roi)
831 return;
832
833 crtc_state = to_sde_crtc_state(state);
834 *crtc_roi = &crtc_state->crtc_roi;
835}
836
Lloyd Atkinson440728e2017-11-22 10:07:08 -0500837bool sde_crtc_is_crtc_roi_dirty(struct drm_crtc_state *state)
838{
839 struct sde_crtc_state *cstate;
840 struct sde_crtc *sde_crtc;
841
842 if (!state || !state->crtc)
843 return false;
844
845 sde_crtc = to_sde_crtc(state->crtc);
846 cstate = to_sde_crtc_state(state);
847
848 return msm_property_is_dirty(&sde_crtc->property_info,
849 &cstate->property_state, CRTC_PROP_ROI_V1);
850}
851
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400852static int _sde_crtc_set_roi_v1(struct drm_crtc_state *state,
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +0530853 void __user *usr_ptr)
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400854{
855 struct drm_crtc *crtc;
856 struct sde_crtc_state *cstate;
857 struct sde_drm_roi_v1 roi_v1;
858 int i;
859
860 if (!state) {
861 SDE_ERROR("invalid args\n");
862 return -EINVAL;
863 }
864
865 cstate = to_sde_crtc_state(state);
866 crtc = cstate->base.crtc;
867
868 memset(&cstate->user_roi_list, 0, sizeof(cstate->user_roi_list));
869
870 if (!usr_ptr) {
871 SDE_DEBUG("crtc%d: rois cleared\n", DRMID(crtc));
872 return 0;
873 }
874
875 if (copy_from_user(&roi_v1, usr_ptr, sizeof(roi_v1))) {
876 SDE_ERROR("crtc%d: failed to copy roi_v1 data\n", DRMID(crtc));
877 return -EINVAL;
878 }
879
880 SDE_DEBUG("crtc%d: num_rects %d\n", DRMID(crtc), roi_v1.num_rects);
881
882 if (roi_v1.num_rects == 0) {
883 SDE_DEBUG("crtc%d: rois cleared\n", DRMID(crtc));
884 return 0;
885 }
886
887 if (roi_v1.num_rects > SDE_MAX_ROI_V1) {
888 SDE_ERROR("crtc%d: too many rects specified: %d\n", DRMID(crtc),
889 roi_v1.num_rects);
890 return -EINVAL;
891 }
892
893 cstate->user_roi_list.num_rects = roi_v1.num_rects;
894 for (i = 0; i < roi_v1.num_rects; ++i) {
895 cstate->user_roi_list.roi[i] = roi_v1.roi[i];
896 SDE_DEBUG("crtc%d: roi%d: roi (%d,%d) (%d,%d)\n",
897 DRMID(crtc), i,
898 cstate->user_roi_list.roi[i].x1,
899 cstate->user_roi_list.roi[i].y1,
900 cstate->user_roi_list.roi[i].x2,
901 cstate->user_roi_list.roi[i].y2);
Lloyd Atkinson5ca13aa2017-10-26 18:12:20 -0400902 SDE_EVT32_VERBOSE(DRMID(crtc),
903 cstate->user_roi_list.roi[i].x1,
904 cstate->user_roi_list.roi[i].y1,
905 cstate->user_roi_list.roi[i].x2,
906 cstate->user_roi_list.roi[i].y2);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400907 }
908
909 return 0;
910}
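
/*
 * Example (illustrative sketch): user space supplies ROIs by attaching a
 * struct sde_drm_roi_v1 to the crtc ROI_V1 property in an atomic commit.
 * The property-id lookup and libdrm plumbing are assumptions shown only for
 * illustration.
 *
 *	struct sde_drm_roi_v1 roi = {
 *		.num_rects = 1,
 *		.roi[0] = { .x1 = 0, .y1 = 0, .x2 = 1080, .y2 = 400 },
 *	};
 *
 *	// drmModeAtomicAddProperty(req, crtc_id, roi_v1_prop_id,
 *	//			    (uint64_t)(uintptr_t)&roi);
 *
 * _sde_crtc_set_roi_v1() copies the structure from that user pointer and
 * validates num_rects against SDE_MAX_ROI_V1.
 */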
911
Ingrid Gallardo83532222017-06-02 16:48:51 -0700912static bool _sde_crtc_setup_is_3dmux_dsc(struct drm_crtc_state *state)
913{
914 int i;
915 struct sde_crtc_state *cstate;
916 bool is_3dmux_dsc = false;
917
918 cstate = to_sde_crtc_state(state);
919
920 for (i = 0; i < cstate->num_connectors; i++) {
921 struct drm_connector *conn = cstate->connectors[i];
922
923 if (sde_connector_get_topology_name(conn) ==
924 SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC)
925 is_3dmux_dsc = true;
926 }
927
928 return is_3dmux_dsc;
929}
930
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400931static int _sde_crtc_set_crtc_roi(struct drm_crtc *crtc,
932 struct drm_crtc_state *state)
933{
934 struct drm_connector *conn;
935 struct drm_connector_state *conn_state;
936 struct sde_crtc *sde_crtc;
937 struct sde_crtc_state *crtc_state;
938 struct sde_rect *crtc_roi;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400939 int i, num_attached_conns = 0;
Lloyd Atkinson440728e2017-11-22 10:07:08 -0500940 bool is_crtc_roi_dirty;
941 bool is_any_conn_roi_dirty;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400942
943 if (!crtc || !state)
944 return -EINVAL;
945
946 sde_crtc = to_sde_crtc(crtc);
947 crtc_state = to_sde_crtc_state(state);
948 crtc_roi = &crtc_state->crtc_roi;
949
Lloyd Atkinson440728e2017-11-22 10:07:08 -0500950 is_crtc_roi_dirty = sde_crtc_is_crtc_roi_dirty(state);
951 is_any_conn_roi_dirty = false;
952
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400953 for_each_connector_in_state(state->state, conn, conn_state, i) {
Lloyd Atkinson440728e2017-11-22 10:07:08 -0500954 struct sde_connector *sde_conn;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400955 struct sde_connector_state *sde_conn_state;
Lloyd Atkinson5ca13aa2017-10-26 18:12:20 -0400956 struct sde_rect conn_roi;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400957
958 if (!conn_state || conn_state->crtc != crtc)
959 continue;
960
961 if (num_attached_conns) {
962 SDE_ERROR(
963 "crtc%d: unsupported: roi on crtc w/ >1 connectors\n",
964 DRMID(crtc));
965 return -EINVAL;
966 }
967 ++num_attached_conns;
968
Lloyd Atkinson440728e2017-11-22 10:07:08 -0500969 sde_conn = to_sde_connector(conn_state->connector);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400970 sde_conn_state = to_sde_connector_state(conn_state);
971
Lloyd Atkinson440728e2017-11-22 10:07:08 -0500972 is_any_conn_roi_dirty = is_any_conn_roi_dirty ||
973 msm_property_is_dirty(
974 &sde_conn->property_info,
975 &sde_conn_state->property_state,
976 CONNECTOR_PROP_ROI_V1);
977
Ingrid Gallardo83532222017-06-02 16:48:51 -0700978 /*
 979 * The current driver only supports matching connector and crtc sizes,
 980 * but if support for different sizes is added, the driver needs to
 981 * check the connector roi here to make sure it is full screen for the
 982 * dsc 3d-mux topology, which doesn't support partial update.
983 */
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400984 if (memcmp(&sde_conn_state->rois, &crtc_state->user_roi_list,
985 sizeof(crtc_state->user_roi_list))) {
986 SDE_ERROR("%s: crtc -> conn roi scaling unsupported\n",
987 sde_crtc->name);
988 return -EINVAL;
989 }
Lloyd Atkinson5ca13aa2017-10-26 18:12:20 -0400990
991 sde_kms_rect_merge_rectangles(&sde_conn_state->rois, &conn_roi);
992 SDE_EVT32_VERBOSE(DRMID(crtc), DRMID(conn),
993 conn_roi.x, conn_roi.y,
994 conn_roi.w, conn_roi.h);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400995 }
996
Lloyd Atkinson440728e2017-11-22 10:07:08 -0500997 /*
998 * Check against CRTC ROI and Connector ROI not being updated together.
999 * This restriction should be relaxed when Connector ROI scaling is
1000 * supported.
1001 */
1002 if (is_any_conn_roi_dirty != is_crtc_roi_dirty) {
1003 SDE_ERROR("connector/crtc rois not updated together\n");
1004 return -EINVAL;
1005 }
1006
Lloyd Atkinsonc2baf412017-04-19 17:53:09 -04001007 sde_kms_rect_merge_rectangles(&crtc_state->user_roi_list, crtc_roi);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001008
Lloyd Atkinsonb0e0c2b2017-11-21 09:20:35 -05001009 /* clear the ROI to null if it matches full screen anyways */
1010 if (crtc_roi->x == 0 && crtc_roi->y == 0 &&
1011 crtc_roi->w == state->adjusted_mode.hdisplay &&
1012 crtc_roi->h == state->adjusted_mode.vdisplay)
1013 memset(crtc_roi, 0, sizeof(*crtc_roi));
1014
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001015 SDE_DEBUG("%s: crtc roi (%d,%d,%d,%d)\n", sde_crtc->name,
1016 crtc_roi->x, crtc_roi->y, crtc_roi->w, crtc_roi->h);
Lloyd Atkinson5ca13aa2017-10-26 18:12:20 -04001017 SDE_EVT32_VERBOSE(DRMID(crtc), crtc_roi->x, crtc_roi->y, crtc_roi->w,
1018 crtc_roi->h);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001019
1020 return 0;
1021}
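
/*
 * Worked example (illustrative numbers, assuming the merge computes the
 * bounding box of the listed rects): with user_roi_list containing
 * {0, 0, 540, 960} and {540, 960, 1080, 1920} (x1, y1, x2, y2), crtc_roi
 * becomes {0, 0, 1080, 1920} (x, y, w, h). On a 1080x1920 mode that matches
 * the full screen, so the ROI is cleared to null and the frame is handled
 * as a full update.
 */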
1022
Lloyd Atkinson77382202017-02-01 14:59:43 -05001023static int _sde_crtc_check_autorefresh(struct drm_crtc *crtc,
1024 struct drm_crtc_state *state)
1025{
1026 struct sde_crtc *sde_crtc;
1027 struct sde_crtc_state *crtc_state;
1028 struct drm_connector *conn;
1029 struct drm_connector_state *conn_state;
1030 int i;
1031
1032 if (!crtc || !state)
1033 return -EINVAL;
1034
1035 sde_crtc = to_sde_crtc(crtc);
1036 crtc_state = to_sde_crtc_state(state);
1037
1038 if (sde_kms_rect_is_null(&crtc_state->crtc_roi))
1039 return 0;
1040
1041 /* partial update active, check if autorefresh is also requested */
1042 for_each_connector_in_state(state->state, conn, conn_state, i) {
1043 uint64_t autorefresh;
1044
1045 if (!conn_state || conn_state->crtc != crtc)
1046 continue;
1047
1048 autorefresh = sde_connector_get_property(conn_state,
1049 CONNECTOR_PROP_AUTOREFRESH);
1050 if (autorefresh) {
1051 SDE_ERROR(
1052 "%s: autorefresh & partial crtc roi incompatible %llu\n",
1053 sde_crtc->name, autorefresh);
1054 return -EINVAL;
1055 }
1056 }
1057
1058 return 0;
1059}
1060
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001061static int _sde_crtc_set_lm_roi(struct drm_crtc *crtc,
1062 struct drm_crtc_state *state, int lm_idx)
1063{
1064 struct sde_crtc *sde_crtc;
1065 struct sde_crtc_state *crtc_state;
1066 const struct sde_rect *crtc_roi;
1067 const struct sde_rect *lm_bounds;
1068 struct sde_rect *lm_roi;
1069
1070 if (!crtc || !state || lm_idx >= ARRAY_SIZE(crtc_state->lm_bounds))
1071 return -EINVAL;
1072
1073 sde_crtc = to_sde_crtc(crtc);
1074 crtc_state = to_sde_crtc_state(state);
1075 crtc_roi = &crtc_state->crtc_roi;
1076 lm_bounds = &crtc_state->lm_bounds[lm_idx];
1077 lm_roi = &crtc_state->lm_roi[lm_idx];
1078
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001079 if (sde_kms_rect_is_null(crtc_roi))
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001080 memcpy(lm_roi, lm_bounds, sizeof(*lm_roi));
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001081 else
1082 sde_kms_rect_intersect(crtc_roi, lm_bounds, lm_roi);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001083
1084 SDE_DEBUG("%s: lm%d roi (%d,%d,%d,%d)\n", sde_crtc->name, lm_idx,
1085 lm_roi->x, lm_roi->y, lm_roi->w, lm_roi->h);
1086
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301087 /*
1088 * partial update is not supported with 3dmux dsc or dest scaler.
1089 * hence, crtc roi must match the mixer dimensions.
1090 */
1091 if (crtc_state->num_ds_enabled ||
1092 _sde_crtc_setup_is_3dmux_dsc(state)) {
1093 if (memcmp(lm_roi, lm_bounds, sizeof(struct sde_rect))) {
1094 SDE_ERROR("Unsupported: Dest scaler/3d mux DSC + PU\n");
1095 return -EINVAL;
1096 }
1097 }
1098
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001099 /* if any dimension is zero, clear all dimensions for clarity */
1100 if (sde_kms_rect_is_null(lm_roi))
1101 memset(lm_roi, 0, sizeof(*lm_roi));
1102
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001103 return 0;
1104}
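
/*
 * Worked example (illustrative numbers): on a dual-LM config with
 * lm_bounds[0] = {0, 0, 1080, 1920} and lm_bounds[1] = {1080, 0, 1080, 1920},
 * a crtc_roi of {980, 0, 200, 600} yields
 *
 *	lm_roi[0] = {980, 0, 100, 600}
 *	lm_roi[1] = {1080, 0, 100, 600}
 *
 * i.e. each LM ROI is the intersection of the crtc ROI with that mixer's
 * bounds, still in crtc coordinates. A null crtc ROI copies the full
 * lm_bounds instead.
 */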
1105
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001106static u32 _sde_crtc_get_displays_affected(struct drm_crtc *crtc,
1107 struct drm_crtc_state *state)
1108{
1109 struct sde_crtc *sde_crtc;
1110 struct sde_crtc_state *crtc_state;
1111 u32 disp_bitmask = 0;
1112 int i;
1113
1114 sde_crtc = to_sde_crtc(crtc);
1115 crtc_state = to_sde_crtc_state(state);
1116
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05001117 /* pingpong split: one ROI, one LM, two physical displays */
1118 if (crtc_state->is_ppsplit) {
1119 u32 lm_split_width = crtc_state->lm_bounds[0].w / 2;
1120 struct sde_rect *roi = &crtc_state->lm_roi[0];
1121
1122 if (sde_kms_rect_is_null(roi))
1123 disp_bitmask = 0;
1124 else if ((u32)roi->x + (u32)roi->w <= lm_split_width)
1125 disp_bitmask = BIT(0); /* left only */
1126 else if (roi->x >= lm_split_width)
1127 disp_bitmask = BIT(1); /* right only */
1128 else
1129 disp_bitmask = BIT(0) | BIT(1); /* left and right */
1130 } else {
1131 for (i = 0; i < sde_crtc->num_mixers; i++) {
1132 if (!sde_kms_rect_is_null(&crtc_state->lm_roi[i]))
1133 disp_bitmask |= BIT(i);
1134 }
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001135 }
1136
1137 SDE_DEBUG("affected displays 0x%x\n", disp_bitmask);
1138
1139 return disp_bitmask;
1140}
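
/*
 * Illustrative example for the pingpong-split branch above, assuming
 * lm_bounds[0].w = 2160 so lm_split_width = 1080:
 *
 *	lm_roi[0] = {100, 0, 400, 600}  -> x + w = 500 <= 1080  -> BIT(0)
 *	lm_roi[0] = {1200, 0, 400, 600} -> x >= 1080            -> BIT(1)
 *	lm_roi[0] = {900, 0, 400, 600}  -> straddles the split  -> BIT(0) | BIT(1)
 */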
1141
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001142static int _sde_crtc_check_rois_centered_and_symmetric(struct drm_crtc *crtc,
1143 struct drm_crtc_state *state)
1144{
1145 struct sde_crtc *sde_crtc;
1146 struct sde_crtc_state *crtc_state;
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001147 const struct sde_rect *roi[CRTC_DUAL_MIXERS];
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001148
1149 if (!crtc || !state)
1150 return -EINVAL;
1151
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001152 sde_crtc = to_sde_crtc(crtc);
1153 crtc_state = to_sde_crtc_state(state);
1154
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001155 if (sde_crtc->num_mixers > CRTC_DUAL_MIXERS) {
1156 SDE_ERROR("%s: unsupported number of mixers: %d\n",
1157 sde_crtc->name, sde_crtc->num_mixers);
1158 return -EINVAL;
1159 }
1160
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001161 /*
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05001162 * If using pingpong split: one ROI, one LM, two physical displays
1163 * then the ROI must be centered on the panel split boundary and
1164 * be of equal width across the split.
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001165 */
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05001166 if (crtc_state->is_ppsplit) {
1167 u16 panel_split_width;
1168 u32 display_mask;
1169
1170 roi[0] = &crtc_state->lm_roi[0];
1171
1172 if (sde_kms_rect_is_null(roi[0]))
1173 return 0;
1174
1175 display_mask = _sde_crtc_get_displays_affected(crtc, state);
1176 if (display_mask != (BIT(0) | BIT(1)))
1177 return 0;
1178
1179 panel_split_width = crtc_state->lm_bounds[0].w / 2;
1180 if (roi[0]->x + roi[0]->w / 2 != panel_split_width) {
1181 SDE_ERROR("%s: roi x %d w %d split %d\n",
1182 sde_crtc->name, roi[0]->x, roi[0]->w,
1183 panel_split_width);
1184 return -EINVAL;
1185 }
1186
1187 return 0;
1188 }
1189
1190 /*
1191 * On certain HW, if using 2 LM, ROIs must be split evenly between the
1192 * LMs and be of equal width.
1193 */
Clarence Ipffb87422017-06-30 13:37:48 -04001194 if (sde_crtc->num_mixers < 2)
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05001195 return 0;
1196
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001197 roi[0] = &crtc_state->lm_roi[0];
1198 roi[1] = &crtc_state->lm_roi[1];
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001199
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001200 /* if one of the roi is null it's a left/right-only update */
1201 if (sde_kms_rect_is_null(roi[0]) || sde_kms_rect_is_null(roi[1]))
1202 return 0;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001203
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001204 /* check lm rois are equal width & first roi ends at 2nd roi */
1205 if (roi[0]->x + roi[0]->w != roi[1]->x || roi[0]->w != roi[1]->w) {
1206 SDE_ERROR(
1207 "%s: rois not centered and symmetric: roi0 x %d w %d roi1 x %d w %d\n",
1208 sde_crtc->name, roi[0]->x, roi[0]->w,
1209 roi[1]->x, roi[1]->w);
1210 return -EINVAL;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001211 }
1212
1213 return 0;
1214}
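
/*
 * Illustrative example for the dual-LM check above:
 *
 *	lm_roi[0] = {880, 0, 200, 600}, lm_roi[1] = {1080, 0, 200, 600}
 *		-> valid: equal widths and roi[0] ends exactly where roi[1]
 *		   begins (880 + 200 == 1080)
 *
 *	lm_roi[0] = {800, 0, 280, 600}, lm_roi[1] = {1080, 0, 200, 600}
 *		-> rejected: widths differ, so the ROI is not centered and
 *		   symmetric across the LM boundary
 */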
1215
1216static int _sde_crtc_check_planes_within_crtc_roi(struct drm_crtc *crtc,
1217 struct drm_crtc_state *state)
1218{
1219 struct sde_crtc *sde_crtc;
1220 struct sde_crtc_state *crtc_state;
1221 const struct sde_rect *crtc_roi;
Veera Sundaram Sankarand916e2a2017-10-12 14:52:26 -07001222 const struct drm_plane_state *pstate;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001223 struct drm_plane *plane;
1224
1225 if (!crtc || !state)
1226 return -EINVAL;
1227
1228 /*
 1229 * Reject the commit if a plane's CRTC destination coordinates fall
 1230 * outside the partial CRTC ROI. LM output is determined via connector
 1231 * ROIs, if they are specified, not plane CRTC ROIs.
1232 */
1233
1234 sde_crtc = to_sde_crtc(crtc);
1235 crtc_state = to_sde_crtc_state(state);
1236 crtc_roi = &crtc_state->crtc_roi;
1237
1238 if (sde_kms_rect_is_null(crtc_roi))
1239 return 0;
1240
Veera Sundaram Sankarand916e2a2017-10-12 14:52:26 -07001241 drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001242 struct sde_rect plane_roi, intersection;
1243
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001244 if (IS_ERR_OR_NULL(pstate)) {
1245 int rc = PTR_ERR(pstate);
1246
1247 SDE_ERROR("%s: failed to get plane%d state, %d\n",
1248 sde_crtc->name, plane->base.id, rc);
1249 return rc;
1250 }
1251
1252 plane_roi.x = pstate->crtc_x;
1253 plane_roi.y = pstate->crtc_y;
1254 plane_roi.w = pstate->crtc_w;
1255 plane_roi.h = pstate->crtc_h;
1256 sde_kms_rect_intersect(crtc_roi, &plane_roi, &intersection);
1257 if (!sde_kms_rect_is_equal(&plane_roi, &intersection)) {
1258 SDE_ERROR(
1259 "%s: plane%d crtc roi (%d,%d,%d,%d) outside crtc roi (%d,%d,%d,%d)\n",
1260 sde_crtc->name, plane->base.id,
1261 plane_roi.x, plane_roi.y,
1262 plane_roi.w, plane_roi.h,
1263 crtc_roi->x, crtc_roi->y,
1264 crtc_roi->w, crtc_roi->h);
1265 return -E2BIG;
1266 }
1267 }
1268
1269 return 0;
1270}
1271
1272static int _sde_crtc_check_rois(struct drm_crtc *crtc,
1273 struct drm_crtc_state *state)
1274{
1275 struct sde_crtc *sde_crtc;
Jeykumar Sankaran736d79d2017-10-05 17:44:24 -07001276 struct sde_crtc_state *sde_crtc_state;
Jeykumar Sankaran736d79d2017-10-05 17:44:24 -07001277 struct msm_mode_info mode_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001278 int rc, lm_idx, i;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001279
1280 if (!crtc || !state)
1281 return -EINVAL;
1282
Jeykumar Sankaran736d79d2017-10-05 17:44:24 -07001283 memset(&mode_info, 0, sizeof(mode_info));
1284
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001285 sde_crtc = to_sde_crtc(crtc);
Lloyd Atkinson4c08e472017-11-22 12:16:04 -05001286 sde_crtc_state = to_sde_crtc_state(state);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001287
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001288 if (hweight_long(state->connector_mask) != 1) {
1289 SDE_ERROR("invalid connector count(%d) for crtc: %d\n",
1290 (int)hweight_long(state->connector_mask),
1291 crtc->base.id);
1292 return -EINVAL;
1293 }
Jeykumar Sankaran736d79d2017-10-05 17:44:24 -07001294
Lloyd Atkinson4c08e472017-11-22 12:16:04 -05001295 /*
1296 * check connector array cached at modeset time since incoming atomic
1297 * state may not include any connectors if they aren't modified
1298 */
1299 for (i = 0; i < ARRAY_SIZE(sde_crtc_state->connectors); i++) {
1300 struct drm_connector *conn = sde_crtc_state->connectors[i];
1301
1302 if (!conn || !conn->state)
1303 continue;
1304
1305 rc = sde_connector_get_mode_info(conn->state, &mode_info);
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001306 if (rc) {
1307 SDE_ERROR("failed to get mode info\n");
1308 return -EINVAL;
1309 }
1310 break;
Jeykumar Sankaran736d79d2017-10-05 17:44:24 -07001311 }
1312
1313 if (!mode_info.roi_caps.enabled)
1314 return 0;
1315
Jeykumar Sankaran736d79d2017-10-05 17:44:24 -07001316 if (sde_crtc_state->user_roi_list.num_rects >
1317 mode_info.roi_caps.num_roi) {
1318 SDE_ERROR("roi count is more than supported limit, %d > %d\n",
1319 sde_crtc_state->user_roi_list.num_rects,
1320 mode_info.roi_caps.num_roi);
1321 return -E2BIG;
1322 }
1323
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001324 rc = _sde_crtc_set_crtc_roi(crtc, state);
1325 if (rc)
1326 return rc;
1327
Lloyd Atkinson77382202017-02-01 14:59:43 -05001328 rc = _sde_crtc_check_autorefresh(crtc, state);
1329 if (rc)
1330 return rc;
1331
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001332 for (lm_idx = 0; lm_idx < sde_crtc->num_mixers; lm_idx++) {
1333 rc = _sde_crtc_set_lm_roi(crtc, state, lm_idx);
1334 if (rc)
1335 return rc;
1336 }
1337
1338 rc = _sde_crtc_check_rois_centered_and_symmetric(crtc, state);
1339 if (rc)
1340 return rc;
1341
1342 rc = _sde_crtc_check_planes_within_crtc_roi(crtc, state);
1343 if (rc)
1344 return rc;
1345
1346 return 0;
1347}
1348
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001349static void _sde_crtc_program_lm_output_roi(struct drm_crtc *crtc)
1350{
1351 struct sde_crtc *sde_crtc;
1352 struct sde_crtc_state *crtc_state;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001353 const struct sde_rect *lm_roi;
1354 struct sde_hw_mixer *hw_lm;
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001355 int lm_idx, lm_horiz_position;
1356
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001357 if (!crtc)
1358 return;
1359
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001360 sde_crtc = to_sde_crtc(crtc);
1361 crtc_state = to_sde_crtc_state(crtc->state);
1362
1363 lm_horiz_position = 0;
1364 for (lm_idx = 0; lm_idx < sde_crtc->num_mixers; lm_idx++) {
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001365 struct sde_hw_mixer_cfg cfg;
1366
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001367 lm_roi = &crtc_state->lm_roi[lm_idx];
1368 hw_lm = sde_crtc->mixers[lm_idx].hw_lm;
1369
1370 SDE_EVT32(DRMID(crtc_state->base.crtc), lm_idx,
1371 lm_roi->x, lm_roi->y, lm_roi->w, lm_roi->h);
1372
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001373 if (sde_kms_rect_is_null(lm_roi))
1374 continue;
1375
Ping Lif41c2ef2017-05-04 14:40:45 -07001376 hw_lm->cfg.out_width = lm_roi->w;
1377 hw_lm->cfg.out_height = lm_roi->h;
1378 hw_lm->cfg.right_mixer = lm_horiz_position;
1379
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001380 cfg.out_width = lm_roi->w;
1381 cfg.out_height = lm_roi->h;
1382 cfg.right_mixer = lm_horiz_position++;
1383 cfg.flags = 0;
1384 hw_lm->ops.setup_mixer_out(hw_lm, &cfg);
1385 }
1386}
1387
Clarence Ip8dece622017-12-22 18:25:25 -05001388/**
1389 * _sde_crtc_calc_inline_prefill - calculate rotator start prefill
1390 * @crtc: Pointer to drm crtc
1391 * return: prefill time in lines
1392 */
1393static u32 _sde_crtc_calc_inline_prefill(struct drm_crtc *crtc)
1394{
1395 struct sde_kms *sde_kms;
1396
1397 if (!crtc) {
1398 SDE_ERROR("invalid parameters\n");
1399 return 0;
1400 }
1401
1402 sde_kms = _sde_crtc_get_kms(crtc);
1403 if (!sde_kms || !sde_kms->catalog) {
1404 SDE_ERROR("invalid kms\n");
1405 return 0;
1406 }
1407
1408 return sde_kms->catalog->sbuf_prefill + sde_kms->catalog->sbuf_headroom;
1409}
1410
Clarence Ip4f339092018-01-05 13:29:04 -05001411uint64_t sde_crtc_get_sbuf_clk(struct drm_crtc_state *state)
1412{
1413 struct sde_crtc_state *cstate;
1414 u64 tmp;
1415
1416 if (!state) {
1417 SDE_ERROR("invalid crtc state\n");
1418 return 0;
1419 }
1420 cstate = to_sde_crtc_state(state);
1421
1422 /*
1423 * Select the max of the current and previous frame's user mode
1424 * clock setting so that reductions in clock voting don't take effect
1425 * until the current frame has completed.
1426 *
1427 * If the sbuf_clk_rate[] FIFO hasn't yet been updated in this commit
1428 * cycle (as part of the CRTC's atomic check), compare the current
1429 * clock value against sbuf_clk_rate[1] instead of comparing the
1430 * sbuf_clk_rate[0]/sbuf_clk_rate[1] values.
1431 */
1432 if (cstate->sbuf_clk_shifted)
1433 tmp = cstate->sbuf_clk_rate[0];
1434 else
1435 tmp = sde_crtc_get_property(cstate, CRTC_PROP_ROT_CLK);
1436
1437 return max_t(u64, cstate->sbuf_clk_rate[1], tmp);
1438}
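
/*
 * Example (illustrative numbers): if the previous frame voted
 * sbuf_clk_rate[1] = 300 MHz and the current frame requests only 200 MHz
 * via CRTC_PROP_ROT_CLK, this helper still returns 300 MHz, so the reduced
 * vote only takes effect after the frame that needed the higher rate has
 * completed. An increase (e.g. 200 MHz -> 300 MHz) is applied immediately.
 */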
1439
Dhaval Patel48c76022016-09-01 17:51:23 -07001440static void _sde_crtc_blend_setup_mixer(struct drm_crtc *crtc,
Clarence Ip1d1979d2017-12-22 18:24:34 -05001441 struct drm_crtc_state *old_state, struct sde_crtc *sde_crtc,
1442 struct sde_crtc_mixer *mixer)
Dhaval Patel48c76022016-09-01 17:51:23 -07001443{
1444 struct drm_plane *plane;
Dhaval Patel6c666622017-03-21 23:02:59 -07001445 struct drm_framebuffer *fb;
1446 struct drm_plane_state *state;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001447 struct sde_crtc_state *cstate;
Dhaval Patel48c76022016-09-01 17:51:23 -07001448 struct sde_plane_state *pstate = NULL;
1449 struct sde_format *format;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001450 struct sde_hw_ctl *ctl;
1451 struct sde_hw_mixer *lm;
1452 struct sde_hw_stage_cfg *stage_cfg;
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001453 struct sde_rect plane_crtc_roi;
Dhaval Patel48c76022016-09-01 17:51:23 -07001454
Clarence Ip8dece622017-12-22 18:25:25 -05001455 u32 flush_mask, flush_sbuf, prefill;
Dhaval Patel572cfd22017-06-12 19:33:39 -07001456 uint32_t stage_idx, lm_idx;
1457 int zpos_cnt[SDE_STAGE_MAX + 1] = { 0 };
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001458 int i;
Dhaval Patel572cfd22017-06-12 19:33:39 -07001459 bool bg_alpha_enable = false;
Dhaval Patel48c76022016-09-01 17:51:23 -07001460
Clarence Ip1d1979d2017-12-22 18:24:34 -05001461 if (!sde_crtc || !crtc->state || !mixer) {
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001462 SDE_ERROR("invalid sde_crtc or mixer\n");
1463 return;
1464 }
1465
1466 ctl = mixer->hw_ctl;
1467 lm = mixer->hw_lm;
1468 stage_cfg = &sde_crtc->stage_cfg;
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001469 cstate = to_sde_crtc_state(crtc->state);
Dhaval Patel44f12472016-08-29 12:19:47 -07001470
Clarence Ip8dece622017-12-22 18:25:25 -05001471 cstate->sbuf_prefill_line = _sde_crtc_calc_inline_prefill(crtc);
Clarence Ip1d1979d2017-12-22 18:24:34 -05001472 sde_crtc->sbuf_flush_mask_old = sde_crtc->sbuf_flush_mask_all;
1473 sde_crtc->sbuf_flush_mask_all = 0x0;
1474 sde_crtc->sbuf_flush_mask_delta = 0x0;
Clarence Ip7eb90452017-05-23 11:41:19 -04001475
Dhaval Patel44f12472016-08-29 12:19:47 -07001476 drm_atomic_crtc_for_each_plane(plane, crtc) {
Dhaval Patel6c666622017-03-21 23:02:59 -07001477 state = plane->state;
1478 if (!state)
1479 continue;
Dhaval Patel48c76022016-09-01 17:51:23 -07001480
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001481 plane_crtc_roi.x = state->crtc_x;
1482 plane_crtc_roi.y = state->crtc_y;
1483 plane_crtc_roi.w = state->crtc_w;
1484 plane_crtc_roi.h = state->crtc_h;
1485
Dhaval Patel6c666622017-03-21 23:02:59 -07001486 pstate = to_sde_plane_state(state);
1487 fb = state->fb;
Dhaval Patel44f12472016-08-29 12:19:47 -07001488
Clarence Ip8dece622017-12-22 18:25:25 -05001489 /* assume all rotated planes report the same prefill amount */
1490 prefill = sde_plane_rot_get_prefill(plane);
1491 if (prefill)
Clarence Ip7eb90452017-05-23 11:41:19 -04001492 cstate->sbuf_prefill_line = prefill;
Alan Kwong4dd64c82017-02-04 18:41:51 -08001493
Clarence Ipb776b532017-09-12 18:30:06 -04001494 sde_plane_get_ctl_flush(plane, ctl, &flush_mask, &flush_sbuf);
Dhaval Patel44f12472016-08-29 12:19:47 -07001495
Clarence Ipb776b532017-09-12 18:30:06 -04001496 /* save sbuf flush value for later */
Clarence Ip1d1979d2017-12-22 18:24:34 -05001497 if (old_state && drm_atomic_get_existing_plane_state(
1498 old_state->state, plane))
1499 sde_crtc->sbuf_flush_mask_delta |= flush_sbuf;
1500 sde_crtc->sbuf_flush_mask_all |= flush_sbuf;
Dhaval Patel48c76022016-09-01 17:51:23 -07001501
1502 SDE_DEBUG("crtc %d stage:%d - plane %d sspp %d fb %d\n",
Clarence Ipd9f9fa62016-09-09 13:42:32 -04001503 crtc->base.id,
Clarence Ipd9f9fa62016-09-09 13:42:32 -04001504 pstate->stage,
1505 plane->base.id,
1506 sde_plane_pipe(plane) - SSPP_VIG0,
Dhaval Patel6c666622017-03-21 23:02:59 -07001507 state->fb ? state->fb->base.id : -1);
Dhaval Patel44f12472016-08-29 12:19:47 -07001508
Dhaval Patel48c76022016-09-01 17:51:23 -07001509 format = to_sde_format(msm_framebuffer_format(pstate->base.fb));
Narendra Muppallaec11a0a2017-06-15 15:35:17 -07001510 if (!format) {
1511 SDE_ERROR("invalid format\n");
1512 return;
1513 }
1514
Dhaval Patel572cfd22017-06-12 19:33:39 -07001515 if (pstate->stage == SDE_STAGE_BASE && format->alpha_enable)
1516 bg_alpha_enable = true;
Dhaval Patel44f12472016-08-29 12:19:47 -07001517
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001518 SDE_EVT32(DRMID(crtc), DRMID(plane),
1519 state->fb ? state->fb->base.id : -1,
1520 state->src_x >> 16, state->src_y >> 16,
1521 state->src_w >> 16, state->src_h >> 16,
1522 state->crtc_x, state->crtc_y,
Clarence Ip7eb90452017-05-23 11:41:19 -04001523 state->crtc_w, state->crtc_h,
Clarence Ipb776b532017-09-12 18:30:06 -04001524 flush_sbuf != 0);
Dhaval Patel6c666622017-03-21 23:02:59 -07001525
Dhaval Patel572cfd22017-06-12 19:33:39 -07001526 stage_idx = zpos_cnt[pstate->stage]++;
1527 stage_cfg->stage[pstate->stage][stage_idx] =
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001528 sde_plane_pipe(plane);
Dhaval Patel572cfd22017-06-12 19:33:39 -07001529 stage_cfg->multirect_index[pstate->stage][stage_idx] =
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001530 pstate->multirect_index;
1531
Dhaval Patel572cfd22017-06-12 19:33:39 -07001532 SDE_EVT32(DRMID(crtc), DRMID(plane), stage_idx,
1533 sde_plane_pipe(plane) - SSPP_VIG0, pstate->stage,
1534 pstate->multirect_index, pstate->multirect_mode,
1535 format->base.pixel_format, fb ? fb->modifier[0] : 0);
1536
1537 /* blend config update */
1538 for (lm_idx = 0; lm_idx < sde_crtc->num_mixers; lm_idx++) {
1539 _sde_crtc_setup_blend_cfg(mixer + lm_idx, pstate,
1540 format);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001541 mixer[lm_idx].flush_mask |= flush_mask;
1542
Dhaval Patel572cfd22017-06-12 19:33:39 -07001543 if (bg_alpha_enable && !format->alpha_enable)
1544 mixer[lm_idx].mixer_op_mode = 0;
1545 else
1546 mixer[lm_idx].mixer_op_mode |=
Dhaval Patel48c76022016-09-01 17:51:23 -07001547 1 << pstate->stage;
Dhaval Patel48c76022016-09-01 17:51:23 -07001548 }
Dhaval Patel44f12472016-08-29 12:19:47 -07001549 }
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001550
1551 if (lm && lm->ops.setup_dim_layer) {
1552 cstate = to_sde_crtc_state(crtc->state);
1553 for (i = 0; i < cstate->num_dim_layers; i++)
1554 _sde_crtc_setup_dim_layer_cfg(crtc, sde_crtc,
1555 mixer, &cstate->dim_layer[i]);
1556 }
Alan Kwong4dd64c82017-02-04 18:41:51 -08001557
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001558 _sde_crtc_program_lm_output_roi(crtc);
Dhaval Patel44f12472016-08-29 12:19:47 -07001559}
1560
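/**
 * _sde_crtc_swap_mixers_for_right_partial_update - swap the LM0/LM1 mixer
 *	assignments when a DSC merge topology commits a right-only partial
 *	update, and swap them back for left-only or full updates
 * @crtc: Pointer to drm crtc structure
 */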
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001561static void _sde_crtc_swap_mixers_for_right_partial_update(
1562 struct drm_crtc *crtc)
1563{
1564 struct sde_crtc *sde_crtc;
1565 struct sde_crtc_state *cstate;
1566 struct drm_encoder *drm_enc;
1567 bool is_right_only;
1568 bool encoder_in_dsc_merge = false;
1569
1570 if (!crtc || !crtc->state)
1571 return;
1572
1573 sde_crtc = to_sde_crtc(crtc);
1574 cstate = to_sde_crtc_state(crtc->state);
1575
1576 if (sde_crtc->num_mixers != CRTC_DUAL_MIXERS)
1577 return;
1578
1579 drm_for_each_encoder(drm_enc, crtc->dev) {
1580 if (drm_enc->crtc == crtc &&
1581 sde_encoder_is_dsc_merge(drm_enc)) {
1582 encoder_in_dsc_merge = true;
1583 break;
1584 }
1585 }
1586
1587 /**
1588 * For right-only partial update with DSC merge, we swap LM0 & LM1.
1589 * This is due to two reasons:
1590 * - On 8996, there is a DSC HW requirement that in DSC Merge Mode,
1591 * the left DSC must be used, right DSC cannot be used alone.
1592 * For right-only partial update, this means swap layer mixers to map
1593 * Left LM to Right INTF. On later HW this was relaxed.
1594 * - In DSC Merge mode, the physical encoder has already registered
1595 * PP0 as the master, to switch to right-only we would have to
1596 * reprogram to be driven by PP1 instead.
1597 * To support both cases, we prefer to support the mixer swap solution.
1598 */
1599 if (!encoder_in_dsc_merge)
1600 return;
1601
1602 is_right_only = sde_kms_rect_is_null(&cstate->lm_roi[0]) &&
1603 !sde_kms_rect_is_null(&cstate->lm_roi[1]);
1604
1605 if (is_right_only && !sde_crtc->mixers_swapped) {
1606 /* right-only update swap mixers */
1607 swap(sde_crtc->mixers[0], sde_crtc->mixers[1]);
1608 sde_crtc->mixers_swapped = true;
1609 } else if (!is_right_only && sde_crtc->mixers_swapped) {
1610 /* left-only or full update, swap back */
1611 swap(sde_crtc->mixers[0], sde_crtc->mixers[1]);
1612 sde_crtc->mixers_swapped = false;
1613 }
1614
1615 SDE_DEBUG("%s: right_only %d swapped %d, mix0->lm%d, mix1->lm%d\n",
1616 sde_crtc->name, is_right_only, sde_crtc->mixers_swapped,
1617 sde_crtc->mixers[0].hw_lm->idx - LM_0,
1618 sde_crtc->mixers[1].hw_lm->idx - LM_0);
1619 SDE_EVT32(DRMID(crtc), is_right_only, sde_crtc->mixers_swapped,
1620 sde_crtc->mixers[0].hw_lm->idx - LM_0,
1621 sde_crtc->mixers[1].hw_lm->idx - LM_0);
1622}
1623
Clarence Ipd9f9fa62016-09-09 13:42:32 -04001624/**
1625 * _sde_crtc_blend_setup - configure crtc mixers
1626 * @crtc: Pointer to drm crtc structure
Clarence Ip1d1979d2017-12-22 18:24:34 -05001627 * @old_state: Pointer to old crtc state
1628 * @add_planes: Whether or not to add planes to mixers
Clarence Ipd9f9fa62016-09-09 13:42:32 -04001629 */
Clarence Ip1d1979d2017-12-22 18:24:34 -05001630static void _sde_crtc_blend_setup(struct drm_crtc *crtc,
1631 struct drm_crtc_state *old_state, bool add_planes)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001632{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001633 struct sde_crtc *sde_crtc;
1634 struct sde_crtc_state *sde_crtc_state;
1635 struct sde_crtc_mixer *mixer;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001636 struct sde_hw_ctl *ctl;
1637 struct sde_hw_mixer *lm;
Dhaval Patel44f12472016-08-29 12:19:47 -07001638
1639 int i;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001640
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001641 if (!crtc)
1642 return;
1643
1644 sde_crtc = to_sde_crtc(crtc);
1645 sde_crtc_state = to_sde_crtc_state(crtc->state);
1646 mixer = sde_crtc->mixers;
1647
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04001648 SDE_DEBUG("%s\n", sde_crtc->name);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001649
Dhaval Patel48c76022016-09-01 17:51:23 -07001650 if (sde_crtc->num_mixers > CRTC_DUAL_MIXERS) {
		SDE_ERROR("invalid number of mixers: %d\n", sde_crtc->num_mixers);
1652 return;
1653 }
1654
1655 for (i = 0; i < sde_crtc->num_mixers; i++) {
1656 if (!mixer[i].hw_lm || !mixer[i].hw_ctl) {
1657 SDE_ERROR("invalid lm or ctl assigned to mixer\n");
1658 return;
1659 }
1660 mixer[i].mixer_op_mode = 0;
1661 mixer[i].flush_mask = 0;
Lloyd Atkinsone5ec30d2016-08-23 14:32:32 -04001662 if (mixer[i].hw_ctl->ops.clear_all_blendstages)
1663 mixer[i].hw_ctl->ops.clear_all_blendstages(
1664 mixer[i].hw_ctl);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001665
1666 /* clear dim_layer settings */
1667 lm = mixer[i].hw_lm;
1668 if (lm->ops.clear_dim_layer)
1669 lm->ops.clear_dim_layer(lm);
Dhaval Patel48c76022016-09-01 17:51:23 -07001670 }
1671
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001672 _sde_crtc_swap_mixers_for_right_partial_update(crtc);
1673
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001674 /* initialize stage cfg */
Clarence Ip8f7366c2016-07-05 12:15:26 -04001675 memset(&sde_crtc->stage_cfg, 0, sizeof(struct sde_hw_stage_cfg));
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001676
Clarence Ip569d5af2017-10-14 21:09:01 -04001677 if (add_planes)
Clarence Ip1d1979d2017-12-22 18:24:34 -05001678 _sde_crtc_blend_setup_mixer(crtc, old_state, sde_crtc, mixer);
Dhaval Patel48c76022016-09-01 17:51:23 -07001679
Abhijit Kulkarni71002ba2016-06-24 18:36:28 -04001680 for (i = 0; i < sde_crtc->num_mixers; i++) {
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001681 const struct sde_rect *lm_roi = &sde_crtc_state->lm_roi[i];
1682
Abhijit Kulkarni71002ba2016-06-24 18:36:28 -04001683 ctl = mixer[i].hw_ctl;
1684 lm = mixer[i].hw_lm;
Abhijit Kulkarni71002ba2016-06-24 18:36:28 -04001685
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001686 if (sde_kms_rect_is_null(lm_roi)) {
1687 SDE_DEBUG(
1688 "%s: lm%d leave ctl%d mask 0 since null roi\n",
1689 sde_crtc->name, lm->idx - LM_0,
1690 ctl->idx - CTL_0);
1691 continue;
1692 }
1693
Dhaval Patel48c76022016-09-01 17:51:23 -07001694 lm->ops.setup_alpha_out(lm, mixer[i].mixer_op_mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001695
Clarence Ip662698e2017-09-12 18:34:16 -04001696 mixer[i].pipe_mask = mixer[i].flush_mask;
Dhaval Patel48c76022016-09-01 17:51:23 -07001697 mixer[i].flush_mask |= ctl->ops.get_bitmask_mixer(ctl,
Abhijit Kulkarni71002ba2016-06-24 18:36:28 -04001698 mixer[i].hw_lm->idx);
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04001699
1700 /* stage config flush mask */
Dhaval Patel48c76022016-09-01 17:51:23 -07001701 ctl->ops.update_pending_flush(ctl, mixer[i].flush_mask);
1702
Clarence Ip8e69ad02016-12-09 09:43:57 -05001703 SDE_DEBUG("lm %d, op_mode 0x%X, ctl %d, flush mask 0x%x\n",
1704 mixer[i].hw_lm->idx - LM_0,
1705 mixer[i].mixer_op_mode,
1706 ctl->idx - CTL_0,
1707 mixer[i].flush_mask);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001708
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001709 ctl->ops.setup_blendstage(ctl, mixer[i].hw_lm->idx,
Dhaval Patel572cfd22017-06-12 19:33:39 -07001710 &sde_crtc->stage_cfg);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001711 }
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001712
1713 _sde_crtc_program_lm_output_roi(crtc);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001714}
1715
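/**
 * _sde_crtc_find_plane_fb_modes - count planes per fb translation mode
 * @state: Pointer to the drm crtc state being examined
 * @fb_ns: Returns number of planes using non-secure framebuffers
 * @fb_sec: Returns number of planes using secure framebuffers
 * @fb_sec_dir: Returns number of planes using secure-direct translation
 * Returns: 0 on success, negative error code otherwise
 */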
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001716static int _sde_crtc_find_plane_fb_modes(struct drm_crtc_state *state,
1717 uint32_t *fb_ns,
1718 uint32_t *fb_sec,
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001719 uint32_t *fb_sec_dir)
1720{
1721 struct drm_plane *plane;
1722 const struct drm_plane_state *pstate;
1723 struct sde_plane_state *sde_pstate;
1724 uint32_t mode = 0;
1725 int rc;
1726
1727 if (!state) {
1728 SDE_ERROR("invalid state\n");
1729 return -EINVAL;
1730 }
1731
1732 *fb_ns = 0;
1733 *fb_sec = 0;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001734 *fb_sec_dir = 0;
1735 drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
1736 if (IS_ERR_OR_NULL(pstate)) {
1737 rc = PTR_ERR(pstate);
1738 SDE_ERROR("crtc%d failed to get plane%d state%d\n",
1739 state->crtc->base.id,
1740 plane->base.id, rc);
1741 return rc;
1742 }
1743 sde_pstate = to_sde_plane_state(pstate);
1744 mode = sde_plane_get_property(sde_pstate,
1745 PLANE_PROP_FB_TRANSLATION_MODE);
1746 switch (mode) {
1747 case SDE_DRM_FB_NON_SEC:
1748 (*fb_ns)++;
1749 break;
1750 case SDE_DRM_FB_SEC:
1751 (*fb_sec)++;
1752 break;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001753 case SDE_DRM_FB_SEC_DIR_TRANS:
1754 (*fb_sec_dir)++;
1755 break;
1756 default:
			SDE_ERROR("Error: Plane[%d], fb_trans_mode:%d\n",
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001758 plane->base.id, mode);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001759 return -EINVAL;
1760 }
1761 }
1762 return 0;
1763}
1764
/**
 * sde_crtc_get_secure_transition_ops - determine the operations that need
 *	to be performed before transitioning to the secure state. This
 *	function should be called after the new state has been swapped in.
 * @crtc: Pointer to drm crtc structure
 * @old_crtc_state: Pointer to old crtc state
 * @old_valid_fb: True if the old state had a valid framebuffer attached
 * Returns: bitmask of the operations that need to be performed, or a
 *	negative error code on failure
 */
1773int sde_crtc_get_secure_transition_ops(struct drm_crtc *crtc,
1774 struct drm_crtc_state *old_crtc_state,
1775 bool old_valid_fb)
1776{
1777 struct drm_plane *plane;
1778 struct drm_encoder *encoder;
1779 struct sde_crtc *sde_crtc;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001780 struct sde_kms *sde_kms;
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08001781 struct sde_mdss_cfg *catalog;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001782 struct sde_kms_smmu_state_data *smmu_state;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001783 uint32_t translation_mode = 0, secure_level;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001784 int ops = 0;
1785 bool post_commit = false;
1786
1787 if (!crtc || !crtc->state) {
1788 SDE_ERROR("invalid crtc\n");
1789 return -EINVAL;
1790 }
1791
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001792 sde_kms = _sde_crtc_get_kms(crtc);
1793 if (!sde_kms)
1794 return -EINVAL;
1795
1796 smmu_state = &sde_kms->smmu_state;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001797 sde_crtc = to_sde_crtc(crtc);
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001798 secure_level = sde_crtc_get_secure_level(crtc, crtc->state);
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08001799 catalog = sde_kms->catalog;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001800
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001801 SDE_DEBUG("crtc%d, secure_level%d old_valid_fb%d\n",
1802 crtc->base.id, secure_level, old_valid_fb);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001803
Veera Sundaram Sankaranfd792402017-10-13 12:50:41 -07001804 SDE_EVT32_VERBOSE(DRMID(crtc), secure_level, smmu_state->state,
1805 old_valid_fb, SDE_EVTLOG_FUNC_ENTRY);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001806 /**
1807 * SMMU operations need to be delayed in case of
1808 * video mode panels when switching back to non_secure
1809 * mode
1810 */
1811 drm_for_each_encoder(encoder, crtc->dev) {
1812 if (encoder->crtc != crtc)
1813 continue;
1814
Veera Sundaram Sankaranae995e02017-10-13 15:12:32 -07001815 post_commit |= sde_encoder_check_mode(encoder,
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05301816 MSM_DISPLAY_CAP_VID_MODE);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001817 }
1818
1819 drm_atomic_crtc_for_each_plane(plane, crtc) {
1820 if (!plane->state)
1821 continue;
1822
1823 translation_mode = sde_plane_get_property(
1824 to_sde_plane_state(plane->state),
1825 PLANE_PROP_FB_TRANSLATION_MODE);
1826 if (translation_mode > SDE_DRM_FB_SEC_DIR_TRANS) {
1827 SDE_ERROR("crtc%d, invalid translation_mode%d\n",
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001828 crtc->base.id, translation_mode);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001829 return -EINVAL;
1830 }
1831
		/*
		 * we can stop searching once a sec_dir plane is found
		 */
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001836 if (translation_mode == SDE_DRM_FB_SEC_DIR_TRANS)
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001837 break;
1838 }
1839
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001840 mutex_lock(&sde_kms->secure_transition_lock);
1841
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001842 switch (translation_mode) {
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001843 case SDE_DRM_FB_SEC_DIR_TRANS:
1844 /* secure display usecase */
1845 if ((smmu_state->state == ATTACHED) &&
1846 (secure_level == SDE_DRM_SEC_ONLY)) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001847 smmu_state->state = DETACH_ALL_REQ;
1848 smmu_state->transition_type = PRE_COMMIT;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001849 ops |= SDE_KMS_OPS_SECURE_STATE_CHANGE;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001850 if (old_valid_fb) {
1851 ops |= (SDE_KMS_OPS_WAIT_FOR_TX_DONE |
1852 SDE_KMS_OPS_CLEANUP_PLANE_FB);
1853 }
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08001854 if (catalog->sui_misr_supported)
1855 smmu_state->sui_misr_state =
1856 SUI_MISR_ENABLE_REQ;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001857 /* secure camera usecase */
1858 } else if (smmu_state->state == ATTACHED) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001859 smmu_state->state = DETACH_SEC_REQ;
1860 smmu_state->transition_type = PRE_COMMIT;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001861 ops |= SDE_KMS_OPS_SECURE_STATE_CHANGE;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001862 }
1863 break;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001864
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001865 case SDE_DRM_FB_SEC:
1866 case SDE_DRM_FB_NON_SEC:
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001867 if ((smmu_state->state == DETACHED_SEC) ||
1868 (smmu_state->state == DETACH_SEC_REQ)) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001869 smmu_state->state = ATTACH_SEC_REQ;
1870 smmu_state->transition_type = post_commit ?
1871 POST_COMMIT : PRE_COMMIT;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001872 ops |= SDE_KMS_OPS_SECURE_STATE_CHANGE;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001873 if (old_valid_fb)
1874 ops |= SDE_KMS_OPS_WAIT_FOR_TX_DONE;
1875 } else if ((smmu_state->state == DETACHED) ||
1876 (smmu_state->state == DETACH_ALL_REQ)) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001877 smmu_state->state = ATTACH_ALL_REQ;
1878 smmu_state->transition_type = post_commit ?
1879 POST_COMMIT : PRE_COMMIT;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001880 ops |= SDE_KMS_OPS_SECURE_STATE_CHANGE;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001881 if (old_valid_fb)
1882 ops |= (SDE_KMS_OPS_WAIT_FOR_TX_DONE |
1883 SDE_KMS_OPS_CLEANUP_PLANE_FB);
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08001884 if (catalog->sui_misr_supported)
1885 smmu_state->sui_misr_state =
1886 SUI_MISR_DISABLE_REQ;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001887 }
1888 break;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001889
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001890 default:
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001891 SDE_ERROR("invalid plane fb_mode:%d\n", translation_mode);
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001892 ops = -EINVAL;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001893 }
1894
1895 SDE_DEBUG("SMMU State:%d, type:%d ops:%x\n", smmu_state->state,
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001896 smmu_state->transition_type, ops);
Veera Sundaram Sankaranfd792402017-10-13 12:50:41 -07001897 /* log only during actual transition times */
1898 if (ops)
1899 SDE_EVT32(DRMID(crtc), secure_level, translation_mode,
1900 smmu_state->state, smmu_state->transition_type,
1901 ops, old_valid_fb, SDE_EVTLOG_FUNC_EXIT);
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08001902
1903 mutex_unlock(&sde_kms->secure_transition_lock);
1904
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001905 return ops;
1906}
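/*
 * Illustrative consumer of the returned ops bitmask (sketch only; the
 * actual caller and its ordering live outside this function):
 *
 *	ops = sde_crtc_get_secure_transition_ops(crtc, old_state, old_fb);
 *	if (ops & SDE_KMS_OPS_WAIT_FOR_TX_DONE)
 *		wait for the previous frame transfer to complete;
 *	if (ops & SDE_KMS_OPS_SECURE_STATE_CHANGE)
 *		apply the smmu attach/detach recorded in smmu_state;
 *	if (ops & SDE_KMS_OPS_CLEANUP_PLANE_FB)
 *		release the framebuffers held by the old state;
 */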
1907
1908/**
 * _sde_crtc_set_dest_scaler_lut - set up a destination scaler LUT
 * LUTs are configured only once during boot
 * @sde_crtc: Pointer to sde crtc
 * @cstate: Pointer to sde crtc state
 * @lut_idx: CRTC LUT blob property index to fetch and program
 */
1914static int _sde_crtc_set_dest_scaler_lut(struct sde_crtc *sde_crtc,
1915 struct sde_crtc_state *cstate, uint32_t lut_idx)
1916{
1917 struct sde_hw_scaler3_lut_cfg *cfg;
1918 u32 *lut_data = NULL;
1919 size_t len = 0;
1920 int ret = 0;
1921
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05301922 if (!sde_crtc || !cstate) {
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301923 SDE_ERROR("invalid args\n");
1924 return -EINVAL;
1925 }
1926
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301927 lut_data = msm_property_get_blob(&sde_crtc->property_info,
1928 &cstate->property_state, &len, lut_idx);
1929 if (!lut_data || !len) {
Lloyd Atkinsone08229c2017-10-02 17:53:30 -04001930 SDE_DEBUG("%s: lut(%d): cleared: %pK, %zu\n", sde_crtc->name,
1931 lut_idx, lut_data, len);
1932 lut_data = NULL;
1933 len = 0;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301934 }
1935
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05301936 cfg = &cstate->scl3_lut_cfg;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301937
1938 switch (lut_idx) {
1939 case CRTC_PROP_DEST_SCALER_LUT_ED:
1940 cfg->dir_lut = lut_data;
1941 cfg->dir_len = len;
1942 break;
1943 case CRTC_PROP_DEST_SCALER_LUT_CIR:
1944 cfg->cir_lut = lut_data;
1945 cfg->cir_len = len;
1946 break;
1947 case CRTC_PROP_DEST_SCALER_LUT_SEP:
1948 cfg->sep_lut = lut_data;
1949 cfg->sep_len = len;
1950 break;
1951 default:
1952 ret = -EINVAL;
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05301953 SDE_ERROR("%s:invalid LUT idx(%d)\n", sde_crtc->name, lut_idx);
1954 SDE_EVT32(DRMID(&sde_crtc->base), lut_idx, SDE_EVTLOG_ERROR);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301955 break;
1956 }
1957
Lloyd Atkinsone08229c2017-10-02 17:53:30 -04001958 cfg->is_configured = cfg->dir_lut && cfg->cir_lut && cfg->sep_lut;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301959
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05301960 SDE_EVT32_VERBOSE(DRMID(&sde_crtc->base), ret, lut_idx, len,
1961 cfg->is_configured);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301962 return ret;
1963}
1964
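/**
 * sde_crtc_timeline_status - dump the output fence timeline status
 * @crtc: Pointer to drm crtc structure
 */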
Dhaval Patel2a3c37a2017-10-25 12:30:36 -07001965void sde_crtc_timeline_status(struct drm_crtc *crtc)
1966{
1967 struct sde_crtc *sde_crtc;
1968
1969 if (!crtc) {
1970 SDE_ERROR("invalid crtc\n");
1971 return;
1972 }
1973
1974 sde_crtc = to_sde_crtc(crtc);
1975 sde_fence_timeline_status(&sde_crtc->output_fence, &crtc->base);
1976}
1977
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05301978static int _sde_validate_hw_resources(struct sde_crtc *sde_crtc)
1979{
1980 int i;
1981
1982 /**
1983 * Check if sufficient hw resources are
1984 * available as per target caps & topology
1985 */
1986 if (!sde_crtc) {
1987 SDE_ERROR("invalid argument\n");
1988 return -EINVAL;
1989 }
1990
1991 if (!sde_crtc->num_mixers ||
1992 sde_crtc->num_mixers > CRTC_DUAL_MIXERS) {
		SDE_ERROR("%s: invalid number of mixers: %d\n",
1994 sde_crtc->name, sde_crtc->num_mixers);
1995 SDE_EVT32(DRMID(&sde_crtc->base), sde_crtc->num_mixers,
1996 SDE_EVTLOG_ERROR);
1997 return -EINVAL;
1998 }
1999
2000 for (i = 0; i < sde_crtc->num_mixers; i++) {
2001 if (!sde_crtc->mixers[i].hw_lm || !sde_crtc->mixers[i].hw_ctl
2002 || !sde_crtc->mixers[i].hw_ds) {
2003 SDE_ERROR("%s:insufficient resources for mixer(%d)\n",
2004 sde_crtc->name, i);
2005 SDE_EVT32(DRMID(&sde_crtc->base), sde_crtc->num_mixers,
2006 i, sde_crtc->mixers[i].hw_lm,
2007 sde_crtc->mixers[i].hw_ctl,
2008 sde_crtc->mixers[i].hw_ds, SDE_EVTLOG_ERROR);
2009 return -EINVAL;
2010 }
2011 }
2012
2013 return 0;
2014}
2015
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302016/**
2017 * _sde_crtc_dest_scaler_setup - Set up dest scaler block
2018 * @crtc: Pointer to drm crtc
2019 */
2020static void _sde_crtc_dest_scaler_setup(struct drm_crtc *crtc)
2021{
2022 struct sde_crtc *sde_crtc;
2023 struct sde_crtc_state *cstate;
2024 struct sde_hw_mixer *hw_lm;
2025 struct sde_hw_ctl *hw_ctl;
2026 struct sde_hw_ds *hw_ds;
2027 struct sde_hw_ds_cfg *cfg;
2028 struct sde_kms *kms;
2029 u32 flush_mask = 0, op_mode = 0;
2030 u32 lm_idx = 0, num_mixers = 0;
2031 int i, count = 0;
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302032 bool ds_dirty = false;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302033
2034 if (!crtc)
2035 return;
2036
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302037 sde_crtc = to_sde_crtc(crtc);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302038 cstate = to_sde_crtc_state(crtc->state);
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302039 kms = _sde_crtc_get_kms(crtc);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302040 num_mixers = sde_crtc->num_mixers;
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302041 count = cstate->num_ds;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302042
2043 SDE_DEBUG("crtc%d\n", crtc->base.id);
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302044 SDE_EVT32(DRMID(crtc), num_mixers, count, cstate->ds_dirty,
2045 sde_crtc->ds_reconfig, cstate->num_ds_enabled);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302046
	/*
	 * destination scaler configuration is done either on set property
	 * or on power collapse (idle/suspend)
	 */
2051 ds_dirty = (cstate->ds_dirty || sde_crtc->ds_reconfig);
2052 if (sde_crtc->ds_reconfig) {
2053 SDE_DEBUG("reconfigure dest scaler block\n");
2054 sde_crtc->ds_reconfig = false;
2055 }
2056
2057 if (!ds_dirty) {
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302058 SDE_DEBUG("no change in settings, skip commit\n");
2059 } else if (!kms || !kms->catalog) {
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302060 SDE_ERROR("crtc%d:invalid parameters\n", crtc->base.id);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302061 } else if (!kms->catalog->mdp[0].has_dest_scaler) {
2062 SDE_DEBUG("dest scaler feature not supported\n");
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302063 } else if (_sde_validate_hw_resources(sde_crtc)) {
		/* do nothing */
2065 } else if (!cstate->scl3_lut_cfg.is_configured) {
2066 SDE_ERROR("crtc%d:no LUT data available\n", crtc->base.id);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302067 } else {
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302068 for (i = 0; i < count; i++) {
2069 cfg = &cstate->ds_cfg[i];
2070
2071 if (!cfg->flags)
2072 continue;
2073
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302074 lm_idx = cfg->idx;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302075 hw_lm = sde_crtc->mixers[lm_idx].hw_lm;
2076 hw_ctl = sde_crtc->mixers[lm_idx].hw_ctl;
2077 hw_ds = sde_crtc->mixers[lm_idx].hw_ds;
2078
2079 /* Setup op mode - Dual/single */
2080 if (cfg->flags & SDE_DRM_DESTSCALER_ENABLE)
2081 op_mode |= BIT(hw_ds->idx - DS_0);
2082
2083 if ((i == count-1) && hw_ds->ops.setup_opmode) {
2084 op_mode |= (cstate->num_ds_enabled ==
2085 CRTC_DUAL_MIXERS) ?
2086 SDE_DS_OP_MODE_DUAL : 0;
2087 hw_ds->ops.setup_opmode(hw_ds, op_mode);
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302088 SDE_EVT32_VERBOSE(DRMID(crtc), op_mode);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302089 }
2090
2091 /* Setup scaler */
2092 if ((cfg->flags & SDE_DRM_DESTSCALER_SCALE_UPDATE) ||
2093 (cfg->flags &
2094 SDE_DRM_DESTSCALER_ENHANCER_UPDATE)) {
2095 if (hw_ds->ops.setup_scaler)
2096 hw_ds->ops.setup_scaler(hw_ds,
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302097 &cfg->scl3_cfg,
2098 &cstate->scl3_lut_cfg);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302099
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302100 }
2101
			/*
			 * Dest scaler shares the flush bit of its LM in the
			 * CTL block
			 */
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302105 if (hw_ctl->ops.get_bitmask_mixer) {
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302106 flush_mask = hw_ctl->ops.get_bitmask_mixer(
2107 hw_ctl, hw_lm->idx);
				SDE_DEBUG("Set lm[%d] flush = %d\n",
2109 hw_lm->idx, flush_mask);
2110 hw_ctl->ops.update_pending_flush(hw_ctl,
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302111 flush_mask);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302112 }
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302113 }
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302114 }
2115}
2116
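/**
 * sde_crtc_prepare_commit - cache the connectors attached to this crtc,
 *	reserve a retire event container and prepare the fences ahead of
 *	the atomic commit
 * @crtc: Pointer to drm crtc structure
 * @old_state: Pointer to old crtc state
 */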
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002117void sde_crtc_prepare_commit(struct drm_crtc *crtc,
2118 struct drm_crtc_state *old_state)
Clarence Ip24f80662016-06-13 19:05:32 -04002119{
2120 struct sde_crtc *sde_crtc;
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002121 struct sde_crtc_state *cstate;
2122 struct drm_connector *conn;
Dhaval Patel5023c3c2017-08-22 12:40:11 -07002123 struct sde_crtc_retire_event *retire_event = NULL;
2124 unsigned long flags;
2125 int i;
Clarence Ip24f80662016-06-13 19:05:32 -04002126
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002127 if (!crtc || !crtc->state) {
Clarence Ip24f80662016-06-13 19:05:32 -04002128 SDE_ERROR("invalid crtc\n");
2129 return;
2130 }
2131
2132 sde_crtc = to_sde_crtc(crtc);
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002133 cstate = to_sde_crtc_state(crtc->state);
Dhaval Patel6c666622017-03-21 23:02:59 -07002134 SDE_EVT32_VERBOSE(DRMID(crtc));
Clarence Ip24f80662016-06-13 19:05:32 -04002135
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002136 /* identify connectors attached to this crtc */
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002137 cstate->num_connectors = 0;
2138
2139 drm_for_each_connector(conn, crtc->dev)
2140 if (conn->state && conn->state->crtc == crtc &&
2141 cstate->num_connectors < MAX_CONNECTORS) {
2142 cstate->connectors[cstate->num_connectors++] = conn;
2143 sde_connector_prepare_fence(conn);
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002144 }
2145
Dhaval Patel5023c3c2017-08-22 12:40:11 -07002146 for (i = 0; i < SDE_CRTC_FRAME_EVENT_SIZE; i++) {
2147 retire_event = &sde_crtc->retire_events[i];
2148 if (list_empty(&retire_event->list))
2149 break;
2150 retire_event = NULL;
2151 }
2152
2153 if (retire_event) {
2154 retire_event->num_connectors = cstate->num_connectors;
2155 for (i = 0; i < cstate->num_connectors; i++)
2156 retire_event->connectors[i] = cstate->connectors[i];
2157
2158 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
2159 list_add_tail(&retire_event->list,
2160 &sde_crtc->retire_event_list);
2161 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
2162 } else {
2163 SDE_ERROR("crtc%d retire event overflow\n", crtc->base.id);
2164 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_ERROR);
2165 }
2166
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002167 /* prepare main output fence */
Clarence Ip24f80662016-06-13 19:05:32 -04002168 sde_fence_prepare(&sde_crtc->output_fence);
2169}
2170
Abhinav Kumarf2e94b52017-02-09 20:27:24 -08002171/**
2172 * _sde_crtc_complete_flip - signal pending page_flip events
2173 * Any pending vblank events are added to the vblank_event_list
2174 * so that the next vblank interrupt shall signal them.
2175 * However PAGE_FLIP events are not handled through the vblank_event_list.
 * This API signals any pending PAGE_FLIP events that were requested through
 * DRM_IOCTL_MODE_PAGE_FLIP and cached in sde_crtc->event.
 * If file != NULL, this is the potential cancel-flip path from preclose.
2179 * @crtc: Pointer to drm crtc structure
2180 * @file: Pointer to drm file
2181 */
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002182static void _sde_crtc_complete_flip(struct drm_crtc *crtc,
2183 struct drm_file *file)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002184{
2185 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
2186 struct drm_device *dev = crtc->dev;
2187 struct drm_pending_vblank_event *event;
2188 unsigned long flags;
2189
2190 spin_lock_irqsave(&dev->event_lock, flags);
2191 event = sde_crtc->event;
2192 if (event) {
2193 /* if regular vblank case (!file) or if cancel-flip from
2194 * preclose on file that requested flip, then send the
2195 * event:
2196 */
2197 if (!file || (event->base.file_priv == file)) {
2198 sde_crtc->event = NULL;
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002199 DRM_DEBUG_VBL("%s: send event: %pK\n",
Dhaval Patelec10fad2016-08-22 14:40:48 -07002200 sde_crtc->name, event);
Dhaval Patela5f75952017-07-25 11:17:41 -07002201 SDE_EVT32_VERBOSE(DRMID(crtc));
Lloyd Atkinsonac933642016-09-14 11:52:00 -04002202 drm_crtc_send_vblank_event(crtc, event);
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002203 }
2204 }
2205 spin_unlock_irqrestore(&dev->event_lock, flags);
2206}
2207
Alan Kwong3e985f02017-02-12 15:08:44 -08002208enum sde_intf_mode sde_crtc_get_intf_mode(struct drm_crtc *crtc)
2209{
2210 struct drm_encoder *encoder;
2211
2212 if (!crtc || !crtc->dev) {
2213 SDE_ERROR("invalid crtc\n");
2214 return INTF_MODE_NONE;
2215 }
2216
2217 drm_for_each_encoder(encoder, crtc->dev)
2218 if (encoder->crtc == crtc)
2219 return sde_encoder_get_intf_mode(encoder);
2220
2221 return INTF_MODE_NONE;
2222}
2223
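/**
 * sde_crtc_vblank_cb - crtc vblank callback
 * @data: Pointer to the drm crtc passed as callback data
 *
 * Updates the vblank statistics, notifies the vsync sysfs node, completes
 * any pending page flip and forwards the event to the drm vblank core.
 */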
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002224static void sde_crtc_vblank_cb(void *data)
2225{
2226 struct drm_crtc *crtc = (struct drm_crtc *)data;
Alan Kwong07da0982016-11-04 12:57:45 -04002227 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
2228
2229 /* keep statistics on vblank callback - with auto reset via debugfs */
2230 if (ktime_equal(sde_crtc->vblank_cb_time, ktime_set(0, 0)))
2231 sde_crtc->vblank_cb_time = ktime_get();
2232 else
2233 sde_crtc->vblank_cb_count++;
Dhaval Patel91399a52017-11-27 22:21:27 -08002234
2235 sde_crtc->vblank_last_cb_time = ktime_get();
2236 sysfs_notify_dirent(sde_crtc->vsync_event_sf);
2237
Abhinav Kumarf2e94b52017-02-09 20:27:24 -08002238 _sde_crtc_complete_flip(crtc, NULL);
Lloyd Atkinsonac933642016-09-14 11:52:00 -04002239 drm_crtc_handle_vblank(crtc);
Lloyd Atkinson9eabe7a2016-09-14 13:39:15 -04002240 DRM_DEBUG_VBL("crtc%d\n", crtc->base.id);
Dhaval Patel6c666622017-03-21 23:02:59 -07002241 SDE_EVT32_VERBOSE(DRMID(crtc));
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002242}
2243
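/**
 * _sde_crtc_retire_event - signal retire completion for the oldest queued
 *	retire event
 * @crtc: Pointer to drm crtc structure
 * @ts: Timestamp forwarded to the connector commit completion
 *
 * Must not be called with sde_crtc->spin_lock held; the lock is acquired
 * internally while dequeuing the event.
 */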
Dhaval Patel5023c3c2017-08-22 12:40:11 -07002244static void _sde_crtc_retire_event(struct drm_crtc *crtc, ktime_t ts)
2245{
2246 struct sde_crtc_retire_event *retire_event;
2247 struct sde_crtc *sde_crtc;
2248 unsigned long flags;
2249 int i;
2250
2251 if (!crtc) {
2252 SDE_ERROR("invalid param\n");
2253 return;
2254 }
2255
2256 sde_crtc = to_sde_crtc(crtc);
2257 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
2258 retire_event = list_first_entry_or_null(&sde_crtc->retire_event_list,
2259 struct sde_crtc_retire_event, list);
2260 if (retire_event)
2261 list_del_init(&retire_event->list);
2262 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
2263
2264 if (!retire_event) {
2265 SDE_ERROR("crtc%d retire event without kickoff\n",
2266 crtc->base.id);
2267 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_ERROR);
2268 return;
2269 }
2270
2271 SDE_ATRACE_BEGIN("signal_retire_fence");
2272 for (i = 0; (i < retire_event->num_connectors) &&
2273 retire_event->connectors[i]; ++i)
2274 sde_connector_complete_commit(
2275 retire_event->connectors[i], ts);
2276 SDE_ATRACE_END("signal_retire_fence");
2277}
2278
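/**
 * sde_crtc_frame_event_work - kthread worker that processes frame events
 * @work: Pointer to the kthread work embedded in a sde_crtc_frame_event
 *
 * Releases bandwidth once the last pending frame is done, signals the
 * release and retire fences, completes the frame_done completion and
 * returns the event container to the free list.
 */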
Alan Kwong628d19e2016-10-31 13:50:13 -04002279static void sde_crtc_frame_event_work(struct kthread_work *work)
2280{
Alan Kwong67a3f792016-11-01 23:16:53 -04002281 struct msm_drm_private *priv;
Alan Kwong628d19e2016-10-31 13:50:13 -04002282 struct sde_crtc_frame_event *fevent;
2283 struct drm_crtc *crtc;
2284 struct sde_crtc *sde_crtc;
2285 struct sde_kms *sde_kms;
2286 unsigned long flags;
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002287 bool frame_done = false;
Alan Kwong628d19e2016-10-31 13:50:13 -04002288
2289 if (!work) {
2290 SDE_ERROR("invalid work handle\n");
2291 return;
2292 }
2293
2294 fevent = container_of(work, struct sde_crtc_frame_event, work);
Alan Kwonga1939682017-05-05 11:30:08 -07002295 if (!fevent->crtc || !fevent->crtc->state) {
Alan Kwong628d19e2016-10-31 13:50:13 -04002296 SDE_ERROR("invalid crtc\n");
2297 return;
2298 }
2299
2300 crtc = fevent->crtc;
2301 sde_crtc = to_sde_crtc(crtc);
2302
2303 sde_kms = _sde_crtc_get_kms(crtc);
2304 if (!sde_kms) {
2305 SDE_ERROR("invalid kms handle\n");
2306 return;
2307 }
Alan Kwong67a3f792016-11-01 23:16:53 -04002308 priv = sde_kms->dev->dev_private;
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07002309 SDE_ATRACE_BEGIN("crtc_frame_event");
Alan Kwong628d19e2016-10-31 13:50:13 -04002310
2311 SDE_DEBUG("crtc%d event:%u ts:%lld\n", crtc->base.id, fevent->event,
2312 ktime_to_ns(fevent->ts));
2313
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002314 SDE_EVT32_VERBOSE(DRMID(crtc), fevent->event, SDE_EVTLOG_FUNC_ENTRY);
2315
2316 if (fevent->event & (SDE_ENCODER_FRAME_EVENT_DONE
2317 | SDE_ENCODER_FRAME_EVENT_ERROR
2318 | SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
Alan Kwong628d19e2016-10-31 13:50:13 -04002319
2320 if (atomic_read(&sde_crtc->frame_pending) < 1) {
2321 /* this should not happen */
2322 SDE_ERROR("crtc%d ts:%lld invalid frame_pending:%d\n",
2323 crtc->base.id,
2324 ktime_to_ns(fevent->ts),
2325 atomic_read(&sde_crtc->frame_pending));
Dhaval Patel6c666622017-03-21 23:02:59 -07002326 SDE_EVT32(DRMID(crtc), fevent->event,
2327 SDE_EVTLOG_FUNC_CASE1);
Alan Kwong628d19e2016-10-31 13:50:13 -04002328 } else if (atomic_dec_return(&sde_crtc->frame_pending) == 0) {
2329 /* release bandwidth and other resources */
2330 SDE_DEBUG("crtc%d ts:%lld last pending\n",
2331 crtc->base.id,
2332 ktime_to_ns(fevent->ts));
Dhaval Patel6c666622017-03-21 23:02:59 -07002333 SDE_EVT32(DRMID(crtc), fevent->event,
2334 SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07002335 sde_core_perf_crtc_release_bw(crtc);
Alan Kwong628d19e2016-10-31 13:50:13 -04002336 } else {
Dhaval Patel6c666622017-03-21 23:02:59 -07002337 SDE_EVT32_VERBOSE(DRMID(crtc), fevent->event,
2338 SDE_EVTLOG_FUNC_CASE3);
Alan Kwong628d19e2016-10-31 13:50:13 -04002339 }
Alan Kwonga1939682017-05-05 11:30:08 -07002340
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002341 if (fevent->event & (SDE_ENCODER_FRAME_EVENT_DONE
2342 | SDE_ENCODER_FRAME_EVENT_ERROR))
2343 frame_done = true;
2344 }
2345
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07002346 if (fevent->event & SDE_ENCODER_FRAME_EVENT_SIGNAL_RELEASE_FENCE) {
2347 SDE_ATRACE_BEGIN("signal_release_fence");
Dhaval Patelfd8f7742017-08-10 13:11:22 -07002348 sde_fence_signal(&sde_crtc->output_fence, fevent->ts, false);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07002349 SDE_ATRACE_END("signal_release_fence");
2350 }
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002351
Dhaval Patel5023c3c2017-08-22 12:40:11 -07002352 if (fevent->event & SDE_ENCODER_FRAME_EVENT_SIGNAL_RETIRE_FENCE)
2353 /* this api should be called without spin_lock */
2354 _sde_crtc_retire_event(crtc, fevent->ts);
Alan Kwong628d19e2016-10-31 13:50:13 -04002355
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05002356 if (fevent->event & SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)
2357 SDE_ERROR("crtc%d ts:%lld received panel dead event\n",
2358 crtc->base.id, ktime_to_ns(fevent->ts));
2359
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002360 if (frame_done)
2361 complete_all(&sde_crtc->frame_done_comp);
2362
Alan Kwong628d19e2016-10-31 13:50:13 -04002363 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
2364 list_add_tail(&fevent->list, &sde_crtc->frame_event_list);
2365 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07002366 SDE_ATRACE_END("crtc_frame_event");
Alan Kwong628d19e2016-10-31 13:50:13 -04002367}
2368
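/**
 * sde_crtc_frame_event_cb - frame event callback
 * @data: Pointer to the drm crtc passed as callback data
 * @event: Bitmask of SDE_ENCODER_FRAME_EVENT_* flags
 *
 * Grabs a free event container and queues it to the crtc's event thread
 * for processing in sde_crtc_frame_event_work().
 */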
2369static void sde_crtc_frame_event_cb(void *data, u32 event)
2370{
2371 struct drm_crtc *crtc = (struct drm_crtc *)data;
2372 struct sde_crtc *sde_crtc;
2373 struct msm_drm_private *priv;
Alan Kwong628d19e2016-10-31 13:50:13 -04002374 struct sde_crtc_frame_event *fevent;
2375 unsigned long flags;
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07002376 u32 crtc_id;
Alan Kwong628d19e2016-10-31 13:50:13 -04002377
2378 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
2379 SDE_ERROR("invalid parameters\n");
2380 return;
2381 }
2382 sde_crtc = to_sde_crtc(crtc);
2383 priv = crtc->dev->dev_private;
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07002384 crtc_id = drm_crtc_index(crtc);
Alan Kwong628d19e2016-10-31 13:50:13 -04002385
2386 SDE_DEBUG("crtc%d\n", crtc->base.id);
Ingrid Gallardo79b44392017-05-30 16:30:52 -07002387 SDE_EVT32_VERBOSE(DRMID(crtc), event);
Alan Kwong628d19e2016-10-31 13:50:13 -04002388
2389 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
Lloyd Atkinson78831f82016-12-09 11:24:56 -05002390 fevent = list_first_entry_or_null(&sde_crtc->frame_event_list,
2391 struct sde_crtc_frame_event, list);
2392 if (fevent)
2393 list_del_init(&fevent->list);
Alan Kwong628d19e2016-10-31 13:50:13 -04002394 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
2395
Lloyd Atkinson78831f82016-12-09 11:24:56 -05002396 if (!fevent) {
Alan Kwong628d19e2016-10-31 13:50:13 -04002397 SDE_ERROR("crtc%d event %d overflow\n",
2398 crtc->base.id, event);
2399 SDE_EVT32(DRMID(crtc), event);
2400 return;
2401 }
2402
Alan Kwong628d19e2016-10-31 13:50:13 -04002403 fevent->event = event;
2404 fevent->crtc = crtc;
2405 fevent->ts = ktime_get();
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07002406 kthread_queue_work(&priv->event_thread[crtc_id].worker, &fevent->work);
Alan Kwong628d19e2016-10-31 13:50:13 -04002407}
2408
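/**
 * sde_crtc_complete_commit - perform post-commit bookkeeping, currently a
 *	core performance (bandwidth/clock) update for the crtc
 * @crtc: Pointer to drm crtc structure
 * @old_state: Pointer to old crtc state
 */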
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07002409void sde_crtc_complete_commit(struct drm_crtc *crtc,
2410 struct drm_crtc_state *old_state)
2411{
2412 struct sde_crtc *sde_crtc;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07002413
2414 if (!crtc || !crtc->state) {
2415 SDE_ERROR("invalid crtc\n");
2416 return;
2417 }
2418
2419 sde_crtc = to_sde_crtc(crtc);
2420 SDE_EVT32_VERBOSE(DRMID(crtc));
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07002421
Clarence Ip12755012018-01-19 17:45:51 -05002422 sde_core_perf_crtc_update(crtc, 0, false);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07002423}
2424
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002425/**
 * _sde_crtc_set_input_fence_timeout - update ns version of input fence timeout
2427 * @cstate: Pointer to sde crtc state
2428 */
2429static void _sde_crtc_set_input_fence_timeout(struct sde_crtc_state *cstate)
2430{
2431 if (!cstate) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07002432 SDE_ERROR("invalid cstate\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002433 return;
2434 }
2435 cstate->input_fence_timeout_ns =
2436 sde_crtc_get_property(cstate, CRTC_PROP_INPUT_FENCE_TIMEOUT);
2437 cstate->input_fence_timeout_ns *= NSEC_PER_MSEC;
2438}
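/*
 * Illustrative conversion (the property value below is hypothetical):
 * an INPUT_FENCE_TIMEOUT property of 10000 ms yields
 *	input_fence_timeout_ns = 10000 * NSEC_PER_MSEC = 10000000000 ns
 */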
2439
2440/**
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002441 * _sde_crtc_set_dim_layer_v1 - copy dim layer settings from userspace
2442 * @cstate: Pointer to sde crtc state
 * @usr_ptr: User ptr for sde_drm_dim_layer_v1 struct
2444 */
2445static void _sde_crtc_set_dim_layer_v1(struct sde_crtc_state *cstate,
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302446 void __user *usr_ptr)
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002447{
2448 struct sde_drm_dim_layer_v1 dim_layer_v1;
2449 struct sde_drm_dim_layer_cfg *user_cfg;
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002450 struct sde_hw_dim_layer *dim_layer;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002451 u32 count, i;
2452
2453 if (!cstate) {
2454 SDE_ERROR("invalid cstate\n");
2455 return;
2456 }
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002457 dim_layer = cstate->dim_layer;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002458
2459 if (!usr_ptr) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002460 SDE_DEBUG("dim_layer data removed\n");
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002461 return;
2462 }
2463
2464 if (copy_from_user(&dim_layer_v1, usr_ptr, sizeof(dim_layer_v1))) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002465 SDE_ERROR("failed to copy dim_layer data\n");
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002466 return;
2467 }
2468
2469 count = dim_layer_v1.num_layers;
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002470 if (count > SDE_MAX_DIM_LAYERS) {
2471 SDE_ERROR("invalid number of dim_layers:%d", count);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002472 return;
2473 }
2474
2475 /* populate from user space */
2476 cstate->num_dim_layers = count;
2477 for (i = 0; i < count; i++) {
2478 user_cfg = &dim_layer_v1.layer_cfg[i];
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002479
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002480 dim_layer[i].flags = user_cfg->flags;
2481 dim_layer[i].stage = user_cfg->stage + SDE_STAGE_0;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002482
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002483 dim_layer[i].rect.x = user_cfg->rect.x1;
2484 dim_layer[i].rect.y = user_cfg->rect.y1;
2485 dim_layer[i].rect.w = user_cfg->rect.x2 - user_cfg->rect.x1;
2486 dim_layer[i].rect.h = user_cfg->rect.y2 - user_cfg->rect.y1;
2487
2488 dim_layer[i].color_fill = (struct sde_mdss_color) {
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002489 user_cfg->color_fill.color_0,
2490 user_cfg->color_fill.color_1,
2491 user_cfg->color_fill.color_2,
2492 user_cfg->color_fill.color_3,
2493 };
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002494
2495 SDE_DEBUG("dim_layer[%d] - flags:%d, stage:%d\n",
2496 i, dim_layer[i].flags, dim_layer[i].stage);
2497 SDE_DEBUG(" rect:{%d,%d,%d,%d}, color:{%d,%d,%d,%d}\n",
2498 dim_layer[i].rect.x, dim_layer[i].rect.y,
2499 dim_layer[i].rect.w, dim_layer[i].rect.h,
2500 dim_layer[i].color_fill.color_0,
2501 dim_layer[i].color_fill.color_1,
2502 dim_layer[i].color_fill.color_2,
2503 dim_layer[i].color_fill.color_3);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002504 }
2505}
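/*
 * Illustrative mapping (user values are hypothetical): a userspace rect of
 * {x1 = 0, y1 = 0, x2 = 1080, y2 = 100} becomes the sde rect
 * {x = 0, y = 0, w = 1080, h = 100}, since w = x2 - x1 and h = y2 - y1.
 */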
2506
2507/**
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302508 * _sde_crtc_set_dest_scaler - copy dest scaler settings from userspace
2509 * @sde_crtc : Pointer to sde crtc
2510 * @cstate : Pointer to sde crtc state
2511 * @usr_ptr: User ptr for sde_drm_dest_scaler_data struct
2512 */
2513static int _sde_crtc_set_dest_scaler(struct sde_crtc *sde_crtc,
2514 struct sde_crtc_state *cstate,
2515 void __user *usr_ptr)
2516{
2517 struct sde_drm_dest_scaler_data ds_data;
2518 struct sde_drm_dest_scaler_cfg *ds_cfg_usr;
2519 struct sde_drm_scaler_v2 scaler_v2;
2520 void __user *scaler_v2_usr;
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302521 int i, count;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302522
2523 if (!sde_crtc || !cstate) {
2524 SDE_ERROR("invalid sde_crtc/state\n");
2525 return -EINVAL;
2526 }
2527
2528 SDE_DEBUG("crtc %s\n", sde_crtc->name);
2529
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302530 if (!usr_ptr) {
2531 SDE_DEBUG("ds data removed\n");
2532 return 0;
2533 }
2534
2535 if (copy_from_user(&ds_data, usr_ptr, sizeof(ds_data))) {
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302536 SDE_ERROR("%s:failed to copy dest scaler data from user\n",
2537 sde_crtc->name);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302538 return -EINVAL;
2539 }
2540
2541 count = ds_data.num_dest_scaler;
Sravanthi Kollukuduru4ff41642017-10-06 18:17:34 +05302542 if (!count) {
2543 SDE_DEBUG("no ds data available\n");
2544 return 0;
2545 }
2546
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302547 if (count > SDE_MAX_DS_COUNT) {
2548 SDE_ERROR("%s: invalid config: num_ds(%d) max(%d)\n",
2549 sde_crtc->name, count, SDE_MAX_DS_COUNT);
2550 SDE_EVT32(DRMID(&sde_crtc->base), count, SDE_EVTLOG_ERROR);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302551 return -EINVAL;
2552 }
2553
2554 /* Populate from user space */
2555 for (i = 0; i < count; i++) {
2556 ds_cfg_usr = &ds_data.ds_cfg[i];
2557
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302558 cstate->ds_cfg[i].idx = ds_cfg_usr->index;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302559 cstate->ds_cfg[i].flags = ds_cfg_usr->flags;
2560 cstate->ds_cfg[i].lm_width = ds_cfg_usr->lm_width;
2561 cstate->ds_cfg[i].lm_height = ds_cfg_usr->lm_height;
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302562 memset(&scaler_v2, 0, sizeof(scaler_v2));
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302563
2564 if (ds_cfg_usr->scaler_cfg) {
2565 scaler_v2_usr =
2566 (void __user *)((uintptr_t)ds_cfg_usr->scaler_cfg);
2567
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302568 if (copy_from_user(&scaler_v2, scaler_v2_usr,
2569 sizeof(scaler_v2))) {
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302570 SDE_ERROR("%s:scaler: copy from user failed\n",
2571 sde_crtc->name);
2572 return -EINVAL;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302573 }
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302574 }
2575
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302576 sde_set_scaler_v2(&cstate->ds_cfg[i].scl3_cfg, &scaler_v2);
2577
2578 SDE_DEBUG("en(%d)dir(%d)de(%d) src(%dx%d) dst(%dx%d)\n",
2579 scaler_v2.enable, scaler_v2.dir_en, scaler_v2.de.enable,
2580 scaler_v2.src_width[0], scaler_v2.src_height[0],
2581 scaler_v2.dst_width, scaler_v2.dst_height);
2582 SDE_EVT32_VERBOSE(DRMID(&sde_crtc->base),
2583 scaler_v2.enable, scaler_v2.dir_en, scaler_v2.de.enable,
2584 scaler_v2.src_width[0], scaler_v2.src_height[0],
2585 scaler_v2.dst_width, scaler_v2.dst_height);
2586
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302587 SDE_DEBUG("ds cfg[%d]-ndx(%d) flags(%d) lm(%dx%d)\n",
2588 i, ds_cfg_usr->index, ds_cfg_usr->flags,
2589 ds_cfg_usr->lm_width, ds_cfg_usr->lm_height);
2590 SDE_EVT32_VERBOSE(DRMID(&sde_crtc->base), i, ds_cfg_usr->index,
2591 ds_cfg_usr->flags, ds_cfg_usr->lm_width,
2592 ds_cfg_usr->lm_height);
2593 }
2594
2595 cstate->num_ds = count;
2596 cstate->ds_dirty = true;
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302597 SDE_EVT32_VERBOSE(DRMID(&sde_crtc->base), count, cstate->ds_dirty);
2598
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302599 return 0;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302600}
2601
2602/**
2603 * _sde_crtc_check_dest_scaler_data - validate the dest scaler data
2604 * @crtc : Pointer to drm crtc
2605 * @state : Pointer to drm crtc state
2606 */
2607static int _sde_crtc_check_dest_scaler_data(struct drm_crtc *crtc,
2608 struct drm_crtc_state *state)
2609{
2610 struct sde_crtc *sde_crtc;
2611 struct sde_crtc_state *cstate;
2612 struct drm_display_mode *mode;
2613 struct sde_kms *kms;
2614 struct sde_hw_ds *hw_ds;
2615 struct sde_hw_ds_cfg *cfg;
2616 u32 i, ret = 0, lm_idx;
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302617 u32 num_ds_enable = 0, hdisplay = 0;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302618 u32 max_in_width = 0, max_out_width = 0;
2619 u32 prev_lm_width = 0, prev_lm_height = 0;
2620
2621 if (!crtc || !state)
2622 return -EINVAL;
2623
2624 sde_crtc = to_sde_crtc(crtc);
2625 cstate = to_sde_crtc_state(state);
2626 kms = _sde_crtc_get_kms(crtc);
2627 mode = &state->adjusted_mode;
2628
2629 SDE_DEBUG("crtc%d\n", crtc->base.id);
2630
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302631 if (!cstate->ds_dirty) {
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302632 SDE_DEBUG("dest scaler property not set, skip validation\n");
2633 return 0;
2634 }
2635
2636 if (!kms || !kms->catalog) {
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302637 SDE_ERROR("crtc%d: invalid parameters\n", crtc->base.id);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302638 return -EINVAL;
2639 }
2640
2641 if (!kms->catalog->mdp[0].has_dest_scaler) {
2642 SDE_DEBUG("dest scaler feature not supported\n");
2643 return 0;
2644 }
2645
2646 if (!sde_crtc->num_mixers) {
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302647 SDE_DEBUG("mixers not allocated\n");
2648 return 0;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302649 }
2650
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302651 ret = _sde_validate_hw_resources(sde_crtc);
2652 if (ret)
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302653 goto err;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302654
	/*
	 * The number of dest scalers must not exceed the hw DS block count
	 * and must match the number of mixers, unless it is a partial update
	 * left-only/right-only use case - currently PU + DS is not supported
	 */
2660 if (cstate->num_ds > kms->catalog->ds_count ||
2661 ((cstate->num_ds != sde_crtc->num_mixers) &&
2662 !(cstate->ds_cfg[0].flags & SDE_DRM_DESTSCALER_PU_ENABLE))) {
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302663 SDE_ERROR("crtc%d: num_ds(%d), hw_ds_cnt(%d) flags(%d)\n",
2664 crtc->base.id, cstate->num_ds, kms->catalog->ds_count,
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302665 cstate->ds_cfg[0].flags);
2666 ret = -EINVAL;
2667 goto err;
2668 }
2669
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302670 /**
2671 * Check if DS needs to be enabled or disabled
2672 * In case of enable, validate the data
2673 */
2674 if (!(cstate->ds_cfg[0].flags & SDE_DRM_DESTSCALER_ENABLE)) {
2675 SDE_DEBUG("disable dest scaler, num(%d) flags(%d)\n",
2676 cstate->num_ds, cstate->ds_cfg[0].flags);
2677 goto disable;
2678 }
2679
2680 /* Display resolution */
2681 hdisplay = mode->hdisplay/sde_crtc->num_mixers;
2682
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302683 /* Validate the DS data */
2684 for (i = 0; i < cstate->num_ds; i++) {
2685 cfg = &cstate->ds_cfg[i];
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302686 lm_idx = cfg->idx;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302687
2688 /**
2689 * Validate against topology
2690 * No of dest scalers should match the num of mixers
2691 * unless it is partial update left only/right only use case
2692 */
2693 if (lm_idx >= sde_crtc->num_mixers || (i != lm_idx &&
2694 !(cfg->flags & SDE_DRM_DESTSCALER_PU_ENABLE))) {
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302695 SDE_ERROR("crtc%d: ds_cfg id(%d):idx(%d), flags(%d)\n",
2696 crtc->base.id, i, lm_idx, cfg->flags);
2697 SDE_EVT32(DRMID(crtc), i, lm_idx, cfg->flags,
2698 SDE_EVTLOG_ERROR);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302699 ret = -EINVAL;
2700 goto err;
2701 }
2702
2703 hw_ds = sde_crtc->mixers[lm_idx].hw_ds;
2704
2705 if (!max_in_width && !max_out_width) {
2706 max_in_width = hw_ds->scl->top->maxinputwidth;
2707 max_out_width = hw_ds->scl->top->maxoutputwidth;
2708
2709 if (cstate->num_ds == CRTC_DUAL_MIXERS)
2710 max_in_width -= SDE_DS_OVERFETCH_SIZE;
2711
2712 SDE_DEBUG("max DS width [%d,%d] for num_ds = %d\n",
2713 max_in_width, max_out_width, cstate->num_ds);
2714 }
2715
2716 /* Check LM width and height */
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302717 if (cfg->lm_width > hdisplay || cfg->lm_height > mode->vdisplay
2718 || !cfg->lm_width || !cfg->lm_height) {
2719 SDE_ERROR("crtc%d: lm size[%d,%d] display [%d,%d]\n",
2720 crtc->base.id, cfg->lm_width, cfg->lm_height,
2721 hdisplay, mode->vdisplay);
2722 SDE_EVT32(DRMID(crtc), cfg->lm_width, cfg->lm_height,
2723 hdisplay, mode->vdisplay, SDE_EVTLOG_ERROR);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302724 ret = -E2BIG;
2725 goto err;
2726 }
2727
2728 if (!prev_lm_width && !prev_lm_height) {
2729 prev_lm_width = cfg->lm_width;
2730 prev_lm_height = cfg->lm_height;
2731 } else {
2732 if (cfg->lm_width != prev_lm_width ||
2733 cfg->lm_height != prev_lm_height) {
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302734 SDE_ERROR("crtc%d:lm left[%d,%d]right[%d %d]\n",
2735 crtc->base.id, cfg->lm_width,
2736 cfg->lm_height, prev_lm_width,
2737 prev_lm_height);
2738 SDE_EVT32(DRMID(crtc), cfg->lm_width,
2739 cfg->lm_height, prev_lm_width,
2740 prev_lm_height, SDE_EVTLOG_ERROR);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302741 ret = -EINVAL;
2742 goto err;
2743 }
2744 }
2745
2746 /* Check scaler data */
2747 if (cfg->flags & SDE_DRM_DESTSCALER_SCALE_UPDATE ||
2748 cfg->flags & SDE_DRM_DESTSCALER_ENHANCER_UPDATE) {
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302749
2750 /**
2751 * Scaler src and dst width shouldn't exceed the maximum
2752 * width limitation. Also, if there is no partial update
2753 * dst width and height must match display resolution.
2754 */
2755 if (cfg->scl3_cfg.src_width[0] > max_in_width ||
2756 cfg->scl3_cfg.dst_width > max_out_width ||
2757 !cfg->scl3_cfg.src_width[0] ||
2758 !cfg->scl3_cfg.dst_width ||
2759 (!(cfg->flags & SDE_DRM_DESTSCALER_PU_ENABLE)
2760 && (cfg->scl3_cfg.dst_width != hdisplay ||
2761 cfg->scl3_cfg.dst_height != mode->vdisplay))) {
2762 SDE_ERROR("crtc%d: ", crtc->base.id);
2763 SDE_ERROR("src_w(%d) dst(%dx%d) display(%dx%d)",
2764 cfg->scl3_cfg.src_width[0],
2765 cfg->scl3_cfg.dst_width,
2766 cfg->scl3_cfg.dst_height,
2767 hdisplay, mode->vdisplay);
2768 SDE_ERROR("num_mixers(%d) flags(%d) ds-%d:\n",
2769 sde_crtc->num_mixers, cfg->flags,
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302770 hw_ds->idx - DS_0);
2771 SDE_ERROR("scale_en = %d, DE_en =%d\n",
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302772 cfg->scl3_cfg.enable,
2773 cfg->scl3_cfg.de.enable);
2774
2775 SDE_EVT32(DRMID(crtc), cfg->scl3_cfg.enable,
2776 cfg->scl3_cfg.de.enable, cfg->flags,
2777 max_in_width, max_out_width,
2778 cfg->scl3_cfg.src_width[0],
2779 cfg->scl3_cfg.dst_width,
2780 cfg->scl3_cfg.dst_height, hdisplay,
2781 mode->vdisplay, sde_crtc->num_mixers,
2782 SDE_EVTLOG_ERROR);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302783
2784 cfg->flags &=
2785 ~SDE_DRM_DESTSCALER_SCALE_UPDATE;
2786 cfg->flags &=
2787 ~SDE_DRM_DESTSCALER_ENHANCER_UPDATE;
2788
2789 ret = -EINVAL;
2790 goto err;
2791 }
2792 }
2793
2794 if (cfg->flags & SDE_DRM_DESTSCALER_ENABLE)
2795 num_ds_enable++;
2796
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302797 SDE_DEBUG("ds[%d]: flags[0x%X]\n",
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302798 hw_ds->idx - DS_0, cfg->flags);
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302799 SDE_EVT32_VERBOSE(DRMID(crtc), hw_ds->idx - DS_0, cfg->flags);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302800 }
2801
2802disable:
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302803 SDE_DEBUG("dest scaler status : %d -> %d\n",
2804 cstate->num_ds_enabled, num_ds_enable);
2805 SDE_EVT32_VERBOSE(DRMID(crtc), cstate->num_ds_enabled, num_ds_enable,
2806 cstate->num_ds, cstate->ds_dirty);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302807
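	/* refresh cached DS enable count and dirty flag when the count changes */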
2808 if (cstate->num_ds_enabled != num_ds_enable) {
2809 /* Disabling destination scaler */
2810 if (!num_ds_enable) {
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302811 for (i = 0; i < cstate->num_ds; i++) {
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302812 cfg = &cstate->ds_cfg[i];
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302813 cfg->idx = i;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302814 /* Update scaler settings in disable case */
2815 cfg->flags = SDE_DRM_DESTSCALER_SCALE_UPDATE;
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302816 cfg->scl3_cfg.enable = 0;
2817 cfg->scl3_cfg.de.enable = 0;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302818 }
2819 }
2820 cstate->num_ds_enabled = num_ds_enable;
2821 cstate->ds_dirty = true;
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05302822 } else {
2823 if (!cstate->num_ds_enabled)
2824 cstate->ds_dirty = false;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302825 }
2826
2827 return 0;
2828
2829err:
2830 cstate->ds_dirty = false;
2831 return ret;
2832}
2833
2834/**
Clarence Ipcae1bb62016-07-07 12:07:13 -04002835 * _sde_crtc_wait_for_fences - wait for incoming framebuffer sync fences
2836 * @crtc: Pointer to CRTC object
2837 */
2838static void _sde_crtc_wait_for_fences(struct drm_crtc *crtc)
2839{
2840 struct drm_plane *plane = NULL;
2841 uint32_t wait_ms = 1;
Clarence Ip8dedc232016-09-09 16:41:00 -04002842 ktime_t kt_end, kt_wait;
Dhaval Patel39323d42017-03-01 23:48:24 -08002843 int rc = 0;
Clarence Ipcae1bb62016-07-07 12:07:13 -04002844
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002845 SDE_DEBUG("\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002846
2847 if (!crtc || !crtc->state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07002848 SDE_ERROR("invalid crtc/state %pK\n", crtc);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002849 return;
2850 }
2851
2852 /* use monotonic timer to limit total fence wait time */
Clarence Ip8dedc232016-09-09 16:41:00 -04002853 kt_end = ktime_add_ns(ktime_get(),
2854 to_sde_crtc_state(crtc->state)->input_fence_timeout_ns);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002855
2856 /*
2857 * Wait for fences sequentially, as all of them need to be signalled
2858 * before we can proceed.
2859 *
2860 * Limit total wait time to INPUT_FENCE_TIMEOUT, but still call
2861 * sde_plane_wait_input_fence with wait_ms == 0 after the timeout so
2862 * that each plane can check its fence status and react appropriately
Dhaval Patel39323d42017-03-01 23:48:24 -08002863	 * if its fence has timed out. Call the input fence wait again if the
2864	 * wait is interrupted by a signal (-ERESTARTSYS).
Clarence Ipcae1bb62016-07-07 12:07:13 -04002865 */
Narendra Muppalla77b32932017-05-10 13:53:11 -07002866 SDE_ATRACE_BEGIN("plane_wait_input_fence");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002867 drm_atomic_crtc_for_each_plane(plane, crtc) {
Dhaval Patel39323d42017-03-01 23:48:24 -08002868 do {
Clarence Ip8dedc232016-09-09 16:41:00 -04002869 kt_wait = ktime_sub(kt_end, ktime_get());
2870 if (ktime_compare(kt_wait, ktime_set(0, 0)) >= 0)
2871 wait_ms = ktime_to_ms(kt_wait);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002872 else
2873 wait_ms = 0;
Dhaval Patel39323d42017-03-01 23:48:24 -08002874
2875 rc = sde_plane_wait_input_fence(plane, wait_ms);
2876 } while (wait_ms && rc == -ERESTARTSYS);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002877 }
Narendra Muppalla77b32932017-05-10 13:53:11 -07002878 SDE_ATRACE_END("plane_wait_input_fence");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002879}
2880
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002881static void _sde_crtc_setup_mixer_for_encoder(
2882 struct drm_crtc *crtc,
2883 struct drm_encoder *enc)
2884{
2885 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002886 struct sde_kms *sde_kms = _sde_crtc_get_kms(crtc);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002887 struct sde_rm *rm = &sde_kms->rm;
2888 struct sde_crtc_mixer *mixer;
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002889 struct sde_hw_ctl *last_valid_ctl = NULL;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002890 int i;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302891 struct sde_rm_hw_iter lm_iter, ctl_iter, dspp_iter, ds_iter;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002892
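	/* iterate the LM/CTL/DSPP/DS blocks reserved in the RM for this encoder */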
2893 sde_rm_init_hw_iter(&lm_iter, enc->base.id, SDE_HW_BLK_LM);
2894 sde_rm_init_hw_iter(&ctl_iter, enc->base.id, SDE_HW_BLK_CTL);
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07002895 sde_rm_init_hw_iter(&dspp_iter, enc->base.id, SDE_HW_BLK_DSPP);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302896 sde_rm_init_hw_iter(&ds_iter, enc->base.id, SDE_HW_BLK_DS);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002897
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002898 /* Set up all the mixers and ctls reserved by this encoder */
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002899 for (i = sde_crtc->num_mixers; i < ARRAY_SIZE(sde_crtc->mixers); i++) {
2900 mixer = &sde_crtc->mixers[i];
2901
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002902 if (!sde_rm_get_hw(rm, &lm_iter))
2903 break;
2904 mixer->hw_lm = (struct sde_hw_mixer *)lm_iter.hw;
2905
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002906		/* CTLs may be fewer than LMs; if so, one CTL drives multiple LMs */
2907 if (!sde_rm_get_hw(rm, &ctl_iter)) {
2908 SDE_DEBUG("no ctl assigned to lm %d, using previous\n",
Clarence Ip8e69ad02016-12-09 09:43:57 -05002909 mixer->hw_lm->idx - LM_0);
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002910 mixer->hw_ctl = last_valid_ctl;
2911 } else {
2912 mixer->hw_ctl = (struct sde_hw_ctl *)ctl_iter.hw;
2913 last_valid_ctl = mixer->hw_ctl;
2914 }
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002915
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002916 /* Shouldn't happen, mixers are always >= ctls */
2917 if (!mixer->hw_ctl) {
2918 SDE_ERROR("no valid ctls found for lm %d\n",
Clarence Ip8e69ad02016-12-09 09:43:57 -05002919 mixer->hw_lm->idx - LM_0);
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002920 return;
2921 }
2922
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07002923 /* Dspp may be null */
2924 (void) sde_rm_get_hw(rm, &dspp_iter);
2925 mixer->hw_dspp = (struct sde_hw_dspp *)dspp_iter.hw;
2926
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302927 /* DS may be null */
2928 (void) sde_rm_get_hw(rm, &ds_iter);
2929 mixer->hw_ds = (struct sde_hw_ds *)ds_iter.hw;
2930
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002931 mixer->encoder = enc;
2932
2933 sde_crtc->num_mixers++;
Clarence Ipd9f9fa62016-09-09 13:42:32 -04002934 SDE_DEBUG("setup mixer %d: lm %d\n",
2935 i, mixer->hw_lm->idx - LM_0);
2936 SDE_DEBUG("setup mixer %d: ctl %d\n",
2937 i, mixer->hw_ctl->idx - CTL_0);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302938 if (mixer->hw_ds)
2939 SDE_DEBUG("setup mixer %d: ds %d\n",
2940 i, mixer->hw_ds->idx - DS_0);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002941 }
2942}
2943
2944static void _sde_crtc_setup_mixers(struct drm_crtc *crtc)
2945{
2946 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
2947 struct drm_encoder *enc;
2948
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002949 sde_crtc->num_mixers = 0;
Lloyd Atkinson94710bc2017-09-14 14:10:09 -04002950 sde_crtc->mixers_swapped = false;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002951 memset(sde_crtc->mixers, 0, sizeof(sde_crtc->mixers));
2952
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07002953 mutex_lock(&sde_crtc->crtc_lock);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002954 /* Check for mixers on all encoders attached to this crtc */
2955 list_for_each_entry(enc, &crtc->dev->mode_config.encoder_list, head) {
2956 if (enc->crtc != crtc)
2957 continue;
2958
2959 _sde_crtc_setup_mixer_for_encoder(crtc, enc);
2960 }
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05002961
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07002962 mutex_unlock(&sde_crtc->crtc_lock);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002963}
2964
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05002965static void _sde_crtc_setup_is_ppsplit(struct drm_crtc_state *state)
2966{
2967 int i;
2968 struct sde_crtc_state *cstate;
2969
2970 cstate = to_sde_crtc_state(state);
2971
2972 cstate->is_ppsplit = false;
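	/* flag ppsplit if any attached connector uses the PPSPLIT topology */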
2973 for (i = 0; i < cstate->num_connectors; i++) {
2974 struct drm_connector *conn = cstate->connectors[i];
2975
2976 if (sde_connector_get_topology_name(conn) ==
2977 SDE_RM_TOPOLOGY_PPSPLIT)
2978 cstate->is_ppsplit = true;
2979 }
2980}
2981
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05002982static void _sde_crtc_setup_lm_bounds(struct drm_crtc *crtc,
2983 struct drm_crtc_state *state)
2984{
2985 struct sde_crtc *sde_crtc;
2986 struct sde_crtc_state *cstate;
2987 struct drm_display_mode *adj_mode;
2988 u32 crtc_split_width;
2989 int i;
2990
2991 if (!crtc || !state) {
2992 SDE_ERROR("invalid args\n");
2993 return;
2994 }
2995
2996 sde_crtc = to_sde_crtc(crtc);
2997 cstate = to_sde_crtc_state(state);
2998
2999 adj_mode = &state->adjusted_mode;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05303000 crtc_split_width = sde_crtc_get_mixer_width(sde_crtc, cstate, adj_mode);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05003001
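	/* split the mode evenly across mixers; each ROI starts as its full bounds */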
3002 for (i = 0; i < sde_crtc->num_mixers; i++) {
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003003 cstate->lm_bounds[i].x = crtc_split_width * i;
3004 cstate->lm_bounds[i].y = 0;
3005 cstate->lm_bounds[i].w = crtc_split_width;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05303006 cstate->lm_bounds[i].h =
3007 sde_crtc_get_mixer_height(sde_crtc, cstate, adj_mode);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003008 memcpy(&cstate->lm_roi[i], &cstate->lm_bounds[i],
3009 sizeof(cstate->lm_roi[i]));
Dhaval Patela5f75952017-07-25 11:17:41 -07003010 SDE_EVT32_VERBOSE(DRMID(crtc), i,
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003011 cstate->lm_bounds[i].x, cstate->lm_bounds[i].y,
3012 cstate->lm_bounds[i].w, cstate->lm_bounds[i].h);
3013 SDE_DEBUG("%s: lm%d bnd&roi (%d,%d,%d,%d)\n", sde_crtc->name, i,
3014 cstate->lm_roi[i].x, cstate->lm_roi[i].y,
3015 cstate->lm_roi[i].w, cstate->lm_roi[i].h);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05003016 }
3017
3018 drm_mode_debug_printmodeline(adj_mode);
3019}
3020
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003021static void sde_crtc_atomic_begin(struct drm_crtc *crtc,
Clarence Ip0d0e96d2016-10-24 18:13:13 -04003022 struct drm_crtc_state *old_state)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003023{
Clarence Ipcae1bb62016-07-07 12:07:13 -04003024 struct sde_crtc *sde_crtc;
Dhaval Patel0e558f42017-04-30 00:51:40 -07003025 struct drm_encoder *encoder;
Clarence Ipcae1bb62016-07-07 12:07:13 -04003026 struct drm_device *dev;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003027 unsigned long flags;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08003028 struct sde_kms *sde_kms;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003029
Clarence Ipcae1bb62016-07-07 12:07:13 -04003030 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003031 SDE_ERROR("invalid crtc\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04003032 return;
3033 }
3034
Alan Kwong163d2612016-11-03 00:56:56 -04003035 if (!crtc->state->enable) {
3036 SDE_DEBUG("crtc%d -> enable %d, skip atomic_begin\n",
3037 crtc->base.id, crtc->state->enable);
3038 return;
3039 }
3040
Alan Kwong1124f1f2017-11-10 18:14:39 -05003041 if (!sde_kms_power_resource_is_enabled(crtc->dev)) {
3042 SDE_ERROR("power resource is not enabled\n");
3043 return;
3044 }
3045
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08003046 sde_kms = _sde_crtc_get_kms(crtc);
3047 if (!sde_kms)
3048 return;
3049
Alan Kwong163d2612016-11-03 00:56:56 -04003050 SDE_DEBUG("crtc%d\n", crtc->base.id);
3051
Clarence Ipcae1bb62016-07-07 12:07:13 -04003052 sde_crtc = to_sde_crtc(crtc);
3053 dev = crtc->dev;
3054
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003055 if (!sde_crtc->num_mixers) {
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003056 _sde_crtc_setup_mixers(crtc);
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05003057 _sde_crtc_setup_is_ppsplit(crtc->state);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003058 _sde_crtc_setup_lm_bounds(crtc, crtc->state);
3059 }
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05003060
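	/* cache crtc->state->event under the event lock; warn if one is already pending */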
Lloyd Atkinson265d2212016-05-30 13:12:01 -04003061 if (sde_crtc->event) {
3062 WARN_ON(sde_crtc->event);
3063 } else {
3064 spin_lock_irqsave(&dev->event_lock, flags);
3065 sde_crtc->event = crtc->state->event;
3066 spin_unlock_irqrestore(&dev->event_lock, flags);
3067 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003068
Dhaval Patel0e558f42017-04-30 00:51:40 -07003069 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
3070 if (encoder->crtc != crtc)
3071 continue;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003072
Dhaval Patel0e558f42017-04-30 00:51:40 -07003073 /* encoder will trigger pending mask now */
3074 sde_encoder_trigger_kickoff_pending(encoder);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003075 }
3076
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003077 /*
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003078 * If no mixers have been allocated in sde_crtc_atomic_check(),
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003079 * it means we are trying to flush a CRTC whose state is disabled:
3080 * nothing else needs to be done.
3081 */
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003082 if (unlikely(!sde_crtc->num_mixers))
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003083 return;
3084
Clarence Ip1d1979d2017-12-22 18:24:34 -05003085 _sde_crtc_blend_setup(crtc, old_state, true);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05303086 _sde_crtc_dest_scaler_setup(crtc);
Abhijit Kulkarni12cef9c2017-07-13 11:19:03 -07003087
Dhaval Patelc9e213b2017-11-02 12:13:12 -07003088 /* cancel the idle notify delayed work */
3089 if (sde_encoder_check_mode(sde_crtc->mixers[0].encoder,
3090 MSM_DISPLAY_CAP_VID_MODE) &&
3091 kthread_cancel_delayed_work_sync(&sde_crtc->idle_notify_work))
3092 SDE_DEBUG("idle notify work cancelled\n");
3093
Abhijit Kulkarni12cef9c2017-07-13 11:19:03 -07003094 /*
3095	 * Since CP properties use an AXI buffer to program the
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08003096	 * HW, check if the context bank is in the attached state and
Abhijit Kulkarni12cef9c2017-07-13 11:19:03 -07003097	 * apply color processing properties only if the
3098	 * SMMU state is attached.
3099 */
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08003100 if (!sde_kms_is_secure_session_inprogress(sde_kms))
Abhijit Kulkarni12cef9c2017-07-13 11:19:03 -07003101 sde_cp_crtc_apply_properties(crtc);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003102
3103 /*
3104 * PP_DONE irq is only used by command mode for now.
3105	 * It is better to request the pending interrupt before the FLUSH and
3106	 * START triggers to make sure no pp_done irq is missed.
3107	 * This is safe because no pp_done can happen before the SW trigger
3108 * in command mode.
3109 */
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003110}
3111
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003112static void sde_crtc_atomic_flush(struct drm_crtc *crtc,
3113 struct drm_crtc_state *old_crtc_state)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003114{
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003115 struct drm_encoder *encoder;
Clarence Ipcae1bb62016-07-07 12:07:13 -04003116 struct sde_crtc *sde_crtc;
3117 struct drm_device *dev;
Lloyd Atkinson265d2212016-05-30 13:12:01 -04003118 struct drm_plane *plane;
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303119 struct msm_drm_private *priv;
3120 struct msm_drm_thread *event_thread;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003121 unsigned long flags;
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003122 struct sde_crtc_state *cstate;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08003123 struct sde_kms *sde_kms;
Dhaval Patelc9e213b2017-11-02 12:13:12 -07003124 int idle_time = 0;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003125
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303126 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003127 SDE_ERROR("invalid crtc\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04003128 return;
3129 }
3130
Alan Kwong163d2612016-11-03 00:56:56 -04003131 if (!crtc->state->enable) {
3132 SDE_DEBUG("crtc%d -> enable %d, skip atomic_flush\n",
3133 crtc->base.id, crtc->state->enable);
3134 return;
3135 }
3136
Alan Kwong1124f1f2017-11-10 18:14:39 -05003137 if (!sde_kms_power_resource_is_enabled(crtc->dev)) {
3138 SDE_ERROR("power resource is not enabled\n");
3139 return;
3140 }
3141
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08003142 sde_kms = _sde_crtc_get_kms(crtc);
3143 if (!sde_kms) {
3144 SDE_ERROR("invalid kms\n");
3145 return;
3146 }
3147
Alan Kwong163d2612016-11-03 00:56:56 -04003148 SDE_DEBUG("crtc%d\n", crtc->base.id);
Clarence Ipcae1bb62016-07-07 12:07:13 -04003149
3150 sde_crtc = to_sde_crtc(crtc);
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003151 cstate = to_sde_crtc_state(crtc->state);
Clarence Ipcae1bb62016-07-07 12:07:13 -04003152 dev = crtc->dev;
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303153 priv = dev->dev_private;
3154
3155 if (crtc->index >= ARRAY_SIZE(priv->event_thread)) {
3156 SDE_ERROR("invalid crtc index[%d]\n", crtc->index);
3157 return;
3158 }
3159
3160 event_thread = &priv->event_thread[crtc->index];
Dhaval Patelc9e213b2017-11-02 12:13:12 -07003161 idle_time = sde_crtc_get_property(cstate, CRTC_PROP_IDLE_TIMEOUT);
Clarence Ipcae1bb62016-07-07 12:07:13 -04003162
Lloyd Atkinson265d2212016-05-30 13:12:01 -04003163 if (sde_crtc->event) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003164 SDE_DEBUG("already received sde_crtc->event\n");
Lloyd Atkinson265d2212016-05-30 13:12:01 -04003165 } else {
Lloyd Atkinson265d2212016-05-30 13:12:01 -04003166 spin_lock_irqsave(&dev->event_lock, flags);
3167 sde_crtc->event = crtc->state->event;
3168 spin_unlock_irqrestore(&dev->event_lock, flags);
3169 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003170
3171 /*
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003172	 * If no mixers have been allocated in sde_crtc_atomic_check(),
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003173 * it means we are trying to flush a CRTC whose state is disabled:
3174 * nothing else needs to be done.
3175 */
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003176 if (unlikely(!sde_crtc->num_mixers))
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003177 return;
3178
Alan Kwong346223e2017-06-30 15:29:22 -04003179 /*
3180	 * For planes without a commit update, the drm framework will not add
3181	 * those planes to the current state since a hardware update is not
3182	 * required. However, if those planes were power collapsed since the
3183	 * last commit cycle, the driver has to restore the hardware state
3184	 * of those planes explicitly here prior to the plane flush.
3185 */
3186 drm_atomic_crtc_for_each_plane(plane, crtc)
3187 sde_plane_restore(plane);
3188
Clarence Ipcae1bb62016-07-07 12:07:13 -04003189 /* wait for acquire fences before anything else is done */
3190 _sde_crtc_wait_for_fences(crtc);
3191
Dhaval Patelc9e213b2017-11-02 12:13:12 -07003192 /* schedule the idle notify delayed work */
3193 if (idle_time && sde_encoder_check_mode(sde_crtc->mixers[0].encoder,
3194 MSM_DISPLAY_CAP_VID_MODE)) {
3195 kthread_queue_delayed_work(&event_thread->worker,
3196 &sde_crtc->idle_notify_work,
3197 msecs_to_jiffies(idle_time));
3198 SDE_DEBUG("schedule idle notify work in %dms\n", idle_time);
3199 }
3200
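	/* look up and cache the RSC client from the attached encoder if not done yet */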
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003201 if (!cstate->rsc_update) {
3202 drm_for_each_encoder(encoder, dev) {
3203 if (encoder->crtc != crtc)
3204 continue;
3205
3206 cstate->rsc_client =
Dhaval Patel30fae8a2017-04-21 18:42:41 -07003207 sde_encoder_get_rsc_client(encoder);
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003208 }
3209 cstate->rsc_update = true;
3210 }
3211
Alan Kwong9aa061c2016-11-06 21:17:12 -05003212 /* update performance setting before crtc kickoff */
3213 sde_core_perf_crtc_update(crtc, 1, false);
3214
Clarence Ipcae1bb62016-07-07 12:07:13 -04003215 /*
3216 * Final plane updates: Give each plane a chance to complete all
3217 * required writes/flushing before crtc's "flush
3218 * everything" call below.
3219 */
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07003220 drm_atomic_crtc_for_each_plane(plane, crtc) {
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08003221 if (sde_kms->smmu_state.transition_error)
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07003222 sde_plane_set_error(plane, true);
Clarence Ipcae1bb62016-07-07 12:07:13 -04003223 sde_plane_flush(plane);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07003224 }
Clarence Ipcae1bb62016-07-07 12:07:13 -04003225
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003226 /* Kickoff will be scheduled by outer layer */
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003227}
3228
Clarence Ip7a753bb2016-07-07 11:47:44 -04003229/**
3230 * sde_crtc_destroy_state - state destroy hook
3231 * @crtc: drm CRTC
3232 * @state: CRTC state object to release
3233 */
3234static void sde_crtc_destroy_state(struct drm_crtc *crtc,
3235 struct drm_crtc_state *state)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003236{
Clarence Ip7a753bb2016-07-07 11:47:44 -04003237 struct sde_crtc *sde_crtc;
3238 struct sde_crtc_state *cstate;
3239
3240 if (!crtc || !state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003241 SDE_ERROR("invalid argument(s)\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003242 return;
3243 }
3244
3245 sde_crtc = to_sde_crtc(crtc);
3246 cstate = to_sde_crtc_state(state);
3247
Alan Kwong163d2612016-11-03 00:56:56 -04003248 SDE_DEBUG("crtc%d\n", crtc->base.id);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003249
Alan Kwongcdb2f282017-03-18 13:42:06 -07003250 _sde_crtc_rp_destroy(&cstate->rp);
3251
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07003252 __drm_atomic_helper_crtc_destroy_state(state);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003253
3254 /* destroy value helper */
3255 msm_property_destroy_state(&sde_crtc->property_info, cstate,
Clarence Ip4a2955d2017-07-04 18:04:33 -04003256 &cstate->property_state);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003257}
3258
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003259static int _sde_crtc_wait_for_frame_done(struct drm_crtc *crtc)
3260{
3261 struct sde_crtc *sde_crtc;
Veera Sundaram Sankaran6d21d4c2017-09-30 10:07:15 -07003262 int ret, rc = 0, i;
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003263
3264 if (!crtc) {
3265 SDE_ERROR("invalid argument\n");
3266 return -EINVAL;
3267 }
3268 sde_crtc = to_sde_crtc(crtc);
3269
3270 if (!atomic_read(&sde_crtc->frame_pending)) {
3271 SDE_DEBUG("no frames pending\n");
3272 return 0;
3273 }
3274
Veera Sundaram Sankaran6d21d4c2017-09-30 10:07:15 -07003275 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_FUNC_ENTRY);
3276
3277 /*
3278 * flush all the event thread work to make sure all the
3279	 * FRAME_EVENTS from the encoder are propagated to the crtc
3280 */
3281 for (i = 0; i < ARRAY_SIZE(sde_crtc->frame_events); i++) {
3282 if (list_empty(&sde_crtc->frame_events[i].list))
3283 kthread_flush_work(&sde_crtc->frame_events[i].work);
3284 }
3285
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003286 ret = wait_for_completion_timeout(&sde_crtc->frame_done_comp,
3287 msecs_to_jiffies(SDE_FRAME_DONE_TIMEOUT));
3288 if (!ret) {
3289 SDE_ERROR("frame done completion wait timed out, ret:%d\n",
3290 ret);
3291 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_FATAL);
3292 rc = -ETIMEDOUT;
3293 }
Dhaval Patela5f75952017-07-25 11:17:41 -07003294 SDE_EVT32_VERBOSE(DRMID(crtc), SDE_EVTLOG_FUNC_EXIT);
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003295
3296 return rc;
3297}
3298
Clarence Ip8ee49952017-09-20 11:10:50 -04003299static int _sde_crtc_commit_kickoff_rot(struct drm_crtc *crtc,
Clarence Ip95f530b2017-09-06 17:31:41 -04003300 struct sde_crtc_state *cstate)
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003301{
Clarence Ipeb39cce2017-07-19 14:12:43 -04003302 struct drm_plane *plane;
Clarence Ip95f530b2017-09-06 17:31:41 -04003303 struct sde_crtc *sde_crtc;
3304 struct sde_hw_ctl *ctl, *master_ctl;
Clarence Ipb776b532017-09-12 18:30:06 -04003305 u32 flush_mask;
Clarence Ip8ee49952017-09-20 11:10:50 -04003306 int i, rc = 0;
Clarence Ip95f530b2017-09-06 17:31:41 -04003307
3308 if (!crtc || !cstate)
Clarence Ip8ee49952017-09-20 11:10:50 -04003309 return -EINVAL;
Clarence Ip95f530b2017-09-06 17:31:41 -04003310
3311 sde_crtc = to_sde_crtc(crtc);
3312
Clarence Ipb776b532017-09-12 18:30:06 -04003313 /*
Clarence Ip1d1979d2017-12-22 18:24:34 -05003314 * Update sbuf configuration and flush bits if either the rot_op_mode
3315 * is different or a rotator commit was performed.
Clarence Ipb776b532017-09-12 18:30:06 -04003316 *
Clarence Ip1d1979d2017-12-22 18:24:34 -05003317 * In the case where the rot_op_mode has changed, further require that
3318 * the transition is either to or from offline mode unless
3319 * sbuf_flush_mask_delta is also non-zero (i.e., a corresponding plane
3320 * update was provided to the current commit).
Clarence Ipb776b532017-09-12 18:30:06 -04003321 */
Clarence Ip1d1979d2017-12-22 18:24:34 -05003322 flush_mask = sde_crtc->sbuf_flush_mask_delta;
3323 if ((sde_crtc->sbuf_op_mode_old != cstate->sbuf_cfg.rot_op_mode) &&
3324 (sde_crtc->sbuf_op_mode_old == SDE_CTL_ROT_OP_MODE_OFFLINE ||
3325 cstate->sbuf_cfg.rot_op_mode == SDE_CTL_ROT_OP_MODE_OFFLINE))
3326 flush_mask |= sde_crtc->sbuf_flush_mask_all |
3327 sde_crtc->sbuf_flush_mask_old;
Clarence Ipb776b532017-09-12 18:30:06 -04003328
Clarence Ip1d1979d2017-12-22 18:24:34 -05003329 if (!flush_mask &&
3330 cstate->sbuf_cfg.rot_op_mode == SDE_CTL_ROT_OP_MODE_OFFLINE)
3331 return 0;
Clarence Ipb776b532017-09-12 18:30:06 -04003332
Clarence Ip95f530b2017-09-06 17:31:41 -04003333 SDE_ATRACE_BEGIN("crtc_kickoff_rot");
3334
Clarence Ip1d1979d2017-12-22 18:24:34 -05003335 if (cstate->sbuf_cfg.rot_op_mode != SDE_CTL_ROT_OP_MODE_OFFLINE &&
3336 sde_crtc->sbuf_flush_mask_delta) {
Clarence Ipb776b532017-09-12 18:30:06 -04003337 drm_atomic_crtc_for_each_plane(plane, crtc) {
Clarence Ip8ee49952017-09-20 11:10:50 -04003338 rc = sde_plane_kickoff_rot(plane);
3339 if (rc) {
3340 SDE_ERROR("crtc%d cancelling inline rotation\n",
3341 crtc->base.id);
3342 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_ERROR);
3343
3344 /* revert to offline on errors */
3345 cstate->sbuf_cfg.rot_op_mode =
3346 SDE_CTL_ROT_OP_MODE_OFFLINE;
3347 break;
3348 }
Clarence Ipb776b532017-09-12 18:30:06 -04003349 }
3350 }
Clarence Ip95f530b2017-09-06 17:31:41 -04003351
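	/* program sbuf config on each CTL; the lowest-indexed CTL acts as master */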
3352 master_ctl = NULL;
3353 for (i = 0; i < sde_crtc->num_mixers; i++) {
3354 ctl = sde_crtc->mixers[i].hw_ctl;
Clarence Ip8ee49952017-09-20 11:10:50 -04003355 if (!ctl)
Clarence Ip95f530b2017-09-06 17:31:41 -04003356 continue;
3357
3358 if (!master_ctl || master_ctl->idx > ctl->idx)
3359 master_ctl = ctl;
Clarence Ipa8a93eb2017-11-24 10:38:24 -05003360
3361 if (ctl->ops.setup_sbuf_cfg)
3362 ctl->ops.setup_sbuf_cfg(ctl, &cstate->sbuf_cfg);
Clarence Ip95f530b2017-09-06 17:31:41 -04003363 }
3364
Clarence Ip8ee49952017-09-20 11:10:50 -04003365	/* only update the pending flush mask on the master ctl */
Clarence Ipa8a93eb2017-11-24 10:38:24 -05003366 if (master_ctl && master_ctl->ops.update_pending_flush) {
Clarence Ip8ee49952017-09-20 11:10:50 -04003367 master_ctl->ops.update_pending_flush(master_ctl, flush_mask);
3368
3369 /* explicitly trigger rotator for async modes */
3370 if (cstate->sbuf_cfg.rot_op_mode ==
3371 SDE_CTL_ROT_OP_MODE_INLINE_ASYNC &&
Clarence Ip1d1979d2017-12-22 18:24:34 -05003372 master_ctl->ops.trigger_rot_start)
Clarence Ip8ee49952017-09-20 11:10:50 -04003373 master_ctl->ops.trigger_rot_start(master_ctl);
Clarence Ip1d1979d2017-12-22 18:24:34 -05003374 SDE_EVT32(DRMID(crtc), master_ctl->idx - CTL_0,
3375 sde_crtc->sbuf_flush_mask_all,
3376 sde_crtc->sbuf_flush_mask_delta);
Clarence Ip8ee49952017-09-20 11:10:50 -04003377 }
Clarence Ip95f530b2017-09-06 17:31:41 -04003378
Clarence Ip1d1979d2017-12-22 18:24:34 -05003379 /* save this in sde_crtc for next commit cycle */
3380 sde_crtc->sbuf_op_mode_old = cstate->sbuf_cfg.rot_op_mode;
3381
Clarence Ip95f530b2017-09-06 17:31:41 -04003382 SDE_ATRACE_END("crtc_kickoff_rot");
Clarence Ip8ee49952017-09-20 11:10:50 -04003383 return rc;
Clarence Ip95f530b2017-09-06 17:31:41 -04003384}
3385
Clarence Ip662698e2017-09-12 18:34:16 -04003386/**
3387 * _sde_crtc_remove_pipe_flush - remove staged pipes from flush mask
3388 * @sde_crtc: Pointer to sde crtc structure
3389 */
3390static void _sde_crtc_remove_pipe_flush(struct sde_crtc *sde_crtc)
3391{
3392 struct sde_crtc_mixer *mixer;
3393 struct sde_hw_ctl *ctl;
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003394 u32 i, n, flush_mask;
Clarence Ip662698e2017-09-12 18:34:16 -04003395
3396 if (!sde_crtc)
3397 return;
3398
3399 mixer = sde_crtc->mixers;
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003400 n = min_t(size_t, sde_crtc->num_mixers, ARRAY_SIZE(sde_crtc->mixers));
3401 for (i = 0; i < n; i++) {
Clarence Ip662698e2017-09-12 18:34:16 -04003402 ctl = mixer[i].hw_ctl;
3403 if (!ctl || !ctl->ops.get_pending_flush ||
3404 !ctl->ops.clear_pending_flush ||
3405 !ctl->ops.update_pending_flush)
3406 continue;
3407
3408 flush_mask = ctl->ops.get_pending_flush(ctl);
3409 flush_mask &= ~mixer[i].pipe_mask;
3410 ctl->ops.clear_pending_flush(ctl);
3411 ctl->ops.update_pending_flush(ctl, flush_mask);
3412 }
3413}
3414
Clarence Ip569d5af2017-10-14 21:09:01 -04003415/**
3416 * _sde_crtc_reset_hw - attempt hardware reset on errors
3417 * @crtc: Pointer to DRM crtc instance
3418 * @old_state: Pointer to crtc state for previous commit
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003419 * @dump_status: Whether or not to dump debug status before reset
Clarence Ip569d5af2017-10-14 21:09:01 -04003420 * Returns: Zero if current commit should still be attempted
3421 */
3422static int _sde_crtc_reset_hw(struct drm_crtc *crtc,
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003423 struct drm_crtc_state *old_state, bool dump_status)
Clarence Ip569d5af2017-10-14 21:09:01 -04003424{
3425 struct drm_plane *plane_halt[MAX_PLANES];
3426 struct drm_plane *plane;
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003427 struct drm_encoder *encoder;
Clarence Ip569d5af2017-10-14 21:09:01 -04003428 const struct drm_plane_state *pstate;
3429 struct sde_crtc *sde_crtc;
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003430 struct sde_crtc_state *cstate;
Clarence Ip569d5af2017-10-14 21:09:01 -04003431 struct sde_hw_ctl *ctl;
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003432 enum sde_ctl_rot_op_mode old_rot_op_mode;
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003433 signed int i, n, plane_count;
Clarence Ip569d5af2017-10-14 21:09:01 -04003434 int rc;
3435
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003436 if (!crtc || !crtc->dev || !old_state || !crtc->state)
Clarence Ip569d5af2017-10-14 21:09:01 -04003437 return -EINVAL;
3438 sde_crtc = to_sde_crtc(crtc);
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003439 cstate = to_sde_crtc_state(crtc->state);
Clarence Ip569d5af2017-10-14 21:09:01 -04003440
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003441 old_rot_op_mode = to_sde_crtc_state(old_state)->sbuf_cfg.rot_op_mode;
3442 SDE_EVT32(DRMID(crtc), old_rot_op_mode,
3443 dump_status, SDE_EVTLOG_FUNC_ENTRY);
3444
3445 if (dump_status)
3446 SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus");
3447
Clarence Ip43832362017-12-05 10:16:30 -05003448 /* optionally generate a panic instead of performing a h/w reset */
3449 SDE_DBG_CTRL("stop_ftrace", "reset_hw_panic");
3450
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003451 n = min_t(size_t, sde_crtc->num_mixers, ARRAY_SIZE(sde_crtc->mixers));
3452 for (i = 0; i < n; ++i) {
Clarence Ip569d5af2017-10-14 21:09:01 -04003453 ctl = sde_crtc->mixers[i].hw_ctl;
3454 if (!ctl || !ctl->ops.reset)
3455 continue;
3456
3457 rc = ctl->ops.reset(ctl);
3458 if (rc) {
3459 SDE_DEBUG("crtc%d: ctl%d reset failure\n",
3460 crtc->base.id, ctl->idx - CTL_0);
3461 SDE_EVT32(DRMID(crtc), ctl->idx - CTL_0,
3462 SDE_EVTLOG_ERROR);
3463 break;
3464 }
3465 }
3466
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003467 /*
3468 * Early out if simple ctl reset succeeded and previous commit
3469 * did not involve the rotator.
3470 *
3471 * If the previous commit had rotation enabled, then the ctl
3472 * reset would also have reset the rotator h/w. The rotator
3473 * programming for the current commit may need to be repeated,
3474 * depending on the rotation mode; don't handle this for now
3475 * and just force a hard reset in those cases.
3476 */
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003477 if (i == n && old_rot_op_mode == SDE_CTL_ROT_OP_MODE_OFFLINE)
Clarence Ip569d5af2017-10-14 21:09:01 -04003478 return false;
Clarence Ip569d5af2017-10-14 21:09:01 -04003479
3480 SDE_DEBUG("crtc%d: issuing hard reset\n", DRMID(crtc));
3481
3482 /* force all components in the system into reset at the same time */
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003483 for (i = 0; i < n; ++i) {
Clarence Ip569d5af2017-10-14 21:09:01 -04003484 ctl = sde_crtc->mixers[i].hw_ctl;
3485 if (!ctl || !ctl->ops.hard_reset)
3486 continue;
3487
3488 SDE_EVT32(DRMID(crtc), ctl->idx - CTL_0);
3489 ctl->ops.hard_reset(ctl, true);
3490 }
3491
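	/* halt plane requests and mark planes for revalidation during the reset */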
3492 plane_count = 0;
3493 drm_atomic_crtc_state_for_each_plane(plane, old_state) {
3494 if (plane_count >= ARRAY_SIZE(plane_halt))
3495 break;
3496
3497 plane_halt[plane_count++] = plane;
3498 sde_plane_halt_requests(plane, true);
3499 sde_plane_set_revalidate(plane, true);
3500 }
3501
3502 /* reset both previous... */
Clarence Ipa8a93eb2017-11-24 10:38:24 -05003503 drm_atomic_crtc_state_for_each_plane_state(plane, pstate, old_state) {
Clarence Ip569d5af2017-10-14 21:09:01 -04003504 if (pstate->crtc != crtc)
3505 continue;
3506
3507 sde_plane_reset_rot(plane, (struct drm_plane_state *)pstate);
3508 }
3509
3510 /* ...and current rotation attempts, if applicable */
3511 drm_atomic_crtc_for_each_plane(plane, crtc) {
3512 pstate = plane->state;
3513 if (!pstate)
3514 continue;
3515
3516 sde_plane_reset_rot(plane, (struct drm_plane_state *)pstate);
3517 }
3518
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003519 /* provide safe "border color only" commit configuration for later */
3520 cstate->sbuf_cfg.rot_op_mode = SDE_CTL_ROT_OP_MODE_OFFLINE;
3521 _sde_crtc_commit_kickoff_rot(crtc, cstate);
3522 _sde_crtc_remove_pipe_flush(sde_crtc);
Clarence Ip1d1979d2017-12-22 18:24:34 -05003523 _sde_crtc_blend_setup(crtc, old_state, false);
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003524
Clarence Ip569d5af2017-10-14 21:09:01 -04003525 /* take h/w components out of reset */
3526 for (i = plane_count - 1; i >= 0; --i)
3527 sde_plane_halt_requests(plane_halt[i], false);
3528
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003529 /* attempt to poll for start of frame cycle before reset release */
3530 list_for_each_entry(encoder,
3531 &crtc->dev->mode_config.encoder_list, head) {
3532 if (encoder->crtc != crtc)
3533 continue;
3534 if (sde_encoder_get_intf_mode(encoder) == INTF_MODE_VIDEO)
3535 sde_encoder_poll_line_counts(encoder);
3536 }
3537
3538 for (i = 0; i < n; ++i) {
Clarence Ip569d5af2017-10-14 21:09:01 -04003539 ctl = sde_crtc->mixers[i].hw_ctl;
3540 if (!ctl || !ctl->ops.hard_reset)
3541 continue;
3542
3543 ctl->ops.hard_reset(ctl, false);
3544 }
3545
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003546 list_for_each_entry(encoder,
3547 &crtc->dev->mode_config.encoder_list, head) {
3548 if (encoder->crtc != crtc)
3549 continue;
3550
3551 if (sde_encoder_get_intf_mode(encoder) == INTF_MODE_VIDEO)
3552 sde_encoder_kickoff(encoder, false);
3553 }
3554
Clarence Ip569d5af2017-10-14 21:09:01 -04003555 return -EAGAIN;
3556}
3557
Clarence Ipb2f6dfc2017-10-24 17:09:53 -04003558/**
3559 * _sde_crtc_prepare_for_kickoff_rot - rotator related kickoff preparation
3560 * @dev: Pointer to drm device
3561 * @crtc: Pointer to crtc structure
3562 * Returns: true on preparation errors
3563 */
3564static bool _sde_crtc_prepare_for_kickoff_rot(struct drm_device *dev,
3565 struct drm_crtc *crtc)
3566{
3567 struct drm_encoder *encoder;
3568 struct sde_crtc *sde_crtc;
3569 struct sde_crtc_state *cstate;
3570
3571 if (!crtc || !dev) {
3572 SDE_ERROR("invalid argument(s)\n");
3573 return false;
3574 }
3575 sde_crtc = to_sde_crtc(crtc);
3576 cstate = to_sde_crtc_state(crtc->state);
3577
3578 /* default to ASYNC mode for inline rotation */
Clarence Ip1d1979d2017-12-22 18:24:34 -05003579 cstate->sbuf_cfg.rot_op_mode = sde_crtc->sbuf_flush_mask_all ?
Clarence Ipb2f6dfc2017-10-24 17:09:53 -04003580 SDE_CTL_ROT_OP_MODE_INLINE_ASYNC : SDE_CTL_ROT_OP_MODE_OFFLINE;
3581
3582 if (cstate->sbuf_cfg.rot_op_mode == SDE_CTL_ROT_OP_MODE_OFFLINE)
3583 return false;
3584
3585 /* extra steps needed for inline ASYNC modes */
3586 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
3587 if (encoder->crtc != crtc)
3588 continue;
3589
3590 /*
3591 * For inline ASYNC modes, the flush bits are not written
Clarence Ip2e9e5882017-12-04 15:03:40 -05003592 * to hardware atomically. This is not fully supported for
3593 * non-command mode encoders, so force SYNC mode if any
3594 * of them are attached to the CRTC.
Clarence Ipb2f6dfc2017-10-24 17:09:53 -04003595 */
Clarence Ip2e9e5882017-12-04 15:03:40 -05003596 if (sde_encoder_get_intf_mode(encoder) != INTF_MODE_CMD) {
Clarence Ipb2f6dfc2017-10-24 17:09:53 -04003597 cstate->sbuf_cfg.rot_op_mode =
3598 SDE_CTL_ROT_OP_MODE_INLINE_SYNC;
3599 return false;
3600 }
3601 }
3602
3603 /*
3604 * For ASYNC inline modes, kick off the rotator now so that the H/W
3605 * can start as soon as it's ready.
3606 */
3607 if (_sde_crtc_commit_kickoff_rot(crtc, cstate))
3608 return true;
3609
3610 return false;
3611}
3612
Clarence Ip569d5af2017-10-14 21:09:01 -04003613void sde_crtc_commit_kickoff(struct drm_crtc *crtc,
3614 struct drm_crtc_state *old_state)
Clarence Ip95f530b2017-09-06 17:31:41 -04003615{
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003616 struct drm_encoder *encoder;
3617 struct drm_device *dev;
Alan Kwong628d19e2016-10-31 13:50:13 -04003618 struct sde_crtc *sde_crtc;
Alan Kwong67a3f792016-11-01 23:16:53 -04003619 struct msm_drm_private *priv;
3620 struct sde_kms *sde_kms;
Alan Kwong4aacd532017-02-04 18:51:33 -08003621 struct sde_crtc_state *cstate;
Clarence Ip569d5af2017-10-14 21:09:01 -04003622 bool is_error, reset_req;
Clarence Ip95f530b2017-09-06 17:31:41 -04003623 int ret;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003624
3625 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003626 SDE_ERROR("invalid argument\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003627 return;
3628 }
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003629 dev = crtc->dev;
Alan Kwong628d19e2016-10-31 13:50:13 -04003630 sde_crtc = to_sde_crtc(crtc);
Alan Kwong67a3f792016-11-01 23:16:53 -04003631 sde_kms = _sde_crtc_get_kms(crtc);
Clarence Ip569d5af2017-10-14 21:09:01 -04003632 reset_req = false;
Narendra Muppallaec11a0a2017-06-15 15:35:17 -07003633
3634 if (!sde_kms || !sde_kms->dev || !sde_kms->dev->dev_private) {
3635 SDE_ERROR("invalid argument\n");
3636 return;
3637 }
3638
Alan Kwong67a3f792016-11-01 23:16:53 -04003639 priv = sde_kms->dev->dev_private;
Alan Kwong4aacd532017-02-04 18:51:33 -08003640 cstate = to_sde_crtc_state(crtc->state);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003641
Clarence Ip90b282d2017-05-04 10:00:32 -07003642 /*
3643	 * If no mixers have been allocated in sde_crtc_atomic_check(),
3644 * it means we are trying to start a CRTC whose state is disabled:
3645 * nothing else needs to be done.
3646 */
3647 if (unlikely(!sde_crtc->num_mixers))
3648 return;
3649
Narendra Muppalla77b32932017-05-10 13:53:11 -07003650 SDE_ATRACE_BEGIN("crtc_commit");
Clarence Ip95f530b2017-09-06 17:31:41 -04003651
Clarence Ipb2f6dfc2017-10-24 17:09:53 -04003652 is_error = _sde_crtc_prepare_for_kickoff_rot(dev, crtc);
Clarence Ip95f530b2017-09-06 17:31:41 -04003653
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003654 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
Alan Kwong4aacd532017-02-04 18:51:33 -08003655 struct sde_encoder_kickoff_params params = { 0 };
3656
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003657 if (encoder->crtc != crtc)
3658 continue;
3659
3660 /*
3661 * Encoder will flush/start now, unless it has a tx pending.
3662 * If so, it may delay and flush at an irq event (e.g. ppdone)
3663 */
Alan Kwong4aacd532017-02-04 18:51:33 -08003664 params.inline_rotate_prefill = cstate->sbuf_prefill_line;
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003665 params.affected_displays = _sde_crtc_get_displays_affected(crtc,
3666 crtc->state);
Clarence Ip569d5af2017-10-14 21:09:01 -04003667 if (sde_encoder_prepare_for_kickoff(encoder, &params))
3668 reset_req = true;
Alan Kwong628d19e2016-10-31 13:50:13 -04003669 }
3670
Clarence Ip95f530b2017-09-06 17:31:41 -04003671 /*
Clarence Ip569d5af2017-10-14 21:09:01 -04003672 * Optionally attempt h/w recovery if any errors were detected while
3673 * preparing for the kickoff
3674 */
3675 if (reset_req) {
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003676 if (_sde_crtc_reset_hw(crtc, old_state,
3677 !sde_crtc->reset_request))
Clarence Ip569d5af2017-10-14 21:09:01 -04003678 is_error = true;
Clarence Ip569d5af2017-10-14 21:09:01 -04003679 }
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003680 sde_crtc->reset_request = reset_req;
Clarence Ip569d5af2017-10-14 21:09:01 -04003681
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003682 /* wait for frame_event_done completion */
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003683 SDE_ATRACE_BEGIN("wait_for_frame_done_event");
3684 ret = _sde_crtc_wait_for_frame_done(crtc);
3685 SDE_ATRACE_END("wait_for_frame_done_event");
3686 if (ret) {
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003687 SDE_ERROR("crtc%d wait for frame done failed;frame_pending%d\n",
3688 crtc->base.id,
3689 atomic_read(&sde_crtc->frame_pending));
Clarence Ip662698e2017-09-12 18:34:16 -04003690
3691 is_error = true;
3692
3693 /* force offline rotation mode since the commit has no pipes */
3694 cstate->sbuf_cfg.rot_op_mode = SDE_CTL_ROT_OP_MODE_OFFLINE;
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003695 }
3696
3697 if (atomic_inc_return(&sde_crtc->frame_pending) == 1) {
Alan Kwong628d19e2016-10-31 13:50:13 -04003698 /* acquire bandwidth and other resources */
3699 SDE_DEBUG("crtc%d first commit\n", crtc->base.id);
Clarence Ip95f530b2017-09-06 17:31:41 -04003700 SDE_EVT32(DRMID(crtc), cstate->sbuf_cfg.rot_op_mode,
3701 SDE_EVTLOG_FUNC_CASE1);
Alan Kwong628d19e2016-10-31 13:50:13 -04003702 } else {
3703 SDE_DEBUG("crtc%d commit\n", crtc->base.id);
Clarence Ip95f530b2017-09-06 17:31:41 -04003704 SDE_EVT32(DRMID(crtc), cstate->sbuf_cfg.rot_op_mode,
3705 SDE_EVTLOG_FUNC_CASE2);
Alan Kwong628d19e2016-10-31 13:50:13 -04003706 }
Dhaval Pateld67cf4a2017-06-14 18:08:32 -07003707 sde_crtc->play_count++;
Alan Kwong628d19e2016-10-31 13:50:13 -04003708
Clarence Ip95f530b2017-09-06 17:31:41 -04003709 /*
3710 * For SYNC inline modes, delay the kick off until after the
3711 * wait for frame done in case the wait times out.
Clarence Ipb776b532017-09-12 18:30:06 -04003712 *
3713 * Also perform a final kickoff when transitioning back to
3714 * offline mode.
Clarence Ip95f530b2017-09-06 17:31:41 -04003715 */
Clarence Ipb776b532017-09-12 18:30:06 -04003716 if (cstate->sbuf_cfg.rot_op_mode != SDE_CTL_ROT_OP_MODE_INLINE_ASYNC)
Clarence Ip8ee49952017-09-20 11:10:50 -04003717 if (_sde_crtc_commit_kickoff_rot(crtc, cstate))
3718 is_error = true;
Clarence Ipf6b530a2017-08-21 19:39:18 -04003719
Clarence Ip980405d2017-08-08 18:33:44 -04003720 sde_vbif_clear_errors(sde_kms);
3721
Clarence Ip569d5af2017-10-14 21:09:01 -04003722 if (is_error) {
Clarence Ip662698e2017-09-12 18:34:16 -04003723 _sde_crtc_remove_pipe_flush(sde_crtc);
Clarence Ip1d1979d2017-12-22 18:24:34 -05003724 _sde_crtc_blend_setup(crtc, old_state, false);
Clarence Ip569d5af2017-10-14 21:09:01 -04003725 }
Clarence Ip662698e2017-09-12 18:34:16 -04003726
Alan Kwong628d19e2016-10-31 13:50:13 -04003727 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
3728 if (encoder->crtc != crtc)
3729 continue;
3730
Clarence Ip569d5af2017-10-14 21:09:01 -04003731 sde_encoder_kickoff(encoder, false);
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003732 }
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003733
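	/* re-arm the frame done completion for the next commit */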
Dhaval Patelb9850c02017-08-07 22:55:47 -07003734 reinit_completion(&sde_crtc->frame_done_comp);
Narendra Muppalla77b32932017-05-10 13:53:11 -07003735 SDE_ATRACE_END("crtc_commit");
3736 return;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003737}
3738
Clarence Ip7a753bb2016-07-07 11:47:44 -04003739/**
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003740 * _sde_crtc_vblank_enable_no_lock - update power resource and vblank request
Clarence Ip7f70ce42017-03-20 06:53:46 -07003741 * @sde_crtc: Pointer to sde crtc structure
3742 * @enable: Whether to enable/disable vblanks
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003743 *
3744 * Return: error code
Clarence Ip7f70ce42017-03-20 06:53:46 -07003745 */
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003746static int _sde_crtc_vblank_enable_no_lock(
Clarence Ip7f70ce42017-03-20 06:53:46 -07003747 struct sde_crtc *sde_crtc, bool enable)
3748{
3749 struct drm_device *dev;
3750 struct drm_crtc *crtc;
3751 struct drm_encoder *enc;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003752
3753 if (!sde_crtc) {
3754 SDE_ERROR("invalid crtc\n");
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003755 return -EINVAL;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003756 }
3757
3758 crtc = &sde_crtc->base;
3759 dev = crtc->dev;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003760
3761 if (enable) {
Lloyd Atkinson2c554eb2017-05-24 16:22:39 -04003762 int ret;
3763
3764 /* drop lock since power crtc cb may try to re-acquire lock */
3765 mutex_unlock(&sde_crtc->crtc_lock);
3766 ret = _sde_crtc_power_enable(sde_crtc, true);
3767 mutex_lock(&sde_crtc->crtc_lock);
3768 if (ret)
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003769 return ret;
Dhaval Patelf9245d62017-03-28 16:24:00 -07003770
Clarence Ip7f70ce42017-03-20 06:53:46 -07003771 list_for_each_entry(enc, &dev->mode_config.encoder_list, head) {
3772 if (enc->crtc != crtc)
3773 continue;
3774
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003775 SDE_EVT32(DRMID(&sde_crtc->base), DRMID(enc), enable,
3776 sde_crtc->enabled,
3777 sde_crtc->suspend,
3778 sde_crtc->vblank_requested);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003779
3780 sde_encoder_register_vblank_callback(enc,
3781 sde_crtc_vblank_cb, (void *)crtc);
3782 }
3783 } else {
3784 list_for_each_entry(enc, &dev->mode_config.encoder_list, head) {
3785 if (enc->crtc != crtc)
3786 continue;
3787
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003788 SDE_EVT32(DRMID(&sde_crtc->base), DRMID(enc), enable,
3789 sde_crtc->enabled,
3790 sde_crtc->suspend,
3791 sde_crtc->vblank_requested);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003792
3793 sde_encoder_register_vblank_callback(enc, NULL, NULL);
3794 }
Lloyd Atkinson2c554eb2017-05-24 16:22:39 -04003795
3796 /* drop lock since power crtc cb may try to re-acquire lock */
3797 mutex_unlock(&sde_crtc->crtc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003798 _sde_crtc_power_enable(sde_crtc, false);
Lloyd Atkinson2c554eb2017-05-24 16:22:39 -04003799 mutex_lock(&sde_crtc->crtc_lock);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003800 }
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003801
3802 return 0;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003803}
3804
3805/**
3806 * _sde_crtc_set_suspend - notify crtc of suspend enable/disable
3807 * @crtc: Pointer to drm crtc object
3808 * @enable: true to enable suspend, false to indicate resume
3809 */
3810static void _sde_crtc_set_suspend(struct drm_crtc *crtc, bool enable)
3811{
3812 struct sde_crtc *sde_crtc;
3813 struct msm_drm_private *priv;
3814 struct sde_kms *sde_kms;
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003815 int ret = 0;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003816
3817 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
3818 SDE_ERROR("invalid crtc\n");
3819 return;
3820 }
3821 sde_crtc = to_sde_crtc(crtc);
3822 priv = crtc->dev->dev_private;
3823
3824 if (!priv->kms) {
3825 SDE_ERROR("invalid crtc kms\n");
3826 return;
3827 }
3828 sde_kms = to_sde_kms(priv->kms);
3829
3830 SDE_DEBUG("crtc%d suspend = %d\n", crtc->base.id, enable);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003831 SDE_EVT32_VERBOSE(DRMID(crtc), enable);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003832
3833 mutex_lock(&sde_crtc->crtc_lock);
3834
Clarence Ip2f9beeb2017-03-16 11:04:53 -04003835 /*
Lloyd Atkinsonb2be0c42017-07-17 16:41:00 -04003836 * If the vblank is enabled, release a power reference on suspend
3837 * and take it back during resume (if it is still enabled).
Clarence Ip7f70ce42017-03-20 06:53:46 -07003838 */
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003839 SDE_EVT32(DRMID(&sde_crtc->base), enable, sde_crtc->enabled,
3840 sde_crtc->suspend, sde_crtc->vblank_requested);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003841 if (sde_crtc->suspend == enable)
3842 SDE_DEBUG("crtc%d suspend already set to %d, ignoring update\n",
3843 crtc->base.id, enable);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003844 else if (sde_crtc->enabled && sde_crtc->vblank_requested) {
3845 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, !enable);
3846 if (ret)
3847 SDE_ERROR("%s vblank enable failed: %d\n",
3848 sde_crtc->name, ret);
3849 }
Clarence Ip7f70ce42017-03-20 06:53:46 -07003850
3851 sde_crtc->suspend = enable;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003852 mutex_unlock(&sde_crtc->crtc_lock);
3853}
3854
3855/**
Clarence Ip7a753bb2016-07-07 11:47:44 -04003856 * sde_crtc_duplicate_state - state duplicate hook
3857 * @crtc: Pointer to drm crtc structure
3858 * @Returns: Pointer to new drm_crtc_state structure
3859 */
3860static struct drm_crtc_state *sde_crtc_duplicate_state(struct drm_crtc *crtc)
3861{
3862 struct sde_crtc *sde_crtc;
3863 struct sde_crtc_state *cstate, *old_cstate;
3864
3865 if (!crtc || !crtc->state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003866 SDE_ERROR("invalid argument(s)\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003867 return NULL;
3868 }
3869
3870 sde_crtc = to_sde_crtc(crtc);
3871 old_cstate = to_sde_crtc_state(crtc->state);
3872 cstate = msm_property_alloc_state(&sde_crtc->property_info);
3873 if (!cstate) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003874 SDE_ERROR("failed to allocate state\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003875 return NULL;
3876 }
3877
3878 /* duplicate value helper */
3879 msm_property_duplicate_state(&sde_crtc->property_info,
3880 old_cstate, cstate,
Clarence Ip4a2955d2017-07-04 18:04:33 -04003881 &cstate->property_state, cstate->property_values);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003882
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05303883 /* clear destination scaler dirty bit */
3884 cstate->ds_dirty = false;
3885
Clarence Ip4f339092018-01-05 13:29:04 -05003886 /* record whether or not the sbuf_clk_rate fifo has been shifted */
3887 cstate->sbuf_clk_shifted = false;
3888
Clarence Ip7a753bb2016-07-07 11:47:44 -04003889 /* duplicate base helper */
3890 __drm_atomic_helper_crtc_duplicate_state(crtc, &cstate->base);
3891
Alan Kwongcdb2f282017-03-18 13:42:06 -07003892 _sde_crtc_rp_duplicate(&old_cstate->rp, &cstate->rp);
3893
Clarence Ip7a753bb2016-07-07 11:47:44 -04003894 return &cstate->base;
3895}
3896
3897/**
3898 * sde_crtc_reset - reset hook for CRTCs
3899 * Resets the atomic state for @crtc by freeing the state pointer (which might
3900 * be NULL, e.g. at driver load time) and allocating a new empty state object.
3901 * @crtc: Pointer to drm crtc structure
3902 */
3903static void sde_crtc_reset(struct drm_crtc *crtc)
3904{
3905 struct sde_crtc *sde_crtc;
3906 struct sde_crtc_state *cstate;
3907
3908 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003909 SDE_ERROR("invalid crtc\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003910 return;
3911 }
3912
Clarence Ip7f70ce42017-03-20 06:53:46 -07003913 /* revert suspend actions, if necessary */
Veera Sundaram Sankarandb43e282017-09-19 18:32:52 -07003914 if (sde_kms_is_suspend_state(crtc->dev)) {
Clarence Ip7f70ce42017-03-20 06:53:46 -07003915 _sde_crtc_set_suspend(crtc, false);
3916
Veera Sundaram Sankarandb43e282017-09-19 18:32:52 -07003917 if (!sde_crtc_is_reset_required(crtc)) {
3918 SDE_DEBUG("avoiding reset for crtc:%d\n",
3919 crtc->base.id);
3920 return;
3921 }
3922 }
3923
Clarence Ip7a753bb2016-07-07 11:47:44 -04003924 /* remove previous state, if present */
3925 if (crtc->state) {
3926 sde_crtc_destroy_state(crtc, crtc->state);
3927		crtc->state = NULL;
3928 }
3929
3930 sde_crtc = to_sde_crtc(crtc);
3931 cstate = msm_property_alloc_state(&sde_crtc->property_info);
3932 if (!cstate) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003933 SDE_ERROR("failed to allocate state\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003934 return;
3935 }
3936
3937 /* reset value helper */
3938 msm_property_reset_state(&sde_crtc->property_info, cstate,
Clarence Ip4a2955d2017-07-04 18:04:33 -04003939 &cstate->property_state,
3940 cstate->property_values);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003941
Clarence Ipcae1bb62016-07-07 12:07:13 -04003942 _sde_crtc_set_input_fence_timeout(cstate);
3943
Alan Kwong310e9b02017-08-03 02:04:07 -04003944 _sde_crtc_rp_reset(&cstate->rp, &sde_crtc->rp_lock,
3945 &sde_crtc->rp_head);
Alan Kwongcdb2f282017-03-18 13:42:06 -07003946
Clarence Ip7a753bb2016-07-07 11:47:44 -04003947 cstate->base.crtc = crtc;
3948 crtc->state = &cstate->base;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003949}
3950
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003951static void sde_crtc_handle_power_event(u32 event_type, void *arg)
3952{
3953 struct drm_crtc *crtc = arg;
3954 struct sde_crtc *sde_crtc;
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05303955 struct sde_crtc_state *cstate;
Dhaval Patel010f5172017-08-01 22:40:09 -07003956 struct drm_plane *plane;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003957 struct drm_encoder *encoder;
Dhaval Patel010f5172017-08-01 22:40:09 -07003958 struct sde_crtc_mixer *m;
Dhaval Patelc9e213b2017-11-02 12:13:12 -07003959 u32 i, misr_status, power_on;
Ping Licc868fc2017-08-11 16:56:44 -07003960 unsigned long flags;
3961 struct sde_crtc_irq_info *node = NULL;
3962 int ret = 0;
Dhaval Patelc9e213b2017-11-02 12:13:12 -07003963 struct drm_event event;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003964
3965 if (!crtc) {
3966 SDE_ERROR("invalid crtc\n");
3967 return;
3968 }
3969 sde_crtc = to_sde_crtc(crtc);
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05303970 cstate = to_sde_crtc_state(crtc->state);
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003971
3972 mutex_lock(&sde_crtc->crtc_lock);
3973
3974 SDE_EVT32(DRMID(crtc), event_type);
3975
Dhaval Patel010f5172017-08-01 22:40:09 -07003976 switch (event_type) {
3977 case SDE_POWER_EVENT_POST_ENABLE:
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003978 /* restore encoder; crtc will be programmed during commit */
3979 drm_for_each_encoder(encoder, crtc->dev) {
3980 if (encoder->crtc != crtc)
3981 continue;
3982
3983 sde_encoder_virt_restore(encoder);
3984 }
Ping Licc868fc2017-08-11 16:56:44 -07003985
3986 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
3987 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
3988 ret = 0;
3989 if (node->func)
3990 ret = node->func(crtc, true, &node->irq);
3991 if (ret)
3992 SDE_ERROR("%s failed to enable event %x\n",
3993 sde_crtc->name, node->event);
3994 }
3995 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
3996
Ping Lie505f3b2017-06-19 14:19:08 -07003997 sde_cp_crtc_post_ipc(crtc);
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003998
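		/* restore MISR configuration lost across the power collapse */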
Dhaval Patel010f5172017-08-01 22:40:09 -07003999 for (i = 0; i < sde_crtc->num_mixers; ++i) {
4000 m = &sde_crtc->mixers[i];
4001 if (!m->hw_lm || !m->hw_lm->ops.setup_misr ||
4002 !sde_crtc->misr_enable)
4003 continue;
4004
4005 m->hw_lm->ops.setup_misr(m->hw_lm, true,
4006 sde_crtc->misr_frame_count);
4007 }
4008 break;
4009 case SDE_POWER_EVENT_PRE_DISABLE:
Veera Sundaram Sankaran33db4282017-11-01 12:45:25 -07004010 drm_for_each_encoder(encoder, crtc->dev) {
4011 if (encoder->crtc != crtc)
4012 continue;
4013 /*
4014 * disable the vsync source after updating the
4015 * rsc state. rsc state update might have vsync wait
4016 * and vsync source must be disabled after it.
4017 * It will avoid generating any vsync from this point
4018			 * till mode-2 entry. It is a SW workaround for a HW
4019 * limitation and should not be removed without
4020 * checking the updated design.
4021 */
4022 sde_encoder_control_te(encoder, false);
4023 }
4024
Dhaval Patel010f5172017-08-01 22:40:09 -07004025 for (i = 0; i < sde_crtc->num_mixers; ++i) {
4026 m = &sde_crtc->mixers[i];
4027 if (!m->hw_lm || !m->hw_lm->ops.collect_misr ||
4028 !sde_crtc->misr_enable)
4029 continue;
4030
4031 misr_status = m->hw_lm->ops.collect_misr(m->hw_lm);
4032 sde_crtc->misr_data[i] = misr_status ? misr_status :
4033 sde_crtc->misr_data[i];
4034 }
Ping Licc868fc2017-08-11 16:56:44 -07004035
4036 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
4037 node = NULL;
4038 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
4039 ret = 0;
4040 if (node->func)
4041 ret = node->func(crtc, false, &node->irq);
4042 if (ret)
4043 SDE_ERROR("%s failed to disable event %x\n",
4044 sde_crtc->name, node->event);
4045 }
4046 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
4047
Dhaval Patel010f5172017-08-01 22:40:09 -07004048 sde_cp_crtc_pre_ipc(crtc);
4049 break;
4050 case SDE_POWER_EVENT_POST_DISABLE:
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004051 /*
4052 * set revalidate flag in planes, so it will be re-programmed
4053 * in the next frame update
4054 */
4055 drm_atomic_crtc_for_each_plane(plane, crtc)
4056 sde_plane_set_revalidate(plane, true);
Alan Kwong8a9b38a2017-06-22 11:30:52 -04004057
Gopikrishnaiah Anandandb90fa12017-05-09 17:56:08 -07004058 sde_cp_crtc_suspend(crtc);
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05304059
4060 /**
	4061		 * if the destination scaler is enabled, it must be
	4062		 * reconfigured in the next frame update
4063 */
4064 if (cstate->num_ds_enabled)
4065 sde_crtc->ds_reconfig = true;
4066
Dhaval Patelc9e213b2017-11-02 12:13:12 -07004067 event.type = DRM_EVENT_SDE_POWER;
4068 event.length = sizeof(power_on);
4069 power_on = 0;
4070 msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
4071 (u8 *)&power_on);
Dhaval Patel010f5172017-08-01 22:40:09 -07004072 break;
4073 default:
4074 SDE_DEBUG("event:%d not handled\n", event_type);
4075 break;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004076 }
4077
4078 mutex_unlock(&sde_crtc->crtc_lock);
4079}
4080
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004081static void sde_crtc_disable(struct drm_crtc *crtc)
4082{
Veera Sundaram Sankaran45bab682018-03-20 18:25:59 -07004083 struct sde_kms *sde_kms;
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04004084 struct sde_crtc *sde_crtc;
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004085 struct sde_crtc_state *cstate;
Alan Kwong07da0982016-11-04 12:57:45 -04004086 struct drm_encoder *encoder;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004087 struct msm_drm_private *priv;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07004088 unsigned long flags;
4089 struct sde_crtc_irq_info *node = NULL;
Ping Lic5c2e0b2017-08-02 15:17:59 -07004090 struct drm_event event;
4091 u32 power_on;
Dhaval Patelfd8f7742017-08-10 13:11:22 -07004092 int ret, i;
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04004093
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004094 if (!crtc || !crtc->dev || !crtc->dev->dev_private || !crtc->state) {
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04004095 SDE_ERROR("invalid crtc\n");
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04004096 return;
4097 }
Alan Kwong1124f1f2017-11-10 18:14:39 -05004098
Veera Sundaram Sankaran45bab682018-03-20 18:25:59 -07004099 sde_kms = _sde_crtc_get_kms(crtc);
4100 if (!sde_kms) {
4101 SDE_ERROR("invalid kms\n");
4102 return;
4103 }
4104
Alan Kwong1124f1f2017-11-10 18:14:39 -05004105 if (!sde_kms_power_resource_is_enabled(crtc->dev)) {
4106 SDE_ERROR("power resource is not enabled\n");
4107 return;
4108 }
4109
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04004110 sde_crtc = to_sde_crtc(crtc);
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004111 cstate = to_sde_crtc_state(crtc->state);
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004112 priv = crtc->dev->dev_private;
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04004113
Alan Kwong163d2612016-11-03 00:56:56 -04004114 SDE_DEBUG("crtc%d\n", crtc->base.id);
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04004115
Clarence Ipd86f6e42017-08-08 18:31:00 -04004116 if (sde_kms_is_suspend_state(crtc->dev))
Clarence Ip7f70ce42017-03-20 06:53:46 -07004117 _sde_crtc_set_suspend(crtc, true);
4118
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004119 mutex_lock(&sde_crtc->crtc_lock);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004120 SDE_EVT32_VERBOSE(DRMID(crtc));
Alan Kwong628d19e2016-10-31 13:50:13 -04004121
Ping Lic5c2e0b2017-08-02 15:17:59 -07004122 /* update color processing on suspend */
4123 event.type = DRM_EVENT_CRTC_POWER;
4124 event.length = sizeof(u32);
4125 sde_cp_crtc_suspend(crtc);
4126 power_on = 0;
4127 msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
4128 (u8 *)&power_on);
4129
Sravanthi Kollukuduru7d080802017-10-05 15:49:12 +05304130 /* destination scaler if enabled should be reconfigured on resume */
4131 if (cstate->num_ds_enabled)
4132 sde_crtc->ds_reconfig = true;
4133
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07004134 /* wait for frame_event_done completion */
4135 if (_sde_crtc_wait_for_frame_done(crtc))
	4136		SDE_ERROR("crtc%d wait for frame done failed; frame_pending %d\n",
4137 crtc->base.id,
4138 atomic_read(&sde_crtc->frame_pending));
4139
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004140 SDE_EVT32(DRMID(crtc), sde_crtc->enabled, sde_crtc->suspend,
Alan Kwong8f43c012017-10-06 08:59:00 -04004141 sde_crtc->vblank_requested,
4142 crtc->state->active, crtc->state->enable);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004143 if (sde_crtc->enabled && !sde_crtc->suspend &&
4144 sde_crtc->vblank_requested) {
4145 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, false);
4146 if (ret)
4147 SDE_ERROR("%s vblank enable failed: %d\n",
4148 sde_crtc->name, ret);
Alan Kwong07da0982016-11-04 12:57:45 -04004149 }
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004150 sde_crtc->enabled = false;
Alan Kwong07da0982016-11-04 12:57:45 -04004151
Alan Kwong628d19e2016-10-31 13:50:13 -04004152 if (atomic_read(&sde_crtc->frame_pending)) {
Dhaval Patel6c666622017-03-21 23:02:59 -07004153 SDE_EVT32(DRMID(crtc), atomic_read(&sde_crtc->frame_pending),
4154 SDE_EVTLOG_FUNC_CASE2);
Alan Kwong9aa061c2016-11-06 21:17:12 -05004155 sde_core_perf_crtc_release_bw(crtc);
Alan Kwong628d19e2016-10-31 13:50:13 -04004156 atomic_set(&sde_crtc->frame_pending, 0);
4157 }
4158
Ping Li6d5bf542017-06-27 11:40:28 -07004159 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
4160 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
4161 ret = 0;
4162 if (node->func)
4163 ret = node->func(crtc, false, &node->irq);
4164 if (ret)
4165 SDE_ERROR("%s failed to disable event %x\n",
4166 sde_crtc->name, node->event);
4167 }
4168 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
4169
Veera Sundaram Sankaran45bab682018-03-20 18:25:59 -07004170 /* avoid clk/bw downvote if cont-splash is enabled */
4171 if (!sde_kms->splash_data.cont_splash_en)
4172 sde_core_perf_crtc_update(crtc, 0, true);
Alan Kwong9aa061c2016-11-06 21:17:12 -05004173
Alan Kwong628d19e2016-10-31 13:50:13 -04004174 drm_for_each_encoder(encoder, crtc->dev) {
4175 if (encoder->crtc != crtc)
4176 continue;
4177 sde_encoder_register_frame_event_callback(encoder, NULL, NULL);
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004178 cstate->rsc_client = NULL;
4179 cstate->rsc_update = false;
Alan Kwong628d19e2016-10-31 13:50:13 -04004180 }
4181
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004182 if (sde_crtc->power_event)
4183 sde_power_handle_unregister_event(&priv->phandle,
4184 sde_crtc->power_event);
4185
Dhaval Patelfd8f7742017-08-10 13:11:22 -07004186 /**
4187 * All callbacks are unregistered and frame done waits are complete
4188 * at this point. No buffers are accessed by hardware.
Alan Kwong8f43c012017-10-06 08:59:00 -04004189	 * Reset the fence timeline if the crtc will not be enabled in this commit.
Dhaval Patelfd8f7742017-08-10 13:11:22 -07004190 */
Alan Kwong8f43c012017-10-06 08:59:00 -04004191 if (!crtc->state->active || !crtc->state->enable) {
4192 sde_fence_signal(&sde_crtc->output_fence, ktime_get(), true);
4193 for (i = 0; i < cstate->num_connectors; ++i)
4194 sde_connector_commit_reset(cstate->connectors[i],
4195 ktime_get());
4196 }
Dhaval Patelfd8f7742017-08-10 13:11:22 -07004197
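	/* clear cached mixer assignments; they are re-acquired on the next commit */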
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04004198 memset(sde_crtc->mixers, 0, sizeof(sde_crtc->mixers));
4199 sde_crtc->num_mixers = 0;
Lloyd Atkinson94710bc2017-09-14 14:10:09 -04004200 sde_crtc->mixers_swapped = false;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07004201
Alan Kwong8411a9112017-06-06 19:29:01 -04004202 /* disable clk & bw control until clk & bw properties are set */
4203 cstate->bw_control = false;
Alan Kwong0230a102017-05-16 11:36:44 -07004204 cstate->bw_split_vote = false;
Alan Kwong8411a9112017-06-06 19:29:01 -04004205
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004206 mutex_unlock(&sde_crtc->crtc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004207}
4208
4209static void sde_crtc_enable(struct drm_crtc *crtc)
4210{
Clarence Ipcae1bb62016-07-07 12:07:13 -04004211 struct sde_crtc *sde_crtc;
Alan Kwong628d19e2016-10-31 13:50:13 -04004212 struct drm_encoder *encoder;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004213 struct msm_drm_private *priv;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07004214 unsigned long flags;
4215 struct sde_crtc_irq_info *node = NULL;
Ping Lic5c2e0b2017-08-02 15:17:59 -07004216 struct drm_event event;
4217 u32 power_on;
Sandeep Panda0a5946e2017-12-10 12:11:32 +05304218 int ret, i;
4219 struct sde_crtc_state *cstate;
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -04004220
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004221 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07004222 SDE_ERROR("invalid crtc\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04004223 return;
4224 }
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004225 priv = crtc->dev->dev_private;
Sandeep Panda0a5946e2017-12-10 12:11:32 +05304226 cstate = to_sde_crtc_state(crtc->state);
Clarence Ipcae1bb62016-07-07 12:07:13 -04004227
Alan Kwong1124f1f2017-11-10 18:14:39 -05004228 if (!sde_kms_power_resource_is_enabled(crtc->dev)) {
4229 SDE_ERROR("power resource is not enabled\n");
4230 return;
4231 }
4232
Alan Kwong163d2612016-11-03 00:56:56 -04004233 SDE_DEBUG("crtc%d\n", crtc->base.id);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004234 SDE_EVT32_VERBOSE(DRMID(crtc));
Clarence Ipcae1bb62016-07-07 12:07:13 -04004235 sde_crtc = to_sde_crtc(crtc);
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -04004236
Veera Sundaram Sankaran97dc5152017-10-10 20:24:48 -07004237 mutex_lock(&sde_crtc->crtc_lock);
4238 SDE_EVT32(DRMID(crtc), sde_crtc->enabled, sde_crtc->suspend,
4239 sde_crtc->vblank_requested);
4240
4241 /* return early if crtc is already enabled */
4242 if (sde_crtc->enabled) {
4243 if (msm_is_mode_seamless_dms(&crtc->state->adjusted_mode))
4244 SDE_DEBUG("%s extra crtc enable expected during DMS\n",
4245 sde_crtc->name);
4246 else
4247 WARN(1, "%s unexpected crtc enable\n", sde_crtc->name);
4248
4249 mutex_unlock(&sde_crtc->crtc_lock);
4250 return;
4251 }
4252
Alan Kwong628d19e2016-10-31 13:50:13 -04004253 drm_for_each_encoder(encoder, crtc->dev) {
4254 if (encoder->crtc != crtc)
4255 continue;
4256 sde_encoder_register_frame_event_callback(encoder,
4257 sde_crtc_frame_event_cb, (void *)crtc);
4258 }
4259
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004260 if (!sde_crtc->enabled && !sde_crtc->suspend &&
4261 sde_crtc->vblank_requested) {
4262 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, true);
4263 if (ret)
4264 SDE_ERROR("%s vblank enable failed: %d\n",
4265 sde_crtc->name, ret);
Lloyd Atkinsonb2be0c42017-07-17 16:41:00 -04004266 }
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004267 sde_crtc->enabled = true;
Ping Lic5c2e0b2017-08-02 15:17:59 -07004268
4269 /* update color processing on resume */
4270 event.type = DRM_EVENT_CRTC_POWER;
4271 event.length = sizeof(u32);
4272 sde_cp_crtc_resume(crtc);
4273 power_on = 1;
4274 msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
4275 (u8 *)&power_on);
4276
Lloyd Atkinsonb2be0c42017-07-17 16:41:00 -04004277 mutex_unlock(&sde_crtc->crtc_lock);
4278
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07004279 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
4280 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
4281 ret = 0;
4282 if (node->func)
4283 ret = node->func(crtc, true, &node->irq);
4284 if (ret)
4285 SDE_ERROR("%s failed to enable event %x\n",
4286 sde_crtc->name, node->event);
4287 }
4288 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004289
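	/*
	 * register for power events so the crtc hw state can be restored
	 * after a power collapse and safely quiesced before the next one
	 */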
4290 sde_crtc->power_event = sde_power_handle_register_event(
4291 &priv->phandle,
Ping Lie505f3b2017-06-19 14:19:08 -07004292 SDE_POWER_EVENT_POST_ENABLE | SDE_POWER_EVENT_POST_DISABLE |
4293 SDE_POWER_EVENT_PRE_DISABLE,
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004294 sde_crtc_handle_power_event, crtc, sde_crtc->name);
Sandeep Panda0a5946e2017-12-10 12:11:32 +05304295
4296 /* Enable ESD thread */
4297 for (i = 0; i < cstate->num_connectors; i++)
4298 sde_connector_schedule_status_work(cstate->connectors[i], true);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004299}
4300
4301struct plane_state {
Dhaval Patelec10fad2016-08-22 14:40:48 -07004302 struct sde_plane_state *sde_pstate;
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004303 const struct drm_plane_state *drm_pstate;
Clarence Ipc47a0692016-10-11 10:54:17 -04004304 int stage;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004305 u32 pipe_id;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004306};
4307
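/*
 * sort() comparator: orders plane states by the ZPOS property first and,
 * for equal zpos values, left to right by destination x position.
 */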
Clarence Ipc47a0692016-10-11 10:54:17 -04004308static int pstate_cmp(const void *a, const void *b)
4309{
4310 struct plane_state *pa = (struct plane_state *)a;
4311 struct plane_state *pb = (struct plane_state *)b;
4312 int rc = 0;
4313 int pa_zpos, pb_zpos;
4314
4315 pa_zpos = sde_plane_get_property(pa->sde_pstate, PLANE_PROP_ZPOS);
4316 pb_zpos = sde_plane_get_property(pb->sde_pstate, PLANE_PROP_ZPOS);
4317
4318 if (pa_zpos != pb_zpos)
4319 rc = pa_zpos - pb_zpos;
4320 else
4321 rc = pa->drm_pstate->crtc_x - pb->drm_pstate->crtc_x;
4322
4323 return rc;
4324}
4325
Dhaval Patela8d6bc62017-05-10 17:40:18 -07004326/* no input validation - caller API has all the checks */
4327static int _sde_crtc_excl_dim_layer_check(struct drm_crtc_state *state,
4328 struct plane_state pstates[], int cnt)
4329{
4330 struct sde_crtc_state *cstate = to_sde_crtc_state(state);
4331 struct drm_display_mode *mode = &state->adjusted_mode;
4332 const struct drm_plane_state *pstate;
4333 struct sde_plane_state *sde_pstate;
4334 int rc = 0, i;
4335
4336 /* Check dim layer rect bounds and stage */
4337 for (i = 0; i < cstate->num_dim_layers; i++) {
4338 if ((CHECK_LAYER_BOUNDS(cstate->dim_layer[i].rect.y,
4339 cstate->dim_layer[i].rect.h, mode->vdisplay)) ||
4340 (CHECK_LAYER_BOUNDS(cstate->dim_layer[i].rect.x,
4341 cstate->dim_layer[i].rect.w, mode->hdisplay)) ||
4342 (cstate->dim_layer[i].stage >= SDE_STAGE_MAX) ||
4343 (!cstate->dim_layer[i].rect.w) ||
4344 (!cstate->dim_layer[i].rect.h)) {
4345 SDE_ERROR("invalid dim_layer:{%d,%d,%d,%d}, stage:%d\n",
4346 cstate->dim_layer[i].rect.x,
4347 cstate->dim_layer[i].rect.y,
4348 cstate->dim_layer[i].rect.w,
4349 cstate->dim_layer[i].rect.h,
4350 cstate->dim_layer[i].stage);
4351 SDE_ERROR("display: %dx%d\n", mode->hdisplay,
4352 mode->vdisplay);
4353 rc = -E2BIG;
4354 goto end;
4355 }
4356 }
4357
Veera Sundaram Sankaran9887b9c2018-02-05 12:04:21 -08004358 /* log all src and excl_rect, useful for debugging */
Dhaval Patela8d6bc62017-05-10 17:40:18 -07004359 for (i = 0; i < cnt; i++) {
4360 pstate = pstates[i].drm_pstate;
4361 sde_pstate = to_sde_plane_state(pstate);
Veera Sundaram Sankaran9887b9c2018-02-05 12:04:21 -08004362 SDE_DEBUG("p %d z %d src{%d,%d,%d,%d} excl_rect{%d,%d,%d,%d}\n",
4363 pstate->plane->base.id, pstates[i].stage,
4364 pstate->crtc_x, pstate->crtc_y,
4365 pstate->crtc_w, pstate->crtc_h,
4366 sde_pstate->excl_rect.x, sde_pstate->excl_rect.y,
4367 sde_pstate->excl_rect.w, sde_pstate->excl_rect.h);
Dhaval Patela8d6bc62017-05-10 17:40:18 -07004368 }
4369
4370end:
4371 return rc;
4372}
4373
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004374static int _sde_crtc_check_secure_state(struct drm_crtc *crtc,
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004375 struct drm_crtc_state *state, struct plane_state pstates[],
4376 int cnt)
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004377{
Veera Sundaram Sankaran7b121cb2018-03-02 08:43:04 -08004378 struct drm_plane *plane;
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004379 struct drm_encoder *encoder;
4380 struct sde_crtc_state *cstate;
Raviteja Tamatam90b19fd2018-02-08 03:57:10 +05304381 struct sde_crtc *sde_crtc;
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08004382 struct sde_kms *sde_kms;
4383 struct sde_kms_smmu_state_data *smmu_state;
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004384 uint32_t secure;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004385 uint32_t fb_ns = 0, fb_sec = 0, fb_sec_dir = 0;
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004386 int encoder_cnt = 0, i;
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004387 int rc;
Raviteja Tamatam90b19fd2018-02-08 03:57:10 +05304388 bool is_video_mode = false;
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004389
4390 if (!crtc || !state) {
4391 SDE_ERROR("invalid arguments\n");
4392 return -EINVAL;
4393 }
4394
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08004395 sde_kms = _sde_crtc_get_kms(crtc);
4396 if (!sde_kms) {
4397 SDE_ERROR("invalid kms\n");
4398 return -EINVAL;
4399 }
4400
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004401 cstate = to_sde_crtc_state(state);
4402
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004403 secure = sde_crtc_get_property(cstate, CRTC_PROP_SECURITY_LEVEL);
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004404
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004405 rc = _sde_crtc_find_plane_fb_modes(state, &fb_ns, &fb_sec, &fb_sec_dir);
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004406 if (rc)
4407 return rc;
4408
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004409 if (secure == SDE_DRM_SEC_ONLY) {
4410 /*
4411 * validate planes - only fb_sec_dir is allowed during sec_crtc
4412 * - fb_sec_dir is for secure camera preview and
4413 * secure display use case
4414 * - fb_sec is for secure video playback
4415 * - fb_ns is for normal non secure use cases
4416 */
4417 if (fb_ns || fb_sec) {
4418 SDE_ERROR(
4419 "crtc%d: invalid fb_modes Sec:%d, NS:%d, Sec_Dir:%d\n",
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004420 crtc->base.id, fb_sec, fb_ns, fb_sec_dir);
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004421 return -EINVAL;
4422 }
4423
Veera Sundaram Sankaran7b121cb2018-03-02 08:43:04 -08004424 /*
4425 * - only one blending stage is allowed in sec_crtc
4426 * - validate if pipe is allowed for sec-ui updates
4427 */
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004428 for (i = 1; i < cnt; i++) {
Veera Sundaram Sankaran7b121cb2018-03-02 08:43:04 -08004429 if (!pstates[i].drm_pstate
4430 || !pstates[i].drm_pstate->plane) {
4431 SDE_ERROR("crtc%d: invalid pstate at i:%d\n",
4432 crtc->base.id, i);
4433 return -EINVAL;
4434 }
4435 plane = pstates[i].drm_pstate->plane;
4436
4437 if (!sde_plane_is_sec_ui_allowed(plane)) {
4438 SDE_ERROR("crtc%d: sec-ui not allowed in p%d\n",
4439 crtc->base.id, plane->base.id);
4440 return -EINVAL;
4441 }
4442
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004443 if (pstates[i].stage != pstates[i-1].stage) {
4444 SDE_ERROR(
4445 "crtc%d: invalid blend stages %d:%d, %d:%d\n",
4446 crtc->base.id, i, pstates[i].stage,
4447 i-1, pstates[i-1].stage);
4448 return -EINVAL;
4449 }
4450 }
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004451 }
4452
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004453 /*
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004454	 * secure_crtc is not allowed in a shared topology
4455 * across different encoders.
4456 */
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004457 if (fb_sec_dir) {
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004458 drm_for_each_encoder(encoder, crtc->dev)
4459 if (encoder->crtc == crtc)
4460 encoder_cnt++;
4461
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004462 if (encoder_cnt > MAX_ALLOWED_ENCODER_CNT_PER_SECURE_CRTC) {
4463 SDE_ERROR("crtc%d, invalid virtual encoder crtc%d\n",
4464 crtc->base.id, encoder_cnt);
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004465 return -EINVAL;
4466
4467 }
4468 }
Raviteja Tamatam90b19fd2018-02-08 03:57:10 +05304469
4470 drm_for_each_encoder(encoder, crtc->dev) {
4471 if (encoder->crtc != crtc)
4472 continue;
4473
4474 is_video_mode |= sde_encoder_check_mode(encoder,
4475 MSM_DISPLAY_CAP_VID_MODE);
4476 }
4477
4478 sde_crtc = to_sde_crtc(crtc);
Veera Sundaram Sankaran8fd692a2018-02-26 17:49:14 -08004479 smmu_state = &sde_kms->smmu_state;
Raviteja Tamatam90b19fd2018-02-08 03:57:10 +05304480 /*
	4481	 * In video mode, require a null commit (no planes staged) before
	4482	 * a transition from secure to non-secure and vice versa
4483 */
4484 if (is_video_mode && smmu_state &&
4485 state->plane_mask && crtc->state->plane_mask &&
4486 ((fb_sec_dir && ((smmu_state->state == ATTACHED) &&
4487 (secure == SDE_DRM_SEC_ONLY))) ||
4488 (fb_ns && ((smmu_state->state == DETACHED) ||
4489 (smmu_state->state == DETACH_ALL_REQ))))) {
4490
4491 SDE_EVT32(DRMID(&sde_crtc->base), fb_ns, fb_sec_dir,
4492 smmu_state->state, crtc->state->plane_mask,
4493 crtc->state->plane_mask);
4494 SDE_DEBUG("crtc %d, Invalid secure transition %x\n",
4495 crtc->base.id, smmu_state->state);
4496 return -EINVAL;
4497
4498 }
4499
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004500 SDE_DEBUG("crtc:%d Secure validation successful\n", crtc->base.id);
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004501
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004502 return 0;
4503}
4504
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004505static int sde_crtc_atomic_check(struct drm_crtc *crtc,
4506 struct drm_crtc_state *state)
4507{
Clarence Ipcae1bb62016-07-07 12:07:13 -04004508 struct sde_crtc *sde_crtc;
Raviteja Tamatamf61d7c02017-11-01 19:01:32 +05304509 struct plane_state *pstates = NULL;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004510 struct sde_crtc_state *cstate;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004511
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004512 const struct drm_plane_state *pstate;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004513 struct drm_plane *plane;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004514 struct drm_display_mode *mode;
4515
4516 int cnt = 0, rc = 0, mixer_width, i, z_pos;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004517
Raviteja Tamatamf61d7c02017-11-01 19:01:32 +05304518 struct sde_multirect_plane_states *multirect_plane = NULL;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004519 int multirect_count = 0;
4520 const struct drm_plane_state *pipe_staged[SSPP_MAX];
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004521 int left_zpos_cnt = 0, right_zpos_cnt = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004522
Clarence Ipcae1bb62016-07-07 12:07:13 -04004523 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07004524 SDE_ERROR("invalid crtc\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04004525 return -EINVAL;
4526 }
4527
Alan Kwongcdb2f282017-03-18 13:42:06 -07004528 sde_crtc = to_sde_crtc(crtc);
4529 cstate = to_sde_crtc_state(state);
4530
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004531 if (!state->enable || !state->active) {
4532 SDE_DEBUG("crtc%d -> enable %d, active %d, skip atomic_check\n",
4533 crtc->base.id, state->enable, state->active);
Alan Kwongcdb2f282017-03-18 13:42:06 -07004534 goto end;
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004535 }
4536
Raviteja Tamatamf61d7c02017-11-01 19:01:32 +05304537 pstates = kzalloc(SDE_PSTATES_MAX *
4538 sizeof(struct plane_state), GFP_KERNEL);
4539
4540 multirect_plane = kzalloc(SDE_MULTIRECT_PLANE_MAX *
4541 sizeof(struct sde_multirect_plane_states), GFP_KERNEL);
4542
4543 if (!pstates || !multirect_plane) {
4544 rc = -ENOMEM;
4545 goto end;
4546 }
4547
Dhaval Patelec10fad2016-08-22 14:40:48 -07004548 mode = &state->adjusted_mode;
	4549	SDE_DEBUG("%s: check\n", sde_crtc->name);
Clarence Ipcae1bb62016-07-07 12:07:13 -04004550
Clarence Ip90b282d2017-05-04 10:00:32 -07004551 /* force a full mode set if active state changed */
4552 if (state->active_changed)
4553 state->mode_changed = true;
4554
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004555 memset(pipe_staged, 0, sizeof(pipe_staged));
4556
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05304557 rc = _sde_crtc_check_dest_scaler_data(crtc, state);
4558 if (rc) {
4559 SDE_ERROR("crtc%d failed dest scaler check %d\n",
4560 crtc->base.id, rc);
4561 goto end;
4562 }
4563
4564 mixer_width = sde_crtc_get_mixer_width(sde_crtc, cstate, mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004565
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05004566 _sde_crtc_setup_is_ppsplit(state);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05004567 _sde_crtc_setup_lm_bounds(crtc, state);
4568
Clarence Ip4f339092018-01-05 13:29:04 -05004569 /* record current/previous sbuf clock rate for later */
4570 cstate->sbuf_clk_rate[0] = cstate->sbuf_clk_rate[1];
4571 cstate->sbuf_clk_rate[1] = sde_crtc_get_property(
4572 cstate, CRTC_PROP_ROT_CLK);
4573 cstate->sbuf_clk_shifted = true;
4574
Dhaval Patelec10fad2016-08-22 14:40:48 -07004575 /* get plane state for all drm planes associated with crtc state */
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004576 drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
Clarence Ipc47a0692016-10-11 10:54:17 -04004577 if (IS_ERR_OR_NULL(pstate)) {
4578 rc = PTR_ERR(pstate);
4579 SDE_ERROR("%s: failed to get plane%d state, %d\n",
4580 sde_crtc->name, plane->base.id, rc);
Alan Kwong85767282016-10-03 18:03:37 -04004581 goto end;
4582 }
Clarence Ipa8a93eb2017-11-24 10:38:24 -05004583
4584 /* identify attached planes that are not in the delta state */
4585 if (!drm_atomic_get_existing_plane_state(state->state, plane)) {
Alan Kwonga15ae762017-12-16 18:03:16 -05004586 rc = sde_plane_confirm_hw_rsvps(plane, pstate, state);
Clarence Ipa8a93eb2017-11-24 10:38:24 -05004587 if (rc) {
4588 SDE_ERROR("crtc%d confirmation hw failed %d\n",
4589 crtc->base.id, rc);
4590 goto end;
4591 }
4592 }
4593
Raviteja Tamatamf61d7c02017-11-01 19:01:32 +05304594 if (cnt >= SDE_PSTATES_MAX)
Clarence Ipc47a0692016-10-11 10:54:17 -04004595 continue;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004596
Dhaval Patelec10fad2016-08-22 14:40:48 -07004597 pstates[cnt].sde_pstate = to_sde_plane_state(pstate);
4598 pstates[cnt].drm_pstate = pstate;
Clarence Ipc47a0692016-10-11 10:54:17 -04004599 pstates[cnt].stage = sde_plane_get_property(
4600 pstates[cnt].sde_pstate, PLANE_PROP_ZPOS);
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004601 pstates[cnt].pipe_id = sde_plane_pipe(plane);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004602
4603 /* check dim layer stage with every plane */
4604 for (i = 0; i < cstate->num_dim_layers; i++) {
Veera Sundaram Sankaranb9ed6bd2017-07-11 19:18:03 -07004605 if (cstate->dim_layer[i].stage
4606 == (pstates[cnt].stage + SDE_STAGE_0)) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07004607 SDE_ERROR(
4608 "plane:%d/dim_layer:%i-same stage:%d\n",
4609 plane->base.id, i,
4610 cstate->dim_layer[i].stage);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004611 rc = -EINVAL;
4612 goto end;
4613 }
4614 }
4615
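		/*
		 * two plane states mapped to the same SSPP form a multirect
		 * pair (r0/r1) and are validated together further below
		 */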
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004616 if (pipe_staged[pstates[cnt].pipe_id]) {
4617 multirect_plane[multirect_count].r0 =
4618 pipe_staged[pstates[cnt].pipe_id];
4619 multirect_plane[multirect_count].r1 = pstate;
4620 multirect_count++;
4621
4622 pipe_staged[pstates[cnt].pipe_id] = NULL;
4623 } else {
4624 pipe_staged[pstates[cnt].pipe_id] = pstate;
4625 }
4626
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004627 cnt++;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004628
4629 if (CHECK_LAYER_BOUNDS(pstate->crtc_y, pstate->crtc_h,
4630 mode->vdisplay) ||
4631 CHECK_LAYER_BOUNDS(pstate->crtc_x, pstate->crtc_w,
4632 mode->hdisplay)) {
4633 SDE_ERROR("invalid vertical/horizontal destination\n");
4634 SDE_ERROR("y:%d h:%d vdisp:%d x:%d w:%d hdisp:%d\n",
4635 pstate->crtc_y, pstate->crtc_h, mode->vdisplay,
4636 pstate->crtc_x, pstate->crtc_w, mode->hdisplay);
4637 rc = -E2BIG;
4638 goto end;
4639 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004640 }
4641
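	/*
	 * pipes staged with a single rect fall back to single-rect mode;
	 * an R1-only virtual plane cannot be staged on its own
	 */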
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004642 for (i = 1; i < SSPP_MAX; i++) {
Jeykumar Sankarane964dc72017-05-10 19:26:43 -07004643 if (pipe_staged[i]) {
4644 sde_plane_clear_multirect(pipe_staged[i]);
4645
4646 if (is_sde_plane_virtual(pipe_staged[i]->plane)) {
Veera Sundaram Sankaran372596d2017-06-21 17:57:25 -07004647 SDE_ERROR(
4648 "r1 only virt plane:%d not supported\n",
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004649 pipe_staged[i]->plane->base.id);
Veera Sundaram Sankaran372596d2017-06-21 17:57:25 -07004650 rc = -EINVAL;
Jeykumar Sankarane964dc72017-05-10 19:26:43 -07004651 goto end;
4652 }
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004653 }
4654 }
4655
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004656 /* assign mixer stages based on sorted zpos property */
4657 sort(pstates, cnt, sizeof(pstates[0]), pstate_cmp, NULL);
4658
Veera Sundaram Sankaranf9fca5f2017-10-30 14:30:29 -07004659 rc = _sde_crtc_check_secure_state(crtc, state, pstates, cnt);
4660 if (rc)
4661 goto end;
4662
Dhaval Patela8d6bc62017-05-10 17:40:18 -07004663 rc = _sde_crtc_excl_dim_layer_check(state, pstates, cnt);
4664 if (rc)
4665 goto end;
4666
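	/*
	 * for non-custom clients, collapse the sorted zpos values into a
	 * contiguous stage index, e.g. zpos {2, 2, 5, 9} becomes {0, 0, 1, 2}
	 */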
Clarence Ipc47a0692016-10-11 10:54:17 -04004667 if (!sde_is_custom_client()) {
4668 int stage_old = pstates[0].stage;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004669
Clarence Ipc47a0692016-10-11 10:54:17 -04004670 z_pos = 0;
4671 for (i = 0; i < cnt; i++) {
4672 if (stage_old != pstates[i].stage)
4673 ++z_pos;
4674 stage_old = pstates[i].stage;
4675 pstates[i].stage = z_pos;
4676 }
4677 }
4678
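	/*
	 * validate per-stage plane counts: at most two planes may share a
	 * blend stage on each of the left and right mixers
	 */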
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004679 z_pos = -1;
Clarence Ipc47a0692016-10-11 10:54:17 -04004680 for (i = 0; i < cnt; i++) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004681 /* reset counts at every new blend stage */
4682 if (pstates[i].stage != z_pos) {
4683 left_zpos_cnt = 0;
4684 right_zpos_cnt = 0;
4685 z_pos = pstates[i].stage;
4686 }
Clarence Ipc47a0692016-10-11 10:54:17 -04004687
4688 /* verify z_pos setting before using it */
Clarence Ip649989a2016-10-21 14:28:34 -04004689 if (z_pos >= SDE_STAGE_MAX - SDE_STAGE_0) {
Clarence Ipc47a0692016-10-11 10:54:17 -04004690 SDE_ERROR("> %d plane stages assigned\n",
4691 SDE_STAGE_MAX - SDE_STAGE_0);
4692 rc = -EINVAL;
4693 goto end;
4694 } else if (pstates[i].drm_pstate->crtc_x < mixer_width) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004695 if (left_zpos_cnt == 2) {
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004696 SDE_ERROR("> 2 planes @ stage %d on left\n",
Dhaval Patelec10fad2016-08-22 14:40:48 -07004697 z_pos);
4698 rc = -EINVAL;
4699 goto end;
4700 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004701 left_zpos_cnt++;
4702
Dhaval Patelec10fad2016-08-22 14:40:48 -07004703 } else {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004704 if (right_zpos_cnt == 2) {
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004705 SDE_ERROR("> 2 planes @ stage %d on right\n",
Dhaval Patelec10fad2016-08-22 14:40:48 -07004706 z_pos);
4707 rc = -EINVAL;
4708 goto end;
4709 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004710 right_zpos_cnt++;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004711 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004712
Clarence Ipc47a0692016-10-11 10:54:17 -04004713 pstates[i].sde_pstate->stage = z_pos + SDE_STAGE_0;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004714		SDE_DEBUG("%s: zpos %d\n", sde_crtc->name, z_pos);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004715 }
4716
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004717 for (i = 0; i < multirect_count; i++) {
4718 if (sde_plane_validate_multirect_v2(&multirect_plane[i])) {
4719 SDE_ERROR(
4720 "multirect validation failed for planes (%d - %d)\n",
4721 multirect_plane[i].r0->plane->base.id,
4722 multirect_plane[i].r1->plane->base.id);
4723 rc = -EINVAL;
Alan Kwong9aa061c2016-11-06 21:17:12 -05004724 goto end;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004725 }
4726 }
4727
Alan Kwong9aa061c2016-11-06 21:17:12 -05004728 rc = sde_core_perf_crtc_check(crtc, state);
4729 if (rc) {
4730 SDE_ERROR("crtc%d failed performance check %d\n",
4731 crtc->base.id, rc);
4732 goto end;
4733 }
4734
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004735 /* validate source split:
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004736 * use pstates sorted by stage to check planes on same stage
	4737	 * we assume that all pipes are in source split so it's valid to compare
4738 * without taking into account left/right mixer placement
4739 */
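	/*
	 * example of a valid split at one stage: left plane dest
	 * {x=0, w=960} and right plane dest {x=960, w=960} with the same
	 * destination y offset and height on both planes
	 */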
4740 for (i = 1; i < cnt; i++) {
4741 struct plane_state *prv_pstate, *cur_pstate;
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004742 struct sde_rect left_rect, right_rect;
4743 int32_t left_pid, right_pid;
4744 int32_t stage;
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004745
4746 prv_pstate = &pstates[i - 1];
4747 cur_pstate = &pstates[i];
4748 if (prv_pstate->stage != cur_pstate->stage)
4749 continue;
4750
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004751 stage = cur_pstate->stage;
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004752
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004753 left_pid = prv_pstate->sde_pstate->base.plane->base.id;
4754 POPULATE_RECT(&left_rect, prv_pstate->drm_pstate->crtc_x,
4755 prv_pstate->drm_pstate->crtc_y,
4756 prv_pstate->drm_pstate->crtc_w,
4757 prv_pstate->drm_pstate->crtc_h, false);
4758
4759 right_pid = cur_pstate->sde_pstate->base.plane->base.id;
4760 POPULATE_RECT(&right_rect, cur_pstate->drm_pstate->crtc_x,
4761 cur_pstate->drm_pstate->crtc_y,
4762 cur_pstate->drm_pstate->crtc_w,
4763 cur_pstate->drm_pstate->crtc_h, false);
4764
4765 if (right_rect.x < left_rect.x) {
4766 swap(left_pid, right_pid);
4767 swap(left_rect, right_rect);
4768 }
4769
4770 /**
4771 * - planes are enumerated in pipe-priority order such that
4772 * planes with lower drm_id must be left-most in a shared
4773 * blend-stage when using source split.
4774 * - planes in source split must be contiguous in width
4775 * - planes in source split must have same dest yoff and height
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004776 */
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004777 if (right_pid < left_pid) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004778 SDE_ERROR(
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004779 "invalid src split cfg. priority mismatch. stage: %d left: %d right: %d\n",
4780 stage, left_pid, right_pid);
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004781 rc = -EINVAL;
4782 goto end;
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004783 } else if (right_rect.x != (left_rect.x + left_rect.w)) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004784 SDE_ERROR(
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004785 "non-contiguous coordinates for src split. stage: %d left: %d - %d right: %d - %d\n",
4786 stage, left_rect.x, left_rect.w,
4787 right_rect.x, right_rect.w);
4788 rc = -EINVAL;
4789 goto end;
4790 } else if ((left_rect.y != right_rect.y) ||
4791 (left_rect.h != right_rect.h)) {
4792 SDE_ERROR(
4793 "source split at stage: %d. invalid yoff/height: l_y: %d r_y: %d l_h: %d r_h: %d\n",
4794 stage, left_rect.y, right_rect.y,
4795 left_rect.h, right_rect.h);
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004796 rc = -EINVAL;
4797 goto end;
4798 }
4799 }
4800
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04004801 rc = _sde_crtc_check_rois(crtc, state);
4802 if (rc) {
4803 SDE_ERROR("crtc%d failed roi check %d\n", crtc->base.id, rc);
4804 goto end;
4805 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004806
Dhaval Patelec10fad2016-08-22 14:40:48 -07004807end:
Raviteja Tamatamf61d7c02017-11-01 19:01:32 +05304808 kfree(pstates);
4809 kfree(multirect_plane);
Alan Kwongcdb2f282017-03-18 13:42:06 -07004810 _sde_crtc_rp_free_unused(&cstate->rp);
Dhaval Patelec10fad2016-08-22 14:40:48 -07004811 return rc;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004812}
4813
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -04004814int sde_crtc_vblank(struct drm_crtc *crtc, bool en)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004815{
Clarence Ip7f70ce42017-03-20 06:53:46 -07004816 struct sde_crtc *sde_crtc;
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004817 int ret;
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -04004818
Clarence Ip7f70ce42017-03-20 06:53:46 -07004819 if (!crtc) {
4820 SDE_ERROR("invalid crtc\n");
4821 return -EINVAL;
4822 }
4823 sde_crtc = to_sde_crtc(crtc);
4824
4825 mutex_lock(&sde_crtc->crtc_lock);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004826 SDE_EVT32(DRMID(&sde_crtc->base), en, sde_crtc->enabled,
4827 sde_crtc->suspend, sde_crtc->vblank_requested);
4828 if (sde_crtc->enabled && !sde_crtc->suspend) {
4829 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, en);
4830 if (ret)
4831 SDE_ERROR("%s vblank enable failed: %d\n",
4832 sde_crtc->name, ret);
4833 }
4834 sde_crtc->vblank_requested = en;
Clarence Ip7f70ce42017-03-20 06:53:46 -07004835 mutex_unlock(&sde_crtc->crtc_lock);
Clarence Ip9728a1d2017-04-18 22:22:13 -04004836
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004837 return 0;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004838}
4839
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004840void sde_crtc_cancel_pending_flip(struct drm_crtc *crtc, struct drm_file *file)
4841{
4842 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
4843
Alan Kwong163d2612016-11-03 00:56:56 -04004844 SDE_DEBUG("%s: cancel: %p\n", sde_crtc->name, file);
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04004845 _sde_crtc_complete_flip(crtc, file);
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004846}
4847
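/**
 * sde_crtc_helper_reset_custom_properties - reset custom crtc properties
 * @crtc: Pointer to drm crtc structure
 * @crtc_state: Pointer to the crtc state to reset
 *
 * Walks all installed crtc properties and restores any non-default value
 * in the given state back to its default, skipping properties that were
 * not installed for this device.
 * @Returns: Zero on success
 */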
Lloyd Atkinsone08229c2017-10-02 17:53:30 -04004848int sde_crtc_helper_reset_custom_properties(struct drm_crtc *crtc,
4849 struct drm_crtc_state *crtc_state)
4850{
4851 struct sde_crtc *sde_crtc;
4852 struct sde_crtc_state *cstate;
4853 struct drm_property *drm_prop;
4854 enum msm_mdp_crtc_property prop_idx;
4855
4856 if (!crtc || !crtc_state) {
4857 SDE_ERROR("invalid params\n");
4858 return -EINVAL;
4859 }
4860
4861 sde_crtc = to_sde_crtc(crtc);
4862 cstate = to_sde_crtc_state(crtc_state);
4863
4864 for (prop_idx = 0; prop_idx < CRTC_PROP_COUNT; prop_idx++) {
4865 uint64_t val = cstate->property_values[prop_idx].value;
4866 uint64_t def;
4867 int ret;
4868
4869 drm_prop = msm_property_index_to_drm_property(
4870 &sde_crtc->property_info, prop_idx);
4871 if (!drm_prop) {
4872 /* not all props will be installed, based on caps */
4873 SDE_DEBUG("%s: invalid property index %d\n",
4874 sde_crtc->name, prop_idx);
4875 continue;
4876 }
4877
4878 def = msm_property_get_default(&sde_crtc->property_info,
4879 prop_idx);
4880 if (val == def)
4881 continue;
4882
4883 SDE_DEBUG("%s: set prop %s idx %d from %llu to %llu\n",
4884 sde_crtc->name, drm_prop->name, prop_idx, val,
4885 def);
4886
4887 ret = drm_atomic_crtc_set_property(crtc, crtc_state, drm_prop,
4888 def);
4889 if (ret) {
4890 SDE_ERROR("%s: set property failed, idx %d ret %d\n",
4891 sde_crtc->name, prop_idx, ret);
4892 continue;
4893 }
4894 }
4895
4896 return 0;
4897}
4898
Clarence Ip7a753bb2016-07-07 11:47:44 -04004899/**
4900 * sde_crtc_install_properties - install all drm properties for crtc
4901 * @crtc: Pointer to drm crtc structure
4902 */
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004903static void sde_crtc_install_properties(struct drm_crtc *crtc,
4904 struct sde_mdss_cfg *catalog)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004905{
Clarence Ip7a753bb2016-07-07 11:47:44 -04004906 struct sde_crtc *sde_crtc;
4907 struct drm_device *dev;
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004908 struct sde_kms_info *info;
Alan Kwong9aa061c2016-11-06 21:17:12 -05004909 struct sde_kms *sde_kms;
Abhijit Kulkarni50d69442017-04-11 19:50:47 -07004910 static const struct drm_prop_enum_list e_secure_level[] = {
4911 {SDE_DRM_SEC_NON_SEC, "sec_and_non_sec"},
4912 {SDE_DRM_SEC_ONLY, "sec_only"},
4913 };
Clarence Ip7a753bb2016-07-07 11:47:44 -04004914
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04004915 SDE_DEBUG("\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004916
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004917 if (!crtc || !catalog) {
4918 SDE_ERROR("invalid crtc or catalog\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004919 return;
4920 }
4921
4922 sde_crtc = to_sde_crtc(crtc);
4923 dev = crtc->dev;
Alan Kwong9aa061c2016-11-06 21:17:12 -05004924 sde_kms = _sde_crtc_get_kms(crtc);
Clarence Ip7a753bb2016-07-07 11:47:44 -04004925
Narendra Muppallaec11a0a2017-06-15 15:35:17 -07004926 if (!sde_kms) {
4927 SDE_ERROR("invalid argument\n");
4928 return;
4929 }
4930
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004931 info = kzalloc(sizeof(struct sde_kms_info), GFP_KERNEL);
4932 if (!info) {
4933 SDE_ERROR("failed to allocate info memory\n");
4934 return;
4935 }
4936
Clarence Ip7a753bb2016-07-07 11:47:44 -04004937 /* range properties */
4938 msm_property_install_range(&sde_crtc->property_info,
Dhaval Patel4e574842016-08-23 15:11:37 -07004939 "input_fence_timeout", 0x0, 0, SDE_CRTC_MAX_INPUT_FENCE_TIMEOUT,
4940 SDE_CRTC_INPUT_FENCE_TIMEOUT, CRTC_PROP_INPUT_FENCE_TIMEOUT);
4941
Dhaval Patelb24b2d62017-11-03 18:10:26 -07004942 msm_property_install_volatile_range(&sde_crtc->property_info,
4943 "output_fence", 0x0, 0, ~0, 0, CRTC_PROP_OUTPUT_FENCE);
Clarence Ip1d9728b2016-09-01 11:10:54 -04004944
4945 msm_property_install_range(&sde_crtc->property_info,
4946 "output_fence_offset", 0x0, 0, 1, 0,
4947 CRTC_PROP_OUTPUT_FENCE_OFFSET);
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004948
Alan Kwong9aa061c2016-11-06 21:17:12 -05004949 msm_property_install_range(&sde_crtc->property_info,
4950 "core_clk", 0x0, 0, U64_MAX,
4951 sde_kms->perf.max_core_clk_rate,
4952 CRTC_PROP_CORE_CLK);
4953 msm_property_install_range(&sde_crtc->property_info,
4954 "core_ab", 0x0, 0, U64_MAX,
Alan Kwongff30f4a2017-05-23 12:02:00 -07004955 catalog->perf.max_bw_high * 1000ULL,
Alan Kwong9aa061c2016-11-06 21:17:12 -05004956 CRTC_PROP_CORE_AB);
4957 msm_property_install_range(&sde_crtc->property_info,
4958 "core_ib", 0x0, 0, U64_MAX,
Alan Kwongff30f4a2017-05-23 12:02:00 -07004959 catalog->perf.max_bw_high * 1000ULL,
Alan Kwong9aa061c2016-11-06 21:17:12 -05004960 CRTC_PROP_CORE_IB);
Alan Kwong4aacd532017-02-04 18:51:33 -08004961 msm_property_install_range(&sde_crtc->property_info,
Alan Kwong0230a102017-05-16 11:36:44 -07004962 "llcc_ab", 0x0, 0, U64_MAX,
Alan Kwongff30f4a2017-05-23 12:02:00 -07004963 catalog->perf.max_bw_high * 1000ULL,
Alan Kwong0230a102017-05-16 11:36:44 -07004964 CRTC_PROP_LLCC_AB);
Alan Kwong8c176bf2017-02-09 19:34:32 -08004965 msm_property_install_range(&sde_crtc->property_info,
Alan Kwong0230a102017-05-16 11:36:44 -07004966 "llcc_ib", 0x0, 0, U64_MAX,
Alan Kwongff30f4a2017-05-23 12:02:00 -07004967 catalog->perf.max_bw_high * 1000ULL,
Alan Kwong0230a102017-05-16 11:36:44 -07004968 CRTC_PROP_LLCC_IB);
4969 msm_property_install_range(&sde_crtc->property_info,
4970 "dram_ab", 0x0, 0, U64_MAX,
4971 catalog->perf.max_bw_high * 1000ULL,
4972 CRTC_PROP_DRAM_AB);
4973 msm_property_install_range(&sde_crtc->property_info,
4974 "dram_ib", 0x0, 0, U64_MAX,
4975 catalog->perf.max_bw_high * 1000ULL,
4976 CRTC_PROP_DRAM_IB);
Alan Kwong8c176bf2017-02-09 19:34:32 -08004977 msm_property_install_range(&sde_crtc->property_info,
Alan Kwong4aacd532017-02-04 18:51:33 -08004978 "rot_prefill_bw", 0, 0, U64_MAX,
4979 catalog->perf.max_bw_high * 1000ULL,
4980 CRTC_PROP_ROT_PREFILL_BW);
Alan Kwong8c176bf2017-02-09 19:34:32 -08004981 msm_property_install_range(&sde_crtc->property_info,
4982 "rot_clk", 0, 0, U64_MAX,
4983 sde_kms->perf.max_core_clk_rate,
4984 CRTC_PROP_ROT_CLK);
Alan Kwong9aa061c2016-11-06 21:17:12 -05004985
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05304986 msm_property_install_range(&sde_crtc->property_info,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07004987 "idle_time", 0, 0, U64_MAX, 0,
Dhaval Patele17e0ee2017-08-23 18:01:42 -07004988 CRTC_PROP_IDLE_TIMEOUT);
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05304989
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004990 msm_property_install_blob(&sde_crtc->property_info, "capabilities",
4991 DRM_MODE_PROP_IMMUTABLE, CRTC_PROP_INFO);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004992
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04004993 msm_property_install_volatile_range(&sde_crtc->property_info,
4994 "sde_drm_roi_v1", 0x0, 0, ~0, 0, CRTC_PROP_ROI_V1);
4995
Abhijit Kulkarni50d69442017-04-11 19:50:47 -07004996 msm_property_install_enum(&sde_crtc->property_info, "security_level",
4997 0x0, 0, e_secure_level,
4998 ARRAY_SIZE(e_secure_level),
4999 CRTC_PROP_SECURITY_LEVEL);
5000
Dhaval Patele4a5dda2016-10-13 19:29:30 -07005001 sde_kms_info_reset(info);
5002
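	/*
	 * the key/value pairs added below are exported to userspace through
	 * the immutable "capabilities" blob property installed above
	 */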
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07005003 if (catalog->has_dim_layer) {
5004 msm_property_install_volatile_range(&sde_crtc->property_info,
5005 "dim_layer_v1", 0x0, 0, ~0, 0, CRTC_PROP_DIM_LAYER_V1);
5006 sde_kms_info_add_keyint(info, "dim_layer_v1_max_layers",
5007 SDE_MAX_DIM_LAYERS);
5008 }
5009
Dhaval Patele4a5dda2016-10-13 19:29:30 -07005010 sde_kms_info_add_keyint(info, "hw_version", catalog->hwversion);
5011 sde_kms_info_add_keyint(info, "max_linewidth",
5012 catalog->max_mixer_width);
5013 sde_kms_info_add_keyint(info, "max_blendstages",
5014 catalog->max_mixer_blendstages);
5015 if (catalog->qseed_type == SDE_SSPP_SCALER_QSEED2)
5016 sde_kms_info_add_keystr(info, "qseed_type", "qseed2");
5017 if (catalog->qseed_type == SDE_SSPP_SCALER_QSEED3)
5018 sde_kms_info_add_keystr(info, "qseed_type", "qseed3");
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08005019
5020 if (sde_is_custom_client()) {
5021 if (catalog->smart_dma_rev == SDE_SSPP_SMART_DMA_V1)
5022 sde_kms_info_add_keystr(info,
5023 "smart_dma_rev", "smart_dma_v1");
5024 if (catalog->smart_dma_rev == SDE_SSPP_SMART_DMA_V2)
5025 sde_kms_info_add_keystr(info,
5026 "smart_dma_rev", "smart_dma_v2");
5027 }
5028
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05305029 if (catalog->mdp[0].has_dest_scaler) {
5030 sde_kms_info_add_keyint(info, "has_dest_scaler",
5031 catalog->mdp[0].has_dest_scaler);
5032 sde_kms_info_add_keyint(info, "dest_scaler_count",
5033 catalog->ds_count);
5034
5035 if (catalog->ds[0].top) {
5036 sde_kms_info_add_keyint(info,
5037 "max_dest_scaler_input_width",
5038 catalog->ds[0].top->maxinputwidth);
5039 sde_kms_info_add_keyint(info,
5040 "max_dest_scaler_output_width",
5041 catalog->ds[0].top->maxinputwidth);
5042 sde_kms_info_add_keyint(info, "max_dest_scale_up",
5043 catalog->ds[0].top->maxupscale);
5044 }
5045
5046 if (catalog->ds[0].features & BIT(SDE_SSPP_SCALER_QSEED3)) {
5047 msm_property_install_volatile_range(
5048 &sde_crtc->property_info, "dest_scaler",
5049 0x0, 0, ~0, 0, CRTC_PROP_DEST_SCALER);
5050 msm_property_install_blob(&sde_crtc->property_info,
5051 "ds_lut_ed", 0,
5052 CRTC_PROP_DEST_SCALER_LUT_ED);
5053 msm_property_install_blob(&sde_crtc->property_info,
5054 "ds_lut_cir", 0,
5055 CRTC_PROP_DEST_SCALER_LUT_CIR);
5056 msm_property_install_blob(&sde_crtc->property_info,
5057 "ds_lut_sep", 0,
5058 CRTC_PROP_DEST_SCALER_LUT_SEP);
5059 }
5060 }
5061
Dhaval Patele4a5dda2016-10-13 19:29:30 -07005062 sde_kms_info_add_keyint(info, "has_src_split", catalog->has_src_split);
Srikanth Rajagopalan203b2782017-07-05 22:08:52 -07005063 sde_kms_info_add_keyint(info, "has_hdr", catalog->has_hdr);
Alan Kwong2f84f8a2016-12-29 13:07:47 -05005064 if (catalog->perf.max_bw_low)
5065 sde_kms_info_add_keyint(info, "max_bandwidth_low",
Alan Kwong6259a382017-04-04 06:18:02 -07005066 catalog->perf.max_bw_low * 1000LL);
Alan Kwong2f84f8a2016-12-29 13:07:47 -05005067 if (catalog->perf.max_bw_high)
5068 sde_kms_info_add_keyint(info, "max_bandwidth_high",
Alan Kwong6259a382017-04-04 06:18:02 -07005069 catalog->perf.max_bw_high * 1000LL);
Narendra Muppallaa50934b2017-08-15 19:43:37 -07005070 if (catalog->perf.min_core_ib)
5071 sde_kms_info_add_keyint(info, "min_core_ib",
5072 catalog->perf.min_core_ib * 1000LL);
5073 if (catalog->perf.min_llcc_ib)
5074 sde_kms_info_add_keyint(info, "min_llcc_ib",
5075 catalog->perf.min_llcc_ib * 1000LL);
5076 if (catalog->perf.min_dram_ib)
5077 sde_kms_info_add_keyint(info, "min_dram_ib",
5078 catalog->perf.min_dram_ib * 1000LL);
Alan Kwong2f84f8a2016-12-29 13:07:47 -05005079 if (sde_kms->perf.max_core_clk_rate)
5080 sde_kms_info_add_keyint(info, "max_mdp_clk",
5081 sde_kms->perf.max_core_clk_rate);
Alan Kwong6259a382017-04-04 06:18:02 -07005082 sde_kms_info_add_keystr(info, "core_ib_ff",
5083 catalog->perf.core_ib_ff);
5084 sde_kms_info_add_keystr(info, "core_clk_ff",
5085 catalog->perf.core_clk_ff);
5086 sde_kms_info_add_keystr(info, "comp_ratio_rt",
5087 catalog->perf.comp_ratio_rt);
5088 sde_kms_info_add_keystr(info, "comp_ratio_nrt",
5089 catalog->perf.comp_ratio_nrt);
5090 sde_kms_info_add_keyint(info, "dest_scale_prefill_lines",
5091 catalog->perf.dest_scale_prefill_lines);
5092 sde_kms_info_add_keyint(info, "undersized_prefill_lines",
5093 catalog->perf.undersized_prefill_lines);
5094 sde_kms_info_add_keyint(info, "macrotile_prefill_lines",
5095 catalog->perf.macrotile_prefill_lines);
5096 sde_kms_info_add_keyint(info, "yuv_nv12_prefill_lines",
5097 catalog->perf.yuv_nv12_prefill_lines);
5098 sde_kms_info_add_keyint(info, "linear_prefill_lines",
5099 catalog->perf.linear_prefill_lines);
5100 sde_kms_info_add_keyint(info, "downscaling_prefill_lines",
5101 catalog->perf.downscaling_prefill_lines);
5102 sde_kms_info_add_keyint(info, "xtra_prefill_lines",
5103 catalog->perf.xtra_prefill_lines);
5104 sde_kms_info_add_keyint(info, "amortizable_threshold",
5105 catalog->perf.amortizable_threshold);
5106 sde_kms_info_add_keyint(info, "min_prefill_lines",
5107 catalog->perf.min_prefill_lines);
5108
Dhaval Patele4a5dda2016-10-13 19:29:30 -07005109 msm_property_set_blob(&sde_crtc->property_info, &sde_crtc->blob_info,
Narendra Muppalla22d17252017-05-31 15:13:39 -07005110 info->data, SDE_KMS_INFO_DATALEN(info), CRTC_PROP_INFO);
Dhaval Patele4a5dda2016-10-13 19:29:30 -07005111
5112 kfree(info);
Clarence Ip7a753bb2016-07-07 11:47:44 -04005113}
5114
Dhaval Patelb24b2d62017-11-03 18:10:26 -07005115static int _sde_crtc_get_output_fence(struct drm_crtc *crtc,
5116 const struct drm_crtc_state *state, uint64_t *val)
5117{
5118 struct drm_encoder *encoder;
5119 struct sde_crtc *sde_crtc;
5120 struct sde_crtc_state *cstate;
5121 uint32_t offset, i;
5122 bool conn_offset = 0, is_cmd = true;
5123
5124 sde_crtc = to_sde_crtc(crtc);
5125 cstate = to_sde_crtc_state(state);
5126
5127 for (i = 0; i < cstate->num_connectors; ++i) {
5128 conn_offset = sde_connector_needs_offset(cstate->connectors[i]);
5129 if (conn_offset)
5130 break;
5131 }
5132
5133 /**
5134 * set the cmd flag only when all the encoders attached
5135 * to the crtc are in cmd mode. Consider all other cases
5136 * as video mode.
5137 */
5138 drm_for_each_encoder(encoder, crtc->dev) {
5139 if (encoder->crtc == crtc)
5140 is_cmd = sde_encoder_check_mode(encoder,
5141 MSM_DISPLAY_CAP_CMD_MODE);
5142 }
5143
5144 offset = sde_crtc_get_property(cstate, CRTC_PROP_OUTPUT_FENCE_OFFSET);
5145
5146 /**
5147 * set the offset to 0 only for cmd mode panels, so
5148 * the release fence for the current frame can be
5149 * triggered right after PP_DONE interrupt.
5150 */
5151 offset = is_cmd ? 0 : (offset + conn_offset);
5152
5153 return sde_fence_create(&sde_crtc->output_fence, val, offset);
5154}
5155
Clarence Ip7a753bb2016-07-07 11:47:44 -04005156/**
5157 * sde_crtc_atomic_set_property - atomically set a crtc drm property
5158 * @crtc: Pointer to drm crtc structure
5159 * @state: Pointer to drm crtc state structure
5160 * @property: Pointer to targeted drm property
5161 * @val: Updated property value
5162 * @Returns: Zero on success
5163 */
5164static int sde_crtc_atomic_set_property(struct drm_crtc *crtc,
5165 struct drm_crtc_state *state,
5166 struct drm_property *property,
5167 uint64_t val)
5168{
5169 struct sde_crtc *sde_crtc;
5170 struct sde_crtc_state *cstate;
Lloyd Atkinsonadd42952017-10-31 14:27:55 -04005171 int idx, ret;
Dhaval Patelb24b2d62017-11-03 18:10:26 -07005172 uint64_t fence_fd;
Clarence Ip7a753bb2016-07-07 11:47:44 -04005173
5174 if (!crtc || !state || !property) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07005175 SDE_ERROR("invalid argument(s)\n");
Lloyd Atkinsonadd42952017-10-31 14:27:55 -04005176 return -EINVAL;
Clarence Ip7a753bb2016-07-07 11:47:44 -04005177 }
5178
Lloyd Atkinsonadd42952017-10-31 14:27:55 -04005179 sde_crtc = to_sde_crtc(crtc);
5180 cstate = to_sde_crtc_state(state);
5181
5182 /* check with cp property system first */
5183 ret = sde_cp_crtc_set_property(crtc, property, val);
5184 if (ret != -ENOENT)
5185 goto exit;
5186
5187 /* if not handled by cp, check msm_property system */
5188 ret = msm_property_atomic_set(&sde_crtc->property_info,
5189 &cstate->property_state, property, val);
5190 if (ret)
5191 goto exit;
5192
5193 idx = msm_property_index(&sde_crtc->property_info, property);
5194 switch (idx) {
5195 case CRTC_PROP_INPUT_FENCE_TIMEOUT:
5196 _sde_crtc_set_input_fence_timeout(cstate);
5197 break;
5198 case CRTC_PROP_DIM_LAYER_V1:
5199 _sde_crtc_set_dim_layer_v1(cstate, (void __user *)val);
5200 break;
5201 case CRTC_PROP_ROI_V1:
5202 ret = _sde_crtc_set_roi_v1(state, (void __user *)val);
5203 break;
5204 case CRTC_PROP_DEST_SCALER:
5205 ret = _sde_crtc_set_dest_scaler(sde_crtc, cstate,
5206 (void __user *)val);
5207 break;
5208 case CRTC_PROP_DEST_SCALER_LUT_ED:
5209 case CRTC_PROP_DEST_SCALER_LUT_CIR:
5210 case CRTC_PROP_DEST_SCALER_LUT_SEP:
5211 ret = _sde_crtc_set_dest_scaler_lut(sde_crtc, cstate, idx);
5212 break;
5213 case CRTC_PROP_CORE_CLK:
5214 case CRTC_PROP_CORE_AB:
5215 case CRTC_PROP_CORE_IB:
5216 cstate->bw_control = true;
5217 break;
5218 case CRTC_PROP_LLCC_AB:
5219 case CRTC_PROP_LLCC_IB:
5220 case CRTC_PROP_DRAM_AB:
5221 case CRTC_PROP_DRAM_IB:
5222 cstate->bw_control = true;
5223 cstate->bw_split_vote = true;
5224 break;
Dhaval Patelb24b2d62017-11-03 18:10:26 -07005225 case CRTC_PROP_OUTPUT_FENCE:
Dhaval Pateladbac6f2017-11-16 17:16:18 -08005226 if (!val)
5227 goto exit;
5228
Dhaval Patelb24b2d62017-11-03 18:10:26 -07005229 ret = _sde_crtc_get_output_fence(crtc, state, &fence_fd);
5230 if (ret) {
5231 SDE_ERROR("fence create failed rc:%d\n", ret);
5232 goto exit;
5233 }
5234
5235 ret = copy_to_user((uint64_t __user *)val, &fence_fd,
5236 sizeof(uint64_t));
5237 if (ret) {
5238 SDE_ERROR("copy to user failed rc:%d\n", ret);
5239 put_unused_fd(fence_fd);
5240 ret = -EFAULT;
5241 goto exit;
5242 }
5243 break;
Lloyd Atkinsonadd42952017-10-31 14:27:55 -04005244 default:
5245 /* nothing to do */
5246 break;
5247 }
5248
5249exit:
Xu Yangcbbefd72017-12-14 14:44:00 +08005250 if (ret) {
5251 if (ret != -EPERM)
5252 SDE_ERROR("%s: failed to set property%d %s: %d\n",
5253 crtc->name, DRMID(property),
5254 property->name, ret);
5255 else
5256 SDE_DEBUG("%s: failed to set property%d %s: %d\n",
5257 crtc->name, DRMID(property),
5258 property->name, ret);
5259 } else {
Lloyd Atkinsonadd42952017-10-31 14:27:55 -04005260 SDE_DEBUG("%s: %s[%d] <= 0x%llx\n", crtc->name, property->name,
5261 property->base.id, val);
Xu Yangcbbefd72017-12-14 14:44:00 +08005262 }
Lloyd Atkinsonadd42952017-10-31 14:27:55 -04005263
Clarence Ip7a753bb2016-07-07 11:47:44 -04005264 return ret;
5265}
5266
5267/**
5268 * sde_crtc_set_property - set a crtc drm property
5269 * @crtc: Pointer to drm crtc structure
5270 * @property: Pointer to targeted drm property
5271 * @val: Updated property value
5272 * @Returns: Zero on success
5273 */
5274static int sde_crtc_set_property(struct drm_crtc *crtc,
5275 struct drm_property *property, uint64_t val)
5276{
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04005277 SDE_DEBUG("\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04005278
5279 return sde_crtc_atomic_set_property(crtc, crtc->state, property, val);
5280}
5281
5282/**
5283 * sde_crtc_atomic_get_property - retrieve a crtc drm property
5284 * @crtc: Pointer to drm crtc structure
5285 * @state: Pointer to drm crtc state structure
5286 * @property: Pointer to targeted drm property
5287 * @val: Pointer to variable for receiving property value
5288 * @Returns: Zero on success
5289 */
5290static int sde_crtc_atomic_get_property(struct drm_crtc *crtc,
5291 const struct drm_crtc_state *state,
5292 struct drm_property *property,
5293 uint64_t *val)
5294{
5295 struct sde_crtc *sde_crtc;
5296 struct sde_crtc_state *cstate;
Dhaval Patelb24b2d62017-11-03 18:10:26 -07005297 int ret = -EINVAL, i;
Clarence Ip7a753bb2016-07-07 11:47:44 -04005298
5299 if (!crtc || !state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07005300 SDE_ERROR("invalid argument(s)\n");
Dhaval Patelb24b2d62017-11-03 18:10:26 -07005301 goto end;
Clarence Ip7a753bb2016-07-07 11:47:44 -04005302 }
Dhaval Patelb24b2d62017-11-03 18:10:26 -07005303
5304 sde_crtc = to_sde_crtc(crtc);
5305 cstate = to_sde_crtc_state(state);
5306
5307 i = msm_property_index(&sde_crtc->property_info, property);
5308 if (i == CRTC_PROP_OUTPUT_FENCE) {
Dhaval Patel1f5a5a22017-11-06 23:06:09 -08005309 *val = ~0;
5310 ret = 0;
Dhaval Patelb24b2d62017-11-03 18:10:26 -07005311 } else {
5312 ret = msm_property_atomic_get(&sde_crtc->property_info,
5313 &cstate->property_state, property, val);
5314 if (ret)
5315 ret = sde_cp_crtc_get_property(crtc, property, val);
5316 }
5317 if (ret)
5318 DRM_ERROR("get property failed\n");
5319
5320end:
Clarence Ip7a753bb2016-07-07 11:47:44 -04005321 return ret;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005322}
5323
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08005324void sde_crtc_misr_setup(struct drm_crtc *crtc, bool enable, u32 frame_count)
5325{
5326 struct sde_kms *sde_kms;
5327 struct sde_crtc *sde_crtc;
5328 struct sde_crtc_mixer *m;
5329 int i;
5330
5331 if (!crtc) {
5332 SDE_ERROR("invalid argument\n");
5333 return;
5334 }
5335 sde_crtc = to_sde_crtc(crtc);
5336
5337 sde_kms = _sde_crtc_get_kms(crtc);
5338 if (!sde_kms) {
5339 SDE_ERROR("invalid sde_kms\n");
5340 return;
5341 }
5342
5343 mutex_lock(&sde_crtc->crtc_lock);
5344 if (sde_kms_is_secure_session_inprogress(sde_kms)) {
5345 SDE_DEBUG("crtc:%d misr enable/disable not allowed\n",
5346 DRMID(crtc));
5347 mutex_unlock(&sde_crtc->crtc_lock);
5348 return;
5349 }
5350
5351 sde_crtc->misr_enable = enable;
5352 sde_crtc->misr_frame_count = frame_count;
5353 for (i = 0; i < sde_crtc->num_mixers; ++i) {
5354 sde_crtc->misr_data[i] = 0;
5355 m = &sde_crtc->mixers[i];
5356 if (!m->hw_lm || !m->hw_lm->ops.setup_misr)
5357 continue;
5358
5359 m->hw_lm->ops.setup_misr(m->hw_lm, enable, frame_count);
5360 }
5361 mutex_unlock(&sde_crtc->crtc_lock);
5362}
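/*
 * Editor's illustrative sketch: the expected calling pattern for
 * sde_crtc_misr_setup() from a hypothetical validation path. The
 * collection step mirrors _sde_crtc_misr_read() further below; locking
 * and frame synchronisation are omitted for brevity.
 */
static u32 example_crc_of_first_mixer(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_crtc_mixer *m;

	if (!sde_crtc->num_mixers)
		return 0;

	/* arm MISR on every mixer of this crtc for a single frame */
	sde_crtc_misr_setup(crtc, true, 1);

	/* ... caller waits for at least one frame to be pushed out ... */

	m = &sde_crtc->mixers[0];
	if (m->hw_lm && m->hw_lm->ops.collect_misr)
		return m->hw_lm->ops.collect_misr(m->hw_lm);

	return 0;
}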
5363
Alan Kwong67a3f792016-11-01 23:16:53 -04005364#ifdef CONFIG_DEBUG_FS
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005365static int _sde_debugfs_status_show(struct seq_file *s, void *data)
Clarence Ip8f7366c2016-07-05 12:15:26 -04005366{
5367 struct sde_crtc *sde_crtc;
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005368 struct sde_plane_state *pstate = NULL;
Clarence Ip8f7366c2016-07-05 12:15:26 -04005369 struct sde_crtc_mixer *m;
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005370
5371 struct drm_crtc *crtc;
5372 struct drm_plane *plane;
5373 struct drm_display_mode *mode;
5374 struct drm_framebuffer *fb;
5375 struct drm_plane_state *state;
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07005376 struct sde_crtc_state *cstate;
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005377
Sravanthi Kollukudurue2320172017-12-18 12:23:25 +05305378 int i, out_width, out_height;
Clarence Ip8f7366c2016-07-05 12:15:26 -04005379
5380 if (!s || !s->private)
5381 return -EINVAL;
5382
5383 sde_crtc = s->private;
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005384 crtc = &sde_crtc->base;
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07005385 cstate = to_sde_crtc_state(crtc->state);
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005386
5387 mutex_lock(&sde_crtc->crtc_lock);
5388 mode = &crtc->state->adjusted_mode;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05305389 out_width = sde_crtc_get_mixer_width(sde_crtc, cstate, mode);
Sravanthi Kollukudurue2320172017-12-18 12:23:25 +05305390 out_height = sde_crtc_get_mixer_height(sde_crtc, cstate, mode);
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005391
5392 seq_printf(s, "crtc:%d width:%d height:%d\n", crtc->base.id,
5393 mode->hdisplay, mode->vdisplay);
5394
5395 seq_puts(s, "\n");
5396
Clarence Ip8f7366c2016-07-05 12:15:26 -04005397 for (i = 0; i < sde_crtc->num_mixers; ++i) {
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04005398 m = &sde_crtc->mixers[i];
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005399 if (!m->hw_lm)
5400 seq_printf(s, "\tmixer[%d] has no lm\n", i);
5401 else if (!m->hw_ctl)
5402 seq_printf(s, "\tmixer[%d] has no ctl\n", i);
5403 else
5404 seq_printf(s, "\tmixer:%d ctl:%d width:%d height:%d\n",
5405 m->hw_lm->idx - LM_0, m->hw_ctl->idx - CTL_0,
Sravanthi Kollukudurue2320172017-12-18 12:23:25 +05305406 out_width, out_height);
Clarence Ip8f7366c2016-07-05 12:15:26 -04005407 }
Dhaval Patel44f12472016-08-29 12:19:47 -07005408
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005409 seq_puts(s, "\n");
Dhaval Patel48c76022016-09-01 17:51:23 -07005410
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07005411 for (i = 0; i < cstate->num_dim_layers; i++) {
5412 struct sde_hw_dim_layer *dim_layer = &cstate->dim_layer[i];
5413
5414 seq_printf(s, "\tdim_layer:%d] stage:%d flags:%d\n",
5415 i, dim_layer->stage, dim_layer->flags);
5416 seq_printf(s, "\tdst_x:%d dst_y:%d dst_w:%d dst_h:%d\n",
5417 dim_layer->rect.x, dim_layer->rect.y,
5418 dim_layer->rect.w, dim_layer->rect.h);
5419 seq_printf(s,
5420 "\tcolor_0:%d color_1:%d color_2:%d color_3:%d\n",
5421 dim_layer->color_fill.color_0,
5422 dim_layer->color_fill.color_1,
5423 dim_layer->color_fill.color_2,
5424 dim_layer->color_fill.color_3);
5425 seq_puts(s, "\n");
5426 }
5427
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005428 drm_atomic_crtc_for_each_plane(plane, crtc) {
5429 pstate = to_sde_plane_state(plane->state);
5430 state = plane->state;
5431
5432 if (!pstate || !state)
5433 continue;
5434
5435 seq_printf(s, "\tplane:%u stage:%d\n", plane->base.id,
5436 pstate->stage);
5437
5438 if (plane->state->fb) {
5439 fb = plane->state->fb;
5440
5441 seq_printf(s, "\tfb:%d image format:%4.4s wxh:%ux%u bpp:%d\n",
5442 fb->base.id, (char *) &fb->pixel_format,
5443 fb->width, fb->height, fb->bits_per_pixel);
5444
5445 seq_puts(s, "\t");
5446 for (i = 0; i < ARRAY_SIZE(fb->modifier); i++)
5447 seq_printf(s, "modifier[%d]:%8llu ", i,
5448 fb->modifier[i]);
5449 seq_puts(s, "\n");
5450
5451 seq_puts(s, "\t");
5452 for (i = 0; i < ARRAY_SIZE(fb->pitches); i++)
5453 seq_printf(s, "pitches[%d]:%8u ", i,
5454 fb->pitches[i]);
5455 seq_puts(s, "\n");
5456
5457 seq_puts(s, "\t");
5458 for (i = 0; i < ARRAY_SIZE(fb->offsets); i++)
5459 seq_printf(s, "offsets[%d]:%8u ", i,
5460 fb->offsets[i]);
Dhaval Patel48c76022016-09-01 17:51:23 -07005461 seq_puts(s, "\n");
5462 }
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005463
5464 seq_printf(s, "\tsrc_x:%4d src_y:%4d src_w:%4d src_h:%4d\n",
5465 state->src_x, state->src_y, state->src_w, state->src_h);
5466
5467 seq_printf(s, "\tdst x:%4d dst_y:%4d dst_w:%4d dst_h:%4d\n",
5468 state->crtc_x, state->crtc_y, state->crtc_w,
5469 state->crtc_h);
Jeykumar Sankarane964dc72017-05-10 19:26:43 -07005470 seq_printf(s, "\tmultirect: mode: %d index: %d\n",
5471 pstate->multirect_mode, pstate->multirect_index);
Veera Sundaram Sankaran58e12812017-05-05 11:51:09 -07005472
5473 seq_printf(s, "\texcl_rect: x:%4d y:%4d w:%4d h:%4d\n",
5474 pstate->excl_rect.x, pstate->excl_rect.y,
5475 pstate->excl_rect.w, pstate->excl_rect.h);
5476
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005477 seq_puts(s, "\n");
Clarence Ip8f7366c2016-07-05 12:15:26 -04005478 }
Alan Kwong07da0982016-11-04 12:57:45 -04005479
5480 if (sde_crtc->vblank_cb_count) {
5481 ktime_t diff = ktime_sub(ktime_get(), sde_crtc->vblank_cb_time);
5482 s64 diff_ms = ktime_to_ms(diff);
5483 s64 fps = diff_ms ? DIV_ROUND_CLOSEST(
5484 sde_crtc->vblank_cb_count * 1000, diff_ms) : 0;
5485
5486 seq_printf(s,
Dhaval Pateld67cf4a2017-06-14 18:08:32 -07005487 "vblank fps:%lld count:%u total:%llums total_framecount:%llu\n",
5488 fps, sde_crtc->vblank_cb_count,
5489 ktime_to_ms(diff), sde_crtc->play_count);
Alan Kwong07da0982016-11-04 12:57:45 -04005490
5491 /* reset time & count for next measurement */
5492 sde_crtc->vblank_cb_count = 0;
5493 sde_crtc->vblank_cb_time = ktime_set(0, 0);
5494 }
5495
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04005496 seq_printf(s, "vblank_enable:%d\n", sde_crtc->vblank_requested);
Alan Kwong07da0982016-11-04 12:57:45 -04005497
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005498 mutex_unlock(&sde_crtc->crtc_lock);
5499
Clarence Ip8f7366c2016-07-05 12:15:26 -04005500 return 0;
5501}
5502
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005503static int _sde_debugfs_status_open(struct inode *inode, struct file *file)
Clarence Ip8f7366c2016-07-05 12:15:26 -04005504{
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005505 return single_open(file, _sde_debugfs_status_show, inode->i_private);
Clarence Ip8f7366c2016-07-05 12:15:26 -04005506}
5507
Dhaval Patelf9245d62017-03-28 16:24:00 -07005508static ssize_t _sde_crtc_misr_setup(struct file *file,
5509 const char __user *user_buf, size_t count, loff_t *ppos)
5510{
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08005511 struct drm_crtc *crtc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07005512 struct sde_crtc *sde_crtc;
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08005513 int rc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07005514 char buf[MISR_BUFF_SIZE + 1];
5515 u32 frame_count, enable;
5516 size_t buff_copy;
5517
5518 if (!file || !file->private_data)
5519 return -EINVAL;
5520
5521 sde_crtc = file->private_data;
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08005522 crtc = &sde_crtc->base;
5523
Dhaval Patelf9245d62017-03-28 16:24:00 -07005524 buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
5525 if (copy_from_user(buf, user_buf, buff_copy)) {
5526 SDE_ERROR("buffer copy failed\n");
5527 return -EINVAL;
5528 }
5529
5530 buf[buff_copy] = 0; /* end of string */
5531
5532 if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
5533 return -EINVAL;
5534
5535 rc = _sde_crtc_power_enable(sde_crtc, true);
5536 if (rc)
5537 return rc;
5538
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08005539 sde_crtc_misr_setup(crtc, enable, frame_count);
Dhaval Patelf9245d62017-03-28 16:24:00 -07005540 _sde_crtc_power_enable(sde_crtc, false);
5541
5542 return count;
5543}
5544
5545static ssize_t _sde_crtc_misr_read(struct file *file,
5546 char __user *user_buff, size_t count, loff_t *ppos)
5547{
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08005548 struct drm_crtc *crtc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07005549 struct sde_crtc *sde_crtc;
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08005550 struct sde_kms *sde_kms;
Dhaval Patelf9245d62017-03-28 16:24:00 -07005551 struct sde_crtc_mixer *m;
5552 int i = 0, rc;
Dhaval Patel010f5172017-08-01 22:40:09 -07005553 u32 misr_status;
Dhaval Patelf9245d62017-03-28 16:24:00 -07005554 ssize_t len = 0;
5555 char buf[MISR_BUFF_SIZE + 1] = {'\0'};
5556
5557 if (*ppos)
5558 return 0;
5559
5560 if (!file || !file->private_data)
5561 return -EINVAL;
5562
5563 sde_crtc = file->private_data;
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08005564 crtc = &sde_crtc->base;
5565 sde_kms = _sde_crtc_get_kms(crtc);
5566 if (!sde_kms)
5567 return -EINVAL;
5568
Dhaval Patelf9245d62017-03-28 16:24:00 -07005569 rc = _sde_crtc_power_enable(sde_crtc, true);
5570 if (rc)
5571 return rc;
5572
5573 mutex_lock(&sde_crtc->crtc_lock);
Veera Sundaram Sankaran61e4fba2018-02-27 17:59:22 -08005574 if (sde_kms_is_secure_session_inprogress(sde_kms)) {
5575 SDE_DEBUG("crtc:%d misr read not allowed\n", DRMID(crtc));
5576 goto end;
5577 }
5578
Dhaval Patelf9245d62017-03-28 16:24:00 -07005579 if (!sde_crtc->misr_enable) {
5580 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
5581 "disabled\n");
5582 goto buff_check;
5583 }
5584
5585 for (i = 0; i < sde_crtc->num_mixers; ++i) {
5586 m = &sde_crtc->mixers[i];
Dhaval Patel010f5172017-08-01 22:40:09 -07005587 if (!m->hw_lm || !m->hw_lm->ops.collect_misr)
Dhaval Patelf9245d62017-03-28 16:24:00 -07005588 continue;
5589
Dhaval Patel010f5172017-08-01 22:40:09 -07005590 misr_status = m->hw_lm->ops.collect_misr(m->hw_lm);
5591 sde_crtc->misr_data[i] = misr_status ? misr_status :
5592 sde_crtc->misr_data[i];
Dhaval Patelf9245d62017-03-28 16:24:00 -07005593 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "lm idx:%d\n",
5594 m->hw_lm->idx - LM_0);
5595 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
Dhaval Patel010f5172017-08-01 22:40:09 -07005596 sde_crtc->misr_data[i]);
Dhaval Patelf9245d62017-03-28 16:24:00 -07005597 }
5598
5599buff_check:
5600 if (count <= len) {
5601 len = 0;
5602 goto end;
5603 }
5604
5605 if (copy_to_user(user_buff, buf, len)) {
5606 len = -EFAULT;
5607 goto end;
5608 }
5609
5610 *ppos += len; /* increase offset */
5611
5612end:
5613 mutex_unlock(&sde_crtc->crtc_lock);
5614 _sde_crtc_power_enable(sde_crtc, false);
5615 return len;
5616}
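/*
 * Editor's illustrative sketch: driving the misr_data debugfs node
 * (created further below) from a test program. The debugfs path is an
 * assumption (dri minor and crtc name differ per target); the write
 * format is "<enable> <frame_count>", as parsed by _sde_crtc_misr_setup()
 * above.
 */
#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>

static void example_misr_check(const char *node)
{
	char out[256] = {0};
	int fd = open(node, O_RDWR);	/* e.g. .../dri/0/crtc<N>/misr_data */

	if (fd < 0)
		return;

	/* enable MISR collection over 8 frames on all mixers */
	write(fd, "1 8", 3);

	/* ... let a few frames render ... */

	/* read back one signature per layer mixer (or "disabled") */
	if (pread(fd, out, sizeof(out) - 1, 0) > 0)
		printf("%s", out);

	close(fd);
}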
5617
5618#define DEFINE_SDE_DEBUGFS_SEQ_FOPS(__prefix) \
Alan Kwong67a3f792016-11-01 23:16:53 -04005619static int __prefix ## _open(struct inode *inode, struct file *file) \
5620{ \
5621 return single_open(file, __prefix ## _show, inode->i_private); \
5622} \
5623static const struct file_operations __prefix ## _fops = { \
5624 .owner = THIS_MODULE, \
5625 .open = __prefix ## _open, \
5626 .release = single_release, \
5627 .read = seq_read, \
5628 .llseek = seq_lseek, \
5629}
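/*
 * Editor's note: for a given prefix "foo", the macro above expects a
 * foo_show(struct seq_file *, void *) to already exist and generates
 * roughly the following boilerplate, so the debugfs "state" node below
 * only has to provide sde_crtc_debugfs_state_show():
 *
 *	static int foo_open(struct inode *inode, struct file *file)
 *	{
 *		return single_open(file, foo_show, inode->i_private);
 *	}
 *	static const struct file_operations foo_fops = { ... };
 */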
5630
5631static int sde_crtc_debugfs_state_show(struct seq_file *s, void *v)
5632{
5633 struct drm_crtc *crtc = (struct drm_crtc *) s->private;
Alan Kwong751cf462017-06-08 10:26:46 -04005634 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
Alan Kwong67a3f792016-11-01 23:16:53 -04005635 struct sde_crtc_state *cstate = to_sde_crtc_state(crtc->state);
Alan Kwongcdb2f282017-03-18 13:42:06 -07005636 struct sde_crtc_res *res;
Alan Kwong310e9b02017-08-03 02:04:07 -04005637 struct sde_crtc_respool *rp;
Alan Kwong0230a102017-05-16 11:36:44 -07005638 int i;
Alan Kwong67a3f792016-11-01 23:16:53 -04005639
5640 seq_printf(s, "num_connectors: %d\n", cstate->num_connectors);
Dhaval Patel4d424602017-02-18 19:40:14 -08005641 seq_printf(s, "client type: %d\n", sde_crtc_get_client_type(crtc));
Alan Kwong3e985f02017-02-12 15:08:44 -08005642 seq_printf(s, "intf_mode: %d\n", sde_crtc_get_intf_mode(crtc));
Alan Kwong751cf462017-06-08 10:26:46 -04005643 seq_printf(s, "core_clk_rate: %llu\n",
5644 sde_crtc->cur_perf.core_clk_rate);
Alan Kwong0230a102017-05-16 11:36:44 -07005645 for (i = SDE_POWER_HANDLE_DBUS_ID_MNOC;
5646 i < SDE_POWER_HANDLE_DBUS_ID_MAX; i++) {
5647 seq_printf(s, "bw_ctl[%s]: %llu\n",
5648 sde_power_handle_get_dbus_name(i),
5649 sde_crtc->cur_perf.bw_ctl[i]);
5650 seq_printf(s, "max_per_pipe_ib[%s]: %llu\n",
5651 sde_power_handle_get_dbus_name(i),
5652 sde_crtc->cur_perf.max_per_pipe_ib[i]);
5653 }
Alan Kwong67a3f792016-11-01 23:16:53 -04005654
Alan Kwong310e9b02017-08-03 02:04:07 -04005655 mutex_lock(&sde_crtc->rp_lock);
5656 list_for_each_entry(rp, &sde_crtc->rp_head, rp_list) {
5657 seq_printf(s, "rp.%d: ", rp->sequence_id);
5658 list_for_each_entry(res, &rp->res_list, list)
5659 seq_printf(s, "0x%x/0x%llx/%pK/%d ",
5660 res->type, res->tag, res->val,
5661 atomic_read(&res->refcount));
5662 seq_puts(s, "\n");
5663 }
5664 mutex_unlock(&sde_crtc->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -07005665
Alan Kwong67a3f792016-11-01 23:16:53 -04005666 return 0;
5667}
5668DEFINE_SDE_DEBUGFS_SEQ_FOPS(sde_crtc_debugfs_state);
5669
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005670static int _sde_crtc_init_debugfs(struct drm_crtc *crtc)
Clarence Ip8f7366c2016-07-05 12:15:26 -04005671{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005672 struct sde_crtc *sde_crtc;
5673 struct sde_kms *sde_kms;
5674
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005675 static const struct file_operations debugfs_status_fops = {
5676 .open = _sde_debugfs_status_open,
Clarence Ip8f7366c2016-07-05 12:15:26 -04005677 .read = seq_read,
5678 .llseek = seq_lseek,
5679 .release = single_release,
5680 };
Dhaval Patelf9245d62017-03-28 16:24:00 -07005681 static const struct file_operations debugfs_misr_fops = {
5682 .open = simple_open,
5683 .read = _sde_crtc_misr_read,
5684 .write = _sde_crtc_misr_setup,
5685 };
Alan Kwong67a3f792016-11-01 23:16:53 -04005686
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005687 if (!crtc)
5688 return -EINVAL;
5689 sde_crtc = to_sde_crtc(crtc);
5690
5691 sde_kms = _sde_crtc_get_kms(crtc);
5692 if (!sde_kms)
5693 return -EINVAL;
5694
5695 sde_crtc->debugfs_root = debugfs_create_dir(sde_crtc->name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07005696 crtc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005697 if (!sde_crtc->debugfs_root)
5698 return -ENOMEM;
5699
5700 /* don't error check these */
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04005701 debugfs_create_file("status", 0400,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005702 sde_crtc->debugfs_root,
5703 sde_crtc, &debugfs_status_fops);
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04005704 debugfs_create_file("state", 0600,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005705 sde_crtc->debugfs_root,
5706 &sde_crtc->base,
5707 &sde_crtc_debugfs_state_fops);
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04005708 debugfs_create_file("misr_data", 0600, sde_crtc->debugfs_root,
Dhaval Patelf9245d62017-03-28 16:24:00 -07005709 sde_crtc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005710
5711 return 0;
5712}
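/*
 * Editor's note: resulting per-crtc debugfs layout (the dri minor number
 * and crtc name vary per target, so the paths below are examples):
 *
 *	/sys/kernel/debug/dri/0/crtc<N>/status     0400  mixer/plane dump
 *	/sys/kernel/debug/dri/0/crtc<N>/state      0600  perf/resource state
 *	/sys/kernel/debug/dri/0/crtc<N>/misr_data  0600  MISR control + read
 */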
5713
5714static void _sde_crtc_destroy_debugfs(struct drm_crtc *crtc)
5715{
5716 struct sde_crtc *sde_crtc;
5717
5718 if (!crtc)
5719 return;
5720 sde_crtc = to_sde_crtc(crtc);
5721 debugfs_remove_recursive(sde_crtc->debugfs_root);
Clarence Ip8f7366c2016-07-05 12:15:26 -04005722}
Alan Kwong67a3f792016-11-01 23:16:53 -04005723#else
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005724static int _sde_crtc_init_debugfs(struct drm_crtc *crtc)
Alan Kwong67a3f792016-11-01 23:16:53 -04005725{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005726 return 0;
Alan Kwong67a3f792016-11-01 23:16:53 -04005727}
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005728
5729static void _sde_crtc_destroy_debugfs(struct drm_crtc *crtc)
5730{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005731}
5732#endif /* CONFIG_DEBUG_FS */
5733
5734static int sde_crtc_late_register(struct drm_crtc *crtc)
5735{
5736 return _sde_crtc_init_debugfs(crtc);
5737}
5738
5739static void sde_crtc_early_unregister(struct drm_crtc *crtc)
5740{
5741 _sde_crtc_destroy_debugfs(crtc);
5742}
5743
5744static const struct drm_crtc_funcs sde_crtc_funcs = {
5745 .set_config = drm_atomic_helper_set_config,
5746 .destroy = sde_crtc_destroy,
5747 .page_flip = drm_atomic_helper_page_flip,
5748 .set_property = sde_crtc_set_property,
5749 .atomic_set_property = sde_crtc_atomic_set_property,
5750 .atomic_get_property = sde_crtc_atomic_get_property,
5751 .reset = sde_crtc_reset,
5752 .atomic_duplicate_state = sde_crtc_duplicate_state,
5753 .atomic_destroy_state = sde_crtc_destroy_state,
5754 .late_register = sde_crtc_late_register,
5755 .early_unregister = sde_crtc_early_unregister,
5756};
5757
5758static const struct drm_crtc_helper_funcs sde_crtc_helper_funcs = {
5759 .mode_fixup = sde_crtc_mode_fixup,
5760 .disable = sde_crtc_disable,
5761 .enable = sde_crtc_enable,
5762 .atomic_check = sde_crtc_atomic_check,
5763 .atomic_begin = sde_crtc_atomic_begin,
5764 .atomic_flush = sde_crtc_atomic_flush,
5765};
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005766
Clarence Ipa18d4832017-03-13 12:35:44 -07005767static void _sde_crtc_event_cb(struct kthread_work *work)
5768{
5769 struct sde_crtc_event *event;
5770 struct sde_crtc *sde_crtc;
5771 unsigned long irq_flags;
5772
5773 if (!work) {
5774 SDE_ERROR("invalid work item\n");
5775 return;
5776 }
5777
5778 event = container_of(work, struct sde_crtc_event, kt_work);
Clarence Ipa18d4832017-03-13 12:35:44 -07005779
5780 /* set sde_crtc to NULL for static work structures */
5781 sde_crtc = event->sde_crtc;
5782 if (!sde_crtc)
5783 return;
5784
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07005785 if (event->cb_func)
5786 event->cb_func(&sde_crtc->base, event->usr);
5787
Clarence Ipa18d4832017-03-13 12:35:44 -07005788 spin_lock_irqsave(&sde_crtc->event_lock, irq_flags);
5789 list_add_tail(&event->list, &sde_crtc->event_free_list);
5790 spin_unlock_irqrestore(&sde_crtc->event_lock, irq_flags);
5791}
5792
5793int sde_crtc_event_queue(struct drm_crtc *crtc,
Raviteja Tamatam1345f2e2018-02-08 16:15:51 +05305794 void (*func)(struct drm_crtc *crtc, void *usr),
5795 void *usr, bool color_processing_event)
Clarence Ipa18d4832017-03-13 12:35:44 -07005796{
5797 unsigned long irq_flags;
5798 struct sde_crtc *sde_crtc;
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005799 struct msm_drm_private *priv;
Clarence Ipa18d4832017-03-13 12:35:44 -07005800 struct sde_crtc_event *event = NULL;
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005801 u32 crtc_id;
Clarence Ipa18d4832017-03-13 12:35:44 -07005802
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005803 if (!crtc || !crtc->dev || !crtc->dev->dev_private || !func) {
5804 SDE_ERROR("invalid parameters\n");
Clarence Ipa18d4832017-03-13 12:35:44 -07005805 return -EINVAL;
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005806 }
Clarence Ipa18d4832017-03-13 12:35:44 -07005807 sde_crtc = to_sde_crtc(crtc);
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005808 priv = crtc->dev->dev_private;
5809 crtc_id = drm_crtc_index(crtc);
Clarence Ipa18d4832017-03-13 12:35:44 -07005810
5811 /*
5812 * Obtain an event struct from the private cache. This event
5813 * queue may be called from ISR contexts, so use a private
5814 * cache to avoid calling any memory allocation functions.
5815 */
5816 spin_lock_irqsave(&sde_crtc->event_lock, irq_flags);
5817 if (!list_empty(&sde_crtc->event_free_list)) {
5818 event = list_first_entry(&sde_crtc->event_free_list,
5819 struct sde_crtc_event, list);
5820 list_del_init(&event->list);
5821 }
5822 spin_unlock_irqrestore(&sde_crtc->event_lock, irq_flags);
5823
5824 if (!event)
5825 return -ENOMEM;
5826
5827 /* populate event node */
5828 event->sde_crtc = sde_crtc;
5829 event->cb_func = func;
5830 event->usr = usr;
5831
5832 /* queue new event request */
5833 kthread_init_work(&event->kt_work, _sde_crtc_event_cb);
Raviteja Tamatam1345f2e2018-02-08 16:15:51 +05305834 if (color_processing_event)
5835 kthread_queue_work(&priv->pp_event_worker,
5836 &event->kt_work);
5837 else
5838 kthread_queue_work(&priv->event_thread[crtc_id].worker,
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005839 &event->kt_work);
Clarence Ipa18d4832017-03-13 12:35:44 -07005840
5841 return 0;
5842}
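/*
 * Editor's illustrative sketch: deferring work from an atomic context
 * with sde_crtc_event_queue(). "example_cb" and the use of the crtc as
 * callback data are hypothetical; the callback later runs on the
 * per-crtc event kthread (or on the color-processing worker when the
 * last argument is true), where sleeping calls are allowed.
 */
static void example_cb(struct drm_crtc *crtc, void *usr)
{
	SDE_DEBUG("deferred work for crtc%u, data %pK\n",
			crtc->base.id, usr);
}

static void example_irq_path(struct drm_crtc *crtc)
{
	/* the helper only pulls from a pre-allocated cache, so it is IRQ-safe */
	if (sde_crtc_event_queue(crtc, example_cb, crtc, false))
		SDE_ERROR("no free event structs for crtc%u\n",
				crtc->base.id);
}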
5843
5844static int _sde_crtc_init_events(struct sde_crtc *sde_crtc)
5845{
5846 int i, rc = 0;
5847
5848 if (!sde_crtc) {
5849 SDE_ERROR("invalid crtc\n");
5850 return -EINVAL;
5851 }
5852
5853 spin_lock_init(&sde_crtc->event_lock);
5854
5855 INIT_LIST_HEAD(&sde_crtc->event_free_list);
5856 for (i = 0; i < SDE_CRTC_MAX_EVENT_COUNT; ++i)
5857 list_add_tail(&sde_crtc->event_cache[i].list,
5858 &sde_crtc->event_free_list);
5859
Dhaval Patel5023c3c2017-08-22 12:40:11 -07005860 INIT_LIST_HEAD(&sde_crtc->retire_event_list);
5861 for (i = 0; i < ARRAY_SIZE(sde_crtc->retire_events); i++)
5862 INIT_LIST_HEAD(&sde_crtc->retire_events[i].list);
5863
Clarence Ipa18d4832017-03-13 12:35:44 -07005864 return rc;
5865}
5866
Dhaval Patelc9e213b2017-11-02 12:13:12 -07005867/*
5868 * __sde_crtc_idle_notify_work - signal idle timeout to user space
5869 */
5870static void __sde_crtc_idle_notify_work(struct kthread_work *work)
5871{
5872 struct sde_crtc *sde_crtc = container_of(work, struct sde_crtc,
5873 idle_notify_work.work);
5874 struct drm_crtc *crtc;
5875 struct drm_event event;
5876 int ret = 0;
5877
5878 if (!sde_crtc) {
5879 SDE_ERROR("invalid sde crtc\n");
5880 } else {
5881 crtc = &sde_crtc->base;
5882 event.type = DRM_EVENT_IDLE_NOTIFY;
5883 event.length = sizeof(u32);
5884 msm_mode_object_event_notify(&crtc->base, crtc->dev,
5885 &event, (u8 *)&ret);
5886
5887 SDE_DEBUG("crtc[%d]: idle timeout notified\n", crtc->base.id);
5888 }
5889}
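/*
 * Editor's illustrative sketch: a client consuming the notification sent
 * above. Registration for DRM_EVENT_IDLE_NOTIFY goes through the msm
 * custom-event ioctl (not shown), and the event code itself comes from
 * the msm uapi headers; the framing assumed here is the standard
 * struct drm_event header read from the DRM fd, with the driver payload
 * following it.
 */
#include <stdio.h>
#include <unistd.h>
#include <xf86drm.h>

static void example_drain_idle_events(int drm_fd)
{
	char buf[1024];
	ssize_t len = read(drm_fd, buf, sizeof(buf));
	ssize_t i = 0;

	while (i + (ssize_t)sizeof(struct drm_event) <= len) {
		struct drm_event *e = (struct drm_event *)&buf[i];

		if (!e->length)
			break;

		if (e->type == DRM_EVENT_IDLE_NOTIFY)
			printf("crtc idle timeout fired\n");

		i += e->length;	/* length covers header + payload */
	}
}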
5890
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04005891/* initialize crtc */
Lloyd Atkinsonac933642016-09-14 11:52:00 -04005892struct drm_crtc *sde_crtc_init(struct drm_device *dev, struct drm_plane *plane)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005893{
5894 struct drm_crtc *crtc = NULL;
Clarence Ip8f7366c2016-07-05 12:15:26 -04005895 struct sde_crtc *sde_crtc = NULL;
5896 struct msm_drm_private *priv = NULL;
5897 struct sde_kms *kms = NULL;
Clarence Ipa18d4832017-03-13 12:35:44 -07005898 int i, rc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005899
Clarence Ip8f7366c2016-07-05 12:15:26 -04005900 priv = dev->dev_private;
5901 kms = to_sde_kms(priv->kms);
5902
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005903 sde_crtc = kzalloc(sizeof(*sde_crtc), GFP_KERNEL);
5904 if (!sde_crtc)
5905 return ERR_PTR(-ENOMEM);
5906
5907 crtc = &sde_crtc->base;
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07005908 crtc->dev = dev;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005909
Clarence Ip7f70ce42017-03-20 06:53:46 -07005910 mutex_init(&sde_crtc->crtc_lock);
Alan Kwong628d19e2016-10-31 13:50:13 -04005911 spin_lock_init(&sde_crtc->spin_lock);
5912 atomic_set(&sde_crtc->frame_pending, 0);
5913
Alan Kwong310e9b02017-08-03 02:04:07 -04005914 mutex_init(&sde_crtc->rp_lock);
5915 INIT_LIST_HEAD(&sde_crtc->rp_head);
5916
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07005917 init_completion(&sde_crtc->frame_done_comp);
Veera Sundaram Sankaran97dc5152017-10-10 20:24:48 -07005918 sde_crtc->enabled = false;
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07005919
Alan Kwong628d19e2016-10-31 13:50:13 -04005920 INIT_LIST_HEAD(&sde_crtc->frame_event_list);
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07005921 INIT_LIST_HEAD(&sde_crtc->user_event_list);
Alan Kwong628d19e2016-10-31 13:50:13 -04005922 for (i = 0; i < ARRAY_SIZE(sde_crtc->frame_events); i++) {
5923 INIT_LIST_HEAD(&sde_crtc->frame_events[i].list);
5924 list_add(&sde_crtc->frame_events[i].list,
5925 &sde_crtc->frame_event_list);
5926 kthread_init_work(&sde_crtc->frame_events[i].work,
5927 sde_crtc_frame_event_work);
5928 }
5929
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07005930 drm_crtc_init_with_planes(dev, crtc, plane, NULL, &sde_crtc_funcs,
5931 NULL);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005932
5933 drm_crtc_helper_add(crtc, &sde_crtc_helper_funcs);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04005934 plane->crtc = crtc;
5935
Clarence Ip8f7366c2016-07-05 12:15:26 -04005936 /* save user friendly CRTC name for later */
5937 snprintf(sde_crtc->name, SDE_CRTC_NAME_SIZE, "crtc%u", crtc->base.id);
5938
Clarence Ipa18d4832017-03-13 12:35:44 -07005939 /* initialize event handling */
5940 rc = _sde_crtc_init_events(sde_crtc);
5941 if (rc) {
5942 drm_crtc_cleanup(crtc);
5943 kfree(sde_crtc);
5944 return ERR_PTR(rc);
5945 }
5946
Clarence Ip9a74a442016-08-25 18:29:03 -04005947 /* initialize output fence support */
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04005948 sde_fence_init(&sde_crtc->output_fence, sde_crtc->name, crtc->base.id);
Clarence Ip24f80662016-06-13 19:05:32 -04005949
Clarence Ip7a753bb2016-07-07 11:47:44 -04005950 /* create CRTC properties */
5951 msm_property_init(&sde_crtc->property_info, &crtc->base, dev,
5952 priv->crtc_property, sde_crtc->property_data,
5953 CRTC_PROP_COUNT, CRTC_PROP_BLOBCOUNT,
5954 sizeof(struct sde_crtc_state));
5955
Dhaval Patele4a5dda2016-10-13 19:29:30 -07005956 sde_crtc_install_properties(crtc, kms->catalog);
Gopikrishnaiah Anandan703eb902016-10-06 18:43:57 -07005957
5958 /* Install color processing properties */
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07005959 sde_cp_crtc_init(crtc);
Gopikrishnaiah Anandan703eb902016-10-06 18:43:57 -07005960 sde_cp_crtc_install_properties(crtc);
Clarence Ip7a753bb2016-07-07 11:47:44 -04005961
Dhaval Patelc9e213b2017-11-02 12:13:12 -07005962 kthread_init_delayed_work(&sde_crtc->idle_notify_work,
5963 __sde_crtc_idle_notify_work);
5964
Dhaval Patelec10fad2016-08-22 14:40:48 -07005965 SDE_DEBUG("%s: successfully initialized crtc\n", sde_crtc->name);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005966 return crtc;
5967}
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07005968
Dhaval Patel91399a52017-11-27 22:21:27 -08005969int sde_crtc_post_init(struct drm_device *dev, struct drm_crtc *crtc)
5970{
5971 struct sde_crtc *sde_crtc;
5972 int rc = 0;
5973
5974 if (!dev || !dev->primary || !dev->primary->kdev || !crtc) {
5975 SDE_ERROR("invalid input param(s)\n");
5976 rc = -EINVAL;
5977 goto end;
5978 }
5979
5980 sde_crtc = to_sde_crtc(crtc);
5981 sde_crtc->sysfs_dev = device_create_with_groups(
5982 dev->primary->kdev->class, dev->primary->kdev, 0, crtc,
5983 sde_crtc_attr_groups, "sde-crtc-%d", crtc->index);
5984 if (IS_ERR_OR_NULL(sde_crtc->sysfs_dev)) {
5985 SDE_ERROR("crtc:%d sysfs create failed rc:%ld\n", crtc->index,
5986 PTR_ERR(sde_crtc->sysfs_dev));
5987 if (!sde_crtc->sysfs_dev)
5988 rc = -EINVAL;
5989 else
5990 rc = PTR_ERR(sde_crtc->sysfs_dev);
5991 goto end;
5992 }
5993
5994 sde_crtc->vsync_event_sf = sysfs_get_dirent(
5995 sde_crtc->sysfs_dev->kobj.sd, "vsync_event");
5996 if (!sde_crtc->vsync_event_sf)
5997 SDE_ERROR("crtc:%d vsync_event sysfs create failed\n",
5998 crtc->base.id);
5999
6000end:
6001 return rc;
6002}
6003
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08006004static int _sde_crtc_event_enable(struct sde_kms *kms,
6005 struct drm_crtc *crtc_drm, u32 event)
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07006006{
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08006007 struct sde_crtc *crtc = NULL;
6008 struct sde_crtc_irq_info *node;
6009 struct msm_drm_private *priv;
6010 unsigned long flags;
6011 bool found = false;
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07006012 int ret, i = 0;
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08006013
6014 crtc = to_sde_crtc(crtc_drm);
6015 spin_lock_irqsave(&crtc->spin_lock, flags);
6016 list_for_each_entry(node, &crtc->user_event_list, list) {
6017 if (node->event == event) {
6018 found = true;
6019 break;
6020 }
6021 }
6022 spin_unlock_irqrestore(&crtc->spin_lock, flags);
6023
6024 /* event already enabled */
6025 if (found)
6026 return 0;
6027
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07006028 node = NULL;
6029 for (i = 0; i < ARRAY_SIZE(custom_events); i++) {
6030 if (custom_events[i].event == event &&
6031 custom_events[i].func) {
6032 node = kzalloc(sizeof(*node), GFP_KERNEL);
6033 if (!node)
6034 return -ENOMEM;
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07006035 INIT_LIST_HEAD(&node->list);
6036 node->func = custom_events[i].func;
6037 node->event = event;
Xu Yangdca8eeb2018-03-26 14:16:09 +08006038 node->state = IRQ_NOINIT;
Xu Yanged79cec2018-01-10 21:04:05 +08006039 spin_lock_init(&node->state_lock);
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07006040 break;
6041 }
6042 }
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08006043
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07006044 if (!node) {
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08006045 SDE_ERROR("unsupported event %x\n", event);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08006046 return -EINVAL;
6047 }
6048
6049 priv = kms->dev->dev_private;
6050 ret = 0;
6051 if (crtc_drm->enabled) {
Alan Kwong1124f1f2017-11-10 18:14:39 -05006052 ret = sde_power_resource_enable(&priv->phandle,
6053 kms->core_client, true);
6054 if (ret) {
6055 SDE_ERROR("failed to enable power resource %d\n", ret);
6056 SDE_EVT32(ret, SDE_EVTLOG_ERROR);
6057 kfree(node);
6058 return ret;
6059 }
6060
Xu Yang37752282017-08-21 13:50:23 +08006061 INIT_LIST_HEAD(&node->irq.list);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08006062 ret = node->func(crtc_drm, true, &node->irq);
6063 sde_power_resource_enable(&priv->phandle, kms->core_client,
6064 false);
6065 }
6066
6067 if (!ret) {
6068 spin_lock_irqsave(&crtc->spin_lock, flags);
6069 list_add_tail(&node->list, &crtc->user_event_list);
6070 spin_unlock_irqrestore(&crtc->spin_lock, flags);
6071 } else {
6072 kfree(node);
6073 }
6074
6075 return ret;
6076}
6077
6078static int _sde_crtc_event_disable(struct sde_kms *kms,
6079 struct drm_crtc *crtc_drm, u32 event)
6080{
6081 struct sde_crtc *crtc = NULL;
6082 struct sde_crtc_irq_info *node = NULL;
6083 struct msm_drm_private *priv;
6084 unsigned long flags;
6085 bool found = false;
6086 int ret;
6087
6088 crtc = to_sde_crtc(crtc_drm);
6089 spin_lock_irqsave(&crtc->spin_lock, flags);
6090 list_for_each_entry(node, &crtc->user_event_list, list) {
6091 if (node->event == event) {
Gopikrishnaiah Anandan15d2f442018-02-21 14:09:08 -08006092 list_del(&node->list);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08006093 found = true;
6094 break;
6095 }
6096 }
6097 spin_unlock_irqrestore(&crtc->spin_lock, flags);
6098
6099 /* event already disabled */
6100 if (!found)
6101 return 0;
6102
6103 /*
6104 * If the crtc is already disabled, its interrupts have been cleared, so
6105 * removing the node from the list is enough; no need to disable/de-register.
6106 */
6107 if (!crtc_drm->enabled) {
6108 kfree(node);
6109 return 0;
6110 }
6111 priv = kms->dev->dev_private;
Alan Kwong1124f1f2017-11-10 18:14:39 -05006112 ret = sde_power_resource_enable(&priv->phandle, kms->core_client, true);
6113 if (ret) {
6114 SDE_ERROR("failed to enable power resource %d\n", ret);
6115 SDE_EVT32(ret, SDE_EVTLOG_ERROR);
Alan Kwong1124f1f2017-11-10 18:14:39 -05006116 kfree(node);
6117 return ret;
6118 }
6119
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08006120 ret = node->func(crtc_drm, false, &node->irq);
Xu Yang5e53c2e2017-07-11 16:46:28 +08006121 kfree(node);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08006122 sde_power_resource_enable(&priv->phandle, kms->core_client, false);
6123 return ret;
6124}
6125
6126int sde_crtc_register_custom_event(struct sde_kms *kms,
6127 struct drm_crtc *crtc_drm, u32 event, bool en)
6128{
6129 struct sde_crtc *crtc = NULL;
6130 int ret;
6131
6132 crtc = to_sde_crtc(crtc_drm);
6133 if (!crtc || !kms || !kms->dev) {
6134 DRM_ERROR("invalid sde_crtc %pK kms %pK dev %pK\n", crtc,
6135 kms, ((kms) ? (kms->dev) : NULL));
6136 return -EINVAL;
6137 }
6138
6139 if (en)
6140 ret = _sde_crtc_event_enable(kms, crtc_drm, event);
6141 else
6142 ret = _sde_crtc_event_disable(kms, crtc_drm, event);
6143
6144 return ret;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07006145}
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -07006146
6147static int sde_crtc_power_interrupt_handler(struct drm_crtc *crtc_drm,
6148 bool en, struct sde_irq_callback *irq)
6149{
6150 return 0;
6151}
Benjamin Chan90139102017-06-21 16:00:39 -04006152
Dhaval Patelc9e213b2017-11-02 12:13:12 -07006153static int sde_crtc_pm_event_handler(struct drm_crtc *crtc, bool en,
6154 struct sde_irq_callback *noirq)
6155{
6156 /*
6157 * IRQ object noirq is not being used here since there is
6158 * no crtc irq from pm event.
6159 */
6160 return 0;
6161}
6162
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05306163static int sde_crtc_idle_interrupt_handler(struct drm_crtc *crtc_drm,
6164 bool en, struct sde_irq_callback *irq)
6165{
6166 return 0;
6167}
Chandan Uddaraju3f2cf422017-06-15 15:37:39 -07006168
6169/**
6170 * sde_crtc_update_cont_splash_mixer_settings - update mixer settings
6171 * during device bootup for cont_splash use case
6172 * @crtc: Pointer to drm crtc structure
6173 */
6174void sde_crtc_update_cont_splash_mixer_settings(
6175 struct drm_crtc *crtc)
6176{
6177 _sde_crtc_setup_mixers(crtc);
6178 crtc->enabled = true;
6179}