Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
2 * Copyright (c) 2014-2017 The Linux Foundation. All rights reserved.
3 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ipd9f9fa62016-09-09 13:42:32 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040020#include <linux/sort.h>
Clarence Ip8f7366c2016-07-05 12:15:26 -040021#include <linux/debugfs.h>
Clarence Ipcae1bb62016-07-07 12:07:13 -040022#include <linux/ktime.h>
Clarence Ip4c1d9772016-06-26 09:35:38 -040023#include <uapi/drm/sde_drm.h>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070024#include <drm/drm_mode.h>
25#include <drm/drm_crtc.h>
26#include <drm/drm_crtc_helper.h>
27#include <drm/drm_flip_work.h>
28
29#include "sde_kms.h"
30#include "sde_hw_lm.h"
Clarence Ipc475b082016-06-26 09:27:23 -040031#include "sde_hw_ctl.h"
Abhijit Kulkarni40e38162016-06-26 22:12:09 -040032#include "sde_crtc.h"
Alan Kwong83285fb2016-10-21 20:51:17 -040033#include "sde_plane.h"
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -070034#include "sde_color_processing.h"
Alan Kwong83285fb2016-10-21 20:51:17 -040035#include "sde_encoder.h"
36#include "sde_connector.h"
Clarence Ip980405d2017-08-08 18:33:44 -040037#include "sde_vbif.h"
Alan Kwong67a3f792016-11-01 23:16:53 -040038#include "sde_power_handle.h"
Alan Kwong9aa061c2016-11-06 21:17:12 -050039#include "sde_core_perf.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070040#include "sde_trace.h"
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -070041#include <soc/qcom/scm.h>
42#include "soc/qcom/secure_buffer.h"
43
44/* defines for secure channel call */
45#define SEC_SID_CNT 2
46#define SEC_SID_MASK_0 0x80881
47#define SEC_SID_MASK_1 0x80C81
48#define MEM_PROTECT_SD_CTRL_SWITCH 0x18
49#define MDP_DEVICE_ID 0x1A
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040050
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -070051struct sde_crtc_custom_events {
52 u32 event;
53 int (*func)(struct drm_crtc *crtc, bool en,
54 struct sde_irq_callback *irq);
55};
56
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -070057static int sde_crtc_power_interrupt_handler(struct drm_crtc *crtc_drm,
58 bool en, struct sde_irq_callback *ad_irq);
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +053059static int sde_crtc_idle_interrupt_handler(struct drm_crtc *crtc_drm,
60 bool en, struct sde_irq_callback *idle_irq);
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -070061
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -070062static struct sde_crtc_custom_events custom_events[] = {
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -070063 {DRM_EVENT_AD_BACKLIGHT, sde_cp_ad_interrupt},
Benjamin Chan90139102017-06-21 16:00:39 -040064 {DRM_EVENT_CRTC_POWER, sde_crtc_power_interrupt_handler},
Xu Yang5e53c2e2017-07-11 16:46:28 +080065 {DRM_EVENT_IDLE_NOTIFY, sde_crtc_idle_interrupt_handler},
66 {DRM_EVENT_HISTOGRAM, sde_cp_hist_interrupt},
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -070067};
68
Clarence Ipcae1bb62016-07-07 12:07:13 -040069/* default input fence timeout, in ms */
Dhaval Patelb9850c02017-08-07 22:55:47 -070070#define SDE_CRTC_INPUT_FENCE_TIMEOUT 10000
Clarence Ipcae1bb62016-07-07 12:07:13 -040071
Dhaval Patel4e574842016-08-23 15:11:37 -070072/*
 73 * The default input fence timeout is 10 seconds, which is also the
 74 * maximum allowed value. Any value above 10 seconds adds glitches beyond
 75 * the tolerance limit.
76 */
77#define SDE_CRTC_MAX_INPUT_FENCE_TIMEOUT 10000
78
Dhaval Patel48c76022016-09-01 17:51:23 -070079/* layer mixer index on sde_crtc */
80#define LEFT_MIXER 0
81#define RIGHT_MIXER 1
82
Dhaval Patelf9245d62017-03-28 16:24:00 -070083#define MISR_BUFF_SIZE 256
84
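/**
 * _sde_crtc_get_kms - retrieve the sde_kms handle backing the given crtc
 * @crtc: Pointer to drm crtc structure
 * return: Pointer to sde_kms if success; null otherwise
 */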
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -040085static inline struct sde_kms *_sde_crtc_get_kms(struct drm_crtc *crtc)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040086{
Clarence Ip7f70ce42017-03-20 06:53:46 -070087 struct msm_drm_private *priv;
88
89 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
90 SDE_ERROR("invalid crtc\n");
91 return NULL;
92 }
93 priv = crtc->dev->dev_private;
94 if (!priv || !priv->kms) {
95 SDE_ERROR("invalid kms\n");
96 return NULL;
97 }
Abhijit Kulkarni40e38162016-06-26 22:12:09 -040098
Ben Chan78647cd2016-06-26 22:02:47 -040099 return to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400100}
101
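/**
 * _sde_crtc_power_enable - enable/disable the power resource for this crtc
 * @sde_crtc: Pointer to sde crtc
 * @enable: True to enable power resource; false to disable
 * return: 0 if success; error code otherwise
 */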
Dhaval Patelf9245d62017-03-28 16:24:00 -0700102static inline int _sde_crtc_power_enable(struct sde_crtc *sde_crtc, bool enable)
103{
104 struct drm_crtc *crtc;
105 struct msm_drm_private *priv;
106 struct sde_kms *sde_kms;
107
108 if (!sde_crtc) {
109 SDE_ERROR("invalid sde crtc\n");
110 return -EINVAL;
111 }
112
113 crtc = &sde_crtc->base;
114 if (!crtc->dev || !crtc->dev->dev_private) {
115 SDE_ERROR("invalid drm device\n");
116 return -EINVAL;
117 }
118
119 priv = crtc->dev->dev_private;
120 if (!priv->kms) {
121 SDE_ERROR("invalid kms\n");
122 return -EINVAL;
123 }
124
125 sde_kms = to_sde_kms(priv->kms);
126
127 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
128 enable);
129}
130
Alan Kwongcdb2f282017-03-18 13:42:06 -0700131/**
132 * _sde_crtc_rp_to_crtc - get crtc from resource pool object
133 * @rp: Pointer to resource pool
134 * return: Pointer to drm crtc if success; null otherwise
135 */
136static struct drm_crtc *_sde_crtc_rp_to_crtc(struct sde_crtc_respool *rp)
137{
138 if (!rp)
139 return NULL;
140
141 return container_of(rp, struct sde_crtc_state, rp)->base.crtc;
142}
143
144/**
145 * _sde_crtc_rp_reclaim - reclaim unused, or all if forced, resources in pool
146 * @rp: Pointer to resource pool
147 * @force: True to reclaim all resources; otherwise, reclaim only unused ones
148 * return: None
149 */
150static void _sde_crtc_rp_reclaim(struct sde_crtc_respool *rp, bool force)
151{
152 struct sde_crtc_res *res, *next;
153 struct drm_crtc *crtc;
154
155 crtc = _sde_crtc_rp_to_crtc(rp);
156 if (!crtc) {
157 SDE_ERROR("invalid crtc\n");
158 return;
159 }
160
161 SDE_DEBUG("crtc%d.%u %s\n", crtc->base.id, rp->sequence_id,
162 force ? "destroy" : "free_unused");
163
164 list_for_each_entry_safe(res, next, &rp->res_list, list) {
165 if (!force && !(res->flags & SDE_CRTC_RES_FLAG_FREE))
166 continue;
167 SDE_DEBUG("crtc%d.%u reclaim res:0x%x/0x%llx/%pK/%d\n",
168 crtc->base.id, rp->sequence_id,
169 res->type, res->tag, res->val,
170 atomic_read(&res->refcount));
171 list_del(&res->list);
172 if (res->ops.put)
173 res->ops.put(res->val);
174 kfree(res);
175 }
176}
177
178/**
 179 * _sde_crtc_rp_free_unused - free unused resources in pool
180 * @rp: Pointer to resource pool
181 * return: none
182 */
183static void _sde_crtc_rp_free_unused(struct sde_crtc_respool *rp)
184{
Alan Kwong310e9b02017-08-03 02:04:07 -0400185 mutex_lock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700186 _sde_crtc_rp_reclaim(rp, false);
Alan Kwong310e9b02017-08-03 02:04:07 -0400187 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700188}
189
190/**
191 * _sde_crtc_rp_destroy - destroy resource pool
192 * @rp: Pointer to resource pool
193 * return: None
194 */
195static void _sde_crtc_rp_destroy(struct sde_crtc_respool *rp)
196{
Alan Kwong310e9b02017-08-03 02:04:07 -0400197 mutex_lock(rp->rp_lock);
198 list_del_init(&rp->rp_list);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700199 _sde_crtc_rp_reclaim(rp, true);
Alan Kwong310e9b02017-08-03 02:04:07 -0400200 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700201}
202
203/**
204 * _sde_crtc_hw_blk_get - get callback for hardware block
205 * @val: Resource handle
206 * @type: Resource type
207 * @tag: Search tag for given resource
208 * return: Resource handle
209 */
210static void *_sde_crtc_hw_blk_get(void *val, u32 type, u64 tag)
211{
212 SDE_DEBUG("res:%d/0x%llx/%pK\n", type, tag, val);
213 return sde_hw_blk_get(val, type, tag);
214}
215
216/**
217 * _sde_crtc_hw_blk_put - put callback for hardware block
218 * @val: Resource handle
219 * return: None
220 */
221static void _sde_crtc_hw_blk_put(void *val)
222{
223 SDE_DEBUG("res://%pK\n", val);
224 sde_hw_blk_put(val);
225}
226
227/**
228 * _sde_crtc_rp_duplicate - duplicate resource pool and reset reference count
229 * @rp: Pointer to original resource pool
230 * @dup_rp: Pointer to duplicated resource pool
231 * return: None
232 */
233static void _sde_crtc_rp_duplicate(struct sde_crtc_respool *rp,
234 struct sde_crtc_respool *dup_rp)
235{
236 struct sde_crtc_res *res, *dup_res;
237 struct drm_crtc *crtc;
238
Alan Kwong310e9b02017-08-03 02:04:07 -0400239 if (!rp || !dup_rp || !rp->rp_head) {
Alan Kwongcdb2f282017-03-18 13:42:06 -0700240 SDE_ERROR("invalid resource pool\n");
241 return;
242 }
243
244 crtc = _sde_crtc_rp_to_crtc(rp);
245 if (!crtc) {
246 SDE_ERROR("invalid crtc\n");
247 return;
248 }
249
250 SDE_DEBUG("crtc%d.%u duplicate\n", crtc->base.id, rp->sequence_id);
251
Alan Kwong310e9b02017-08-03 02:04:07 -0400252 mutex_lock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700253 dup_rp->sequence_id = rp->sequence_id + 1;
254 INIT_LIST_HEAD(&dup_rp->res_list);
255 dup_rp->ops = rp->ops;
256 list_for_each_entry(res, &rp->res_list, list) {
257 dup_res = kzalloc(sizeof(struct sde_crtc_res), GFP_KERNEL);
Alan Kwong310e9b02017-08-03 02:04:07 -0400258 if (!dup_res) {
259 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700260 return;
Alan Kwong310e9b02017-08-03 02:04:07 -0400261 }
Alan Kwongcdb2f282017-03-18 13:42:06 -0700262 INIT_LIST_HEAD(&dup_res->list);
263 atomic_set(&dup_res->refcount, 0);
264 dup_res->type = res->type;
265 dup_res->tag = res->tag;
266 dup_res->val = res->val;
267 dup_res->ops = res->ops;
268 dup_res->flags = SDE_CRTC_RES_FLAG_FREE;
269 SDE_DEBUG("crtc%d.%u dup res:0x%x/0x%llx/%pK/%d\n",
270 crtc->base.id, dup_rp->sequence_id,
271 dup_res->type, dup_res->tag, dup_res->val,
272 atomic_read(&dup_res->refcount));
273 list_add_tail(&dup_res->list, &dup_rp->res_list);
274 if (dup_res->ops.get)
275 dup_res->ops.get(dup_res->val, 0, -1);
276 }
Alan Kwong310e9b02017-08-03 02:04:07 -0400277
278 dup_rp->rp_lock = rp->rp_lock;
279 dup_rp->rp_head = rp->rp_head;
280 INIT_LIST_HEAD(&dup_rp->rp_list);
281 list_add_tail(&dup_rp->rp_list, rp->rp_head);
282 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700283}
284
285/**
286 * _sde_crtc_rp_reset - reset resource pool after allocation
287 * @rp: Pointer to original resource pool
Alan Kwong310e9b02017-08-03 02:04:07 -0400288 * @rp_lock: Pointer to serialization resource pool lock
289 * @rp_head: Pointer to crtc resource pool head
Alan Kwongcdb2f282017-03-18 13:42:06 -0700290 * return: None
291 */
Alan Kwong310e9b02017-08-03 02:04:07 -0400292static void _sde_crtc_rp_reset(struct sde_crtc_respool *rp,
293 struct mutex *rp_lock, struct list_head *rp_head)
Alan Kwongcdb2f282017-03-18 13:42:06 -0700294{
Alan Kwong310e9b02017-08-03 02:04:07 -0400295 if (!rp || !rp_lock || !rp_head) {
Alan Kwongcdb2f282017-03-18 13:42:06 -0700296 SDE_ERROR("invalid resource pool\n");
297 return;
298 }
299
Alan Kwong310e9b02017-08-03 02:04:07 -0400300 mutex_lock(rp_lock);
301 rp->rp_lock = rp_lock;
302 rp->rp_head = rp_head;
303 INIT_LIST_HEAD(&rp->rp_list);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700304 rp->sequence_id = 0;
305 INIT_LIST_HEAD(&rp->res_list);
306 rp->ops.get = _sde_crtc_hw_blk_get;
307 rp->ops.put = _sde_crtc_hw_blk_put;
Alan Kwong310e9b02017-08-03 02:04:07 -0400308 list_add_tail(&rp->rp_list, rp->rp_head);
309 mutex_unlock(rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700310}
311
312/**
Alan Kwong310e9b02017-08-03 02:04:07 -0400313 * _sde_crtc_rp_add_no_lock - add given resource to resource pool without lock
Alan Kwongcdb2f282017-03-18 13:42:06 -0700314 * @rp: Pointer to original resource pool
315 * @type: Resource type
316 * @tag: Search tag for given resource
317 * @val: Resource handle
318 * @ops: Resource callback operations
319 * return: 0 if success; error code otherwise
320 */
Alan Kwong310e9b02017-08-03 02:04:07 -0400321static int _sde_crtc_rp_add_no_lock(struct sde_crtc_respool *rp, u32 type,
322 u64 tag, void *val, struct sde_crtc_res_ops *ops)
Alan Kwongcdb2f282017-03-18 13:42:06 -0700323{
324 struct sde_crtc_res *res;
325 struct drm_crtc *crtc;
326
327 if (!rp || !ops) {
328 SDE_ERROR("invalid resource pool/ops\n");
329 return -EINVAL;
330 }
331
332 crtc = _sde_crtc_rp_to_crtc(rp);
333 if (!crtc) {
334 SDE_ERROR("invalid crtc\n");
335 return -EINVAL;
336 }
337
338 list_for_each_entry(res, &rp->res_list, list) {
339 if (res->type != type || res->tag != tag)
340 continue;
341 SDE_ERROR("crtc%d.%u already exist res:0x%x/0x%llx/%pK/%d\n",
342 crtc->base.id, rp->sequence_id,
343 res->type, res->tag, res->val,
344 atomic_read(&res->refcount));
345 return -EEXIST;
346 }
347 res = kzalloc(sizeof(struct sde_crtc_res), GFP_KERNEL);
348 if (!res)
349 return -ENOMEM;
350 INIT_LIST_HEAD(&res->list);
351 atomic_set(&res->refcount, 1);
352 res->type = type;
353 res->tag = tag;
354 res->val = val;
355 res->ops = *ops;
356 list_add_tail(&res->list, &rp->res_list);
357 SDE_DEBUG("crtc%d.%u added res:0x%x/0x%llx\n",
358 crtc->base.id, rp->sequence_id, type, tag);
359 return 0;
360}
361
362/**
Alan Kwong310e9b02017-08-03 02:04:07 -0400363 * _sde_crtc_rp_add - add given resource to resource pool
364 * @rp: Pointer to original resource pool
365 * @type: Resource type
366 * @tag: Search tag for given resource
367 * @val: Resource handle
368 * @ops: Resource callback operations
369 * return: 0 if success; error code otherwise
370 */
371static int _sde_crtc_rp_add(struct sde_crtc_respool *rp, u32 type, u64 tag,
372 void *val, struct sde_crtc_res_ops *ops)
373{
374 int rc;
375
376 if (!rp) {
377 SDE_ERROR("invalid resource pool\n");
378 return -EINVAL;
379 }
380
381 mutex_lock(rp->rp_lock);
382 rc = _sde_crtc_rp_add_no_lock(rp, type, tag, val, ops);
383 mutex_unlock(rp->rp_lock);
384 return rc;
385}
386
387/**
Alan Kwongcdb2f282017-03-18 13:42:06 -0700388 * _sde_crtc_rp_get - look up the resource in the given resource pool and
 389 * obtain it if available; otherwise, obtain the resource from the global pool
390 * @rp: Pointer to original resource pool
391 * @type: Resource type
392 * @tag: Search tag for given resource
393 * return: Resource handle if success; pointer error or null otherwise
394 */
395static void *_sde_crtc_rp_get(struct sde_crtc_respool *rp, u32 type, u64 tag)
396{
Alan Kwong310e9b02017-08-03 02:04:07 -0400397 struct sde_crtc_respool *old_rp;
Alan Kwongcdb2f282017-03-18 13:42:06 -0700398 struct sde_crtc_res *res;
399 void *val = NULL;
400 int rc;
401 struct drm_crtc *crtc;
402
403 if (!rp) {
404 SDE_ERROR("invalid resource pool\n");
405 return NULL;
406 }
407
408 crtc = _sde_crtc_rp_to_crtc(rp);
409 if (!crtc) {
410 SDE_ERROR("invalid crtc\n");
411 return NULL;
412 }
413
Alan Kwong310e9b02017-08-03 02:04:07 -0400414 mutex_lock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700415 list_for_each_entry(res, &rp->res_list, list) {
416 if (res->type != type || res->tag != tag)
417 continue;
418 SDE_DEBUG("crtc%d.%u found res:0x%x/0x%llx/%pK/%d\n",
419 crtc->base.id, rp->sequence_id,
420 res->type, res->tag, res->val,
421 atomic_read(&res->refcount));
422 atomic_inc(&res->refcount);
423 res->flags &= ~SDE_CRTC_RES_FLAG_FREE;
Alan Kwong310e9b02017-08-03 02:04:07 -0400424 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700425 return res->val;
426 }
427 list_for_each_entry(res, &rp->res_list, list) {
428 if (res->type != type || !(res->flags & SDE_CRTC_RES_FLAG_FREE))
429 continue;
430 SDE_DEBUG("crtc%d.%u retag res:0x%x/0x%llx/%pK/%d\n",
431 crtc->base.id, rp->sequence_id,
432 res->type, res->tag, res->val,
433 atomic_read(&res->refcount));
434 atomic_inc(&res->refcount);
435 res->tag = tag;
436 res->flags &= ~SDE_CRTC_RES_FLAG_FREE;
Alan Kwong310e9b02017-08-03 02:04:07 -0400437 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700438 return res->val;
439 }
Alan Kwong310e9b02017-08-03 02:04:07 -0400440 /* not in this rp, try to grab from global pool */
Alan Kwongcdb2f282017-03-18 13:42:06 -0700441 if (rp->ops.get)
442 val = rp->ops.get(NULL, type, -1);
Alan Kwong310e9b02017-08-03 02:04:07 -0400443 if (!IS_ERR_OR_NULL(val))
444 goto add_res;
445 /*
446 * Search older resource pools for hw blk with matching type,
 447 * necessary when the resource is still held by a previous state
 448 * of this object that has not yet been cleaned up.
449 *
450 * This enables searching of all resources currently owned
451 * by this crtc even though the resource might not be used
452 * in the current atomic state. This allows those resources
453 * to be re-acquired by the new atomic state immediately
454 * without waiting for the resources to be fully released.
455 */
456 else if (IS_ERR_OR_NULL(val) && (type < SDE_HW_BLK_MAX)) {
457 list_for_each_entry(old_rp, rp->rp_head, rp_list) {
458 if (old_rp == rp)
459 continue;
460
461 list_for_each_entry(res, &old_rp->res_list, list) {
462 if (res->type != type)
463 continue;
464 SDE_DEBUG(
465 "crtc%d.%u found res:0x%x//%pK/ in crtc%d.%d\n",
466 crtc->base.id,
467 rp->sequence_id,
468 res->type, res->val,
469 crtc->base.id,
470 old_rp->sequence_id);
471 SDE_EVT32_VERBOSE(crtc->base.id,
472 rp->sequence_id,
473 res->type, res->val,
474 crtc->base.id,
475 old_rp->sequence_id);
476 if (res->ops.get)
477 res->ops.get(res->val, 0, -1);
478 val = res->val;
479 break;
480 }
481
482 if (!IS_ERR_OR_NULL(val))
483 break;
484 }
485 }
Alan Kwongcdb2f282017-03-18 13:42:06 -0700486 if (IS_ERR_OR_NULL(val)) {
Alan Kwong42e35052017-05-05 06:52:51 -0700487 SDE_DEBUG("crtc%d.%u failed to get res:0x%x//\n",
Alan Kwongcdb2f282017-03-18 13:42:06 -0700488 crtc->base.id, rp->sequence_id, type);
Alan Kwong310e9b02017-08-03 02:04:07 -0400489 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700490 return NULL;
491 }
Alan Kwong310e9b02017-08-03 02:04:07 -0400492add_res:
493 rc = _sde_crtc_rp_add_no_lock(rp, type, tag, val, &rp->ops);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700494 if (rc) {
495 SDE_ERROR("crtc%d.%u failed to add res:0x%x/0x%llx\n",
496 crtc->base.id, rp->sequence_id, type, tag);
497 if (rp->ops.put)
498 rp->ops.put(val);
499 val = NULL;
500 }
Alan Kwong310e9b02017-08-03 02:04:07 -0400501 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700502 return val;
503}
504
505/**
506 * _sde_crtc_rp_put - return given resource to resource pool
507 * @rp: Pointer to original resource pool
508 * @type: Resource type
509 * @tag: Search tag for given resource
510 * return: None
511 */
512static void _sde_crtc_rp_put(struct sde_crtc_respool *rp, u32 type, u64 tag)
513{
514 struct sde_crtc_res *res, *next;
515 struct drm_crtc *crtc;
516
517 if (!rp) {
518 SDE_ERROR("invalid resource pool\n");
519 return;
520 }
521
522 crtc = _sde_crtc_rp_to_crtc(rp);
523 if (!crtc) {
524 SDE_ERROR("invalid crtc\n");
525 return;
526 }
527
Alan Kwong310e9b02017-08-03 02:04:07 -0400528 mutex_lock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700529 list_for_each_entry_safe(res, next, &rp->res_list, list) {
530 if (res->type != type || res->tag != tag)
531 continue;
532 SDE_DEBUG("crtc%d.%u found res:0x%x/0x%llx/%pK/%d\n",
533 crtc->base.id, rp->sequence_id,
534 res->type, res->tag, res->val,
535 atomic_read(&res->refcount));
536 if (res->flags & SDE_CRTC_RES_FLAG_FREE)
537 SDE_ERROR(
538 "crtc%d.%u already free res:0x%x/0x%llx/%pK/%d\n",
539 crtc->base.id, rp->sequence_id,
540 res->type, res->tag, res->val,
541 atomic_read(&res->refcount));
542 else if (atomic_dec_return(&res->refcount) == 0)
543 res->flags |= SDE_CRTC_RES_FLAG_FREE;
544
Alan Kwong310e9b02017-08-03 02:04:07 -0400545 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700546 return;
547 }
548 SDE_ERROR("crtc%d.%u not found res:0x%x/0x%llx\n",
549 crtc->base.id, rp->sequence_id, type, tag);
Alan Kwong310e9b02017-08-03 02:04:07 -0400550 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700551}
552
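/**
 * sde_crtc_res_add - add given resource to the crtc state resource pool
 * @state: Pointer to drm crtc state
 * @type: Resource type
 * @tag: Search tag for given resource
 * @val: Resource handle
 * @ops: Resource callback operations
 * return: 0 if success; error code otherwise
 */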
553int sde_crtc_res_add(struct drm_crtc_state *state, u32 type, u64 tag,
554 void *val, struct sde_crtc_res_ops *ops)
555{
556 struct sde_crtc_respool *rp;
557
558 if (!state) {
559 SDE_ERROR("invalid parameters\n");
560 return -EINVAL;
561 }
562
563 rp = &to_sde_crtc_state(state)->rp;
564 return _sde_crtc_rp_add(rp, type, tag, val, ops);
565}
566
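/**
 * sde_crtc_res_get - lookup or allocate resource via crtc state resource pool
 * @state: Pointer to drm crtc state
 * @type: Resource type
 * @tag: Search tag for given resource
 * return: Resource handle if success; null otherwise
 */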
567void *sde_crtc_res_get(struct drm_crtc_state *state, u32 type, u64 tag)
568{
569 struct sde_crtc_respool *rp;
570 void *val;
571
572 if (!state) {
573 SDE_ERROR("invalid parameters\n");
574 return NULL;
575 }
576
577 rp = &to_sde_crtc_state(state)->rp;
578 val = _sde_crtc_rp_get(rp, type, tag);
579 if (IS_ERR(val)) {
580 SDE_ERROR("failed to get res type:0x%x:0x%llx\n",
581 type, tag);
582 return NULL;
583 }
584
585 return val;
586}
587
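/**
 * sde_crtc_res_put - return given resource to the crtc state resource pool
 * @state: Pointer to drm crtc state
 * @type: Resource type
 * @tag: Search tag for given resource
 * return: None
 */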
588void sde_crtc_res_put(struct drm_crtc_state *state, u32 type, u64 tag)
589{
590 struct sde_crtc_respool *rp;
591
592 if (!state) {
593 SDE_ERROR("invalid parameters\n");
594 return;
595 }
596
597 rp = &to_sde_crtc_state(state)->rp;
598 _sde_crtc_rp_put(rp, type, tag);
599}
600
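/**
 * _sde_crtc_deinit_events - event teardown hook invoked from crtc destroy
 * @sde_crtc: Pointer to sde crtc
 * return: None
 */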
Clarence Ipa18d4832017-03-13 12:35:44 -0700601static void _sde_crtc_deinit_events(struct sde_crtc *sde_crtc)
602{
603 if (!sde_crtc)
604 return;
Clarence Ipa18d4832017-03-13 12:35:44 -0700605}
606
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +0530607/**
 608 * _sde_crtc_destroy_dest_scaler - free memory allocated for scaler lut
609 * @sde_crtc: Pointer to sde crtc
610 */
611static void _sde_crtc_destroy_dest_scaler(struct sde_crtc *sde_crtc)
612{
613 if (!sde_crtc)
614 return;
615
616 kfree(sde_crtc->scl3_lut_cfg);
617}
618
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700619static void sde_crtc_destroy(struct drm_crtc *crtc)
620{
621 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
622
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -0400623 SDE_DEBUG("\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -0400624
625 if (!crtc)
626 return;
627
Dhaval Patele4a5dda2016-10-13 19:29:30 -0700628 if (sde_crtc->blob_info)
629 drm_property_unreference_blob(sde_crtc->blob_info);
Clarence Ip7a753bb2016-07-07 11:47:44 -0400630 msm_property_destroy(&sde_crtc->property_info);
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -0700631 sde_cp_crtc_destroy_properties(crtc);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +0530632 _sde_crtc_destroy_dest_scaler(sde_crtc);
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -0700633
Clarence Ip24f80662016-06-13 19:05:32 -0400634 sde_fence_deinit(&sde_crtc->output_fence);
Clarence Ipa18d4832017-03-13 12:35:44 -0700635 _sde_crtc_deinit_events(sde_crtc);
Clarence Ip7a753bb2016-07-07 11:47:44 -0400636
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700637 drm_crtc_cleanup(crtc);
Clarence Ip7f70ce42017-03-20 06:53:46 -0700638 mutex_destroy(&sde_crtc->crtc_lock);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700639 kfree(sde_crtc);
640}
641
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700642static bool sde_crtc_mode_fixup(struct drm_crtc *crtc,
643 const struct drm_display_mode *mode,
644 struct drm_display_mode *adjusted_mode)
645{
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -0400646 SDE_DEBUG("\n");
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -0400647
Raviteja Tamatam68892de2017-06-20 04:47:19 +0530648 if ((msm_is_mode_seamless(adjusted_mode) ||
649 msm_is_mode_seamless_vrr(adjusted_mode)) &&
650 (!crtc->enabled)) {
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -0400651 SDE_ERROR("crtc state prevents seamless transition\n");
652 return false;
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -0400653 }
654
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700655 return true;
656}
657
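/**
 * _sde_crtc_setup_blend_cfg - configure mixer blend operation for one stage
 * @mixer: Pointer to crtc mixer being programmed
 * @pstate: Pointer to sde plane state staged on the mixer
 * @format: Pointer to sde format of the plane framebuffer
 * return: None
 */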
Dhaval Patel48c76022016-09-01 17:51:23 -0700658static void _sde_crtc_setup_blend_cfg(struct sde_crtc_mixer *mixer,
659 struct sde_plane_state *pstate, struct sde_format *format)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400660{
Dhaval Patel48c76022016-09-01 17:51:23 -0700661 uint32_t blend_op, fg_alpha, bg_alpha;
662 uint32_t blend_type;
Dhaval Patel44f12472016-08-29 12:19:47 -0700663 struct sde_hw_mixer *lm = mixer->hw_lm;
664
Dhaval Patel48c76022016-09-01 17:51:23 -0700665 /* default to opaque blending */
666 fg_alpha = sde_plane_get_property(pstate, PLANE_PROP_ALPHA);
667 bg_alpha = 0xFF - fg_alpha;
668 blend_op = SDE_BLEND_FG_ALPHA_FG_CONST | SDE_BLEND_BG_ALPHA_BG_CONST;
669 blend_type = sde_plane_get_property(pstate, PLANE_PROP_BLEND_OP);
Dhaval Patel44f12472016-08-29 12:19:47 -0700670
Dhaval Patel48c76022016-09-01 17:51:23 -0700671 SDE_DEBUG("blend type:0x%x blend alpha:0x%x\n", blend_type, fg_alpha);
672
673 switch (blend_type) {
674
675 case SDE_DRM_BLEND_OP_OPAQUE:
676 blend_op = SDE_BLEND_FG_ALPHA_FG_CONST |
677 SDE_BLEND_BG_ALPHA_BG_CONST;
678 break;
679
680 case SDE_DRM_BLEND_OP_PREMULTIPLIED:
681 if (format->alpha_enable) {
682 blend_op = SDE_BLEND_FG_ALPHA_FG_CONST |
683 SDE_BLEND_BG_ALPHA_FG_PIXEL;
684 if (fg_alpha != 0xff) {
685 bg_alpha = fg_alpha;
686 blend_op |= SDE_BLEND_BG_MOD_ALPHA |
687 SDE_BLEND_BG_INV_MOD_ALPHA;
688 } else {
689 blend_op |= SDE_BLEND_BG_INV_ALPHA;
690 }
691 }
692 break;
693
694 case SDE_DRM_BLEND_OP_COVERAGE:
695 if (format->alpha_enable) {
696 blend_op = SDE_BLEND_FG_ALPHA_FG_PIXEL |
697 SDE_BLEND_BG_ALPHA_FG_PIXEL;
698 if (fg_alpha != 0xff) {
699 bg_alpha = fg_alpha;
700 blend_op |= SDE_BLEND_FG_MOD_ALPHA |
701 SDE_BLEND_FG_INV_MOD_ALPHA |
702 SDE_BLEND_BG_MOD_ALPHA |
703 SDE_BLEND_BG_INV_MOD_ALPHA;
704 } else {
705 blend_op |= SDE_BLEND_BG_INV_ALPHA;
706 }
707 }
708 break;
709 default:
710 /* do nothing */
711 break;
Clarence Ipd9f9fa62016-09-09 13:42:32 -0400712 }
Dhaval Patel48c76022016-09-01 17:51:23 -0700713
714 lm->ops.setup_blend_config(lm, pstate->stage, fg_alpha,
715 bg_alpha, blend_op);
Dhaval Patel6c666622017-03-21 23:02:59 -0700716 SDE_DEBUG(
717 "format: %4.4s, alpha_enable %u fg alpha:0x%x bg alpha:0x%x blend_op:0x%x\n",
718 (char *) &format->base.pixel_format,
Dhaval Patel48c76022016-09-01 17:51:23 -0700719 format->alpha_enable, fg_alpha, bg_alpha, blend_op);
720}
721
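/**
 * _sde_crtc_setup_dim_layer_cfg - split a dim layer across the crtc mixers
 * @crtc: Pointer to drm crtc structure
 * @sde_crtc: Pointer to sde crtc
 * @mixer: Pointer to array of crtc mixers
 * @dim_layer: Pointer to dim layer configuration to be programmed
 * return: None
 */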
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800722static void _sde_crtc_setup_dim_layer_cfg(struct drm_crtc *crtc,
723 struct sde_crtc *sde_crtc, struct sde_crtc_mixer *mixer,
724 struct sde_hw_dim_layer *dim_layer)
725{
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500726 struct sde_crtc_state *cstate;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800727 struct sde_hw_mixer *lm;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800728 struct sde_hw_dim_layer split_dim_layer;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800729 int i;
730
731 if (!dim_layer->rect.w || !dim_layer->rect.h) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -0700732 SDE_DEBUG("empty dim_layer\n");
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800733 return;
734 }
735
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500736 cstate = to_sde_crtc_state(crtc->state);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800737
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -0700738 SDE_DEBUG("dim_layer - flags:%d, stage:%d\n",
739 dim_layer->flags, dim_layer->stage);
740
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800741 split_dim_layer.stage = dim_layer->stage;
742 split_dim_layer.color_fill = dim_layer->color_fill;
743
744 /*
745 * traverse through the layer mixers attached to crtc and find the
746 * intersecting dim layer rect in each LM and program accordingly.
747 */
748 for (i = 0; i < sde_crtc->num_mixers; i++) {
749 split_dim_layer.flags = dim_layer->flags;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800750
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500751 sde_kms_rect_intersect(&cstate->lm_bounds[i], &dim_layer->rect,
Lloyd Atkinsone0e11e22017-01-17 12:08:48 -0500752 &split_dim_layer.rect);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500753 if (sde_kms_rect_is_null(&split_dim_layer.rect)) {
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800754 /*
755 * no extra programming required for non-intersecting
756 * layer mixers with INCLUSIVE dim layer
757 */
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500758 if (split_dim_layer.flags & SDE_DRM_DIM_LAYER_INCLUSIVE)
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800759 continue;
760
761 /*
762 * program the other non-intersecting layer mixers with
763 * INCLUSIVE dim layer of full size for uniformity
764 * with EXCLUSIVE dim layer config.
765 */
766 split_dim_layer.flags &= ~SDE_DRM_DIM_LAYER_EXCLUSIVE;
767 split_dim_layer.flags |= SDE_DRM_DIM_LAYER_INCLUSIVE;
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500768 memcpy(&split_dim_layer.rect, &cstate->lm_bounds[i],
769 sizeof(split_dim_layer.rect));
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800770
771 } else {
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -0500772 split_dim_layer.rect.x =
773 split_dim_layer.rect.x -
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -0700774 cstate->lm_bounds[i].x;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800775 }
776
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -0700777 SDE_DEBUG("split_dim_layer - LM:%d, rect:{%d,%d,%d,%d}}\n",
778 i, split_dim_layer.rect.x, split_dim_layer.rect.y,
779 split_dim_layer.rect.w, split_dim_layer.rect.h);
780
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -0800781 lm = mixer[i].hw_lm;
782 mixer[i].mixer_op_mode |= 1 << split_dim_layer.stage;
783 lm->ops.setup_dim_layer(lm, &split_dim_layer);
784 }
785}
786
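/**
 * sde_crtc_get_crtc_roi - retrieve the crtc roi cached in the crtc state
 * @state: Pointer to drm crtc state
 * @crtc_roi: Pointer updated to reference the cached crtc roi
 * return: None
 */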
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400787void sde_crtc_get_crtc_roi(struct drm_crtc_state *state,
788 const struct sde_rect **crtc_roi)
789{
790 struct sde_crtc_state *crtc_state;
791
792 if (!state || !crtc_roi)
793 return;
794
795 crtc_state = to_sde_crtc_state(state);
796 *crtc_roi = &crtc_state->crtc_roi;
797}
798
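/**
 * _sde_crtc_set_roi_v1 - copy user provided roi_v1 rectangles into crtc state
 * @state: Pointer to drm crtc state
 * @usr_ptr: Pointer to user space sde_drm_roi_v1 structure
 * return: 0 if success; error code otherwise
 */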
799static int _sde_crtc_set_roi_v1(struct drm_crtc_state *state,
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +0530800 void __user *usr_ptr)
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400801{
802 struct drm_crtc *crtc;
803 struct sde_crtc_state *cstate;
804 struct sde_drm_roi_v1 roi_v1;
805 int i;
806
807 if (!state) {
808 SDE_ERROR("invalid args\n");
809 return -EINVAL;
810 }
811
812 cstate = to_sde_crtc_state(state);
813 crtc = cstate->base.crtc;
814
815 memset(&cstate->user_roi_list, 0, sizeof(cstate->user_roi_list));
816
817 if (!usr_ptr) {
818 SDE_DEBUG("crtc%d: rois cleared\n", DRMID(crtc));
819 return 0;
820 }
821
822 if (copy_from_user(&roi_v1, usr_ptr, sizeof(roi_v1))) {
823 SDE_ERROR("crtc%d: failed to copy roi_v1 data\n", DRMID(crtc));
824 return -EINVAL;
825 }
826
827 SDE_DEBUG("crtc%d: num_rects %d\n", DRMID(crtc), roi_v1.num_rects);
828
829 if (roi_v1.num_rects == 0) {
830 SDE_DEBUG("crtc%d: rois cleared\n", DRMID(crtc));
831 return 0;
832 }
833
834 if (roi_v1.num_rects > SDE_MAX_ROI_V1) {
835 SDE_ERROR("crtc%d: too many rects specified: %d\n", DRMID(crtc),
836 roi_v1.num_rects);
837 return -EINVAL;
838 }
839
840 cstate->user_roi_list.num_rects = roi_v1.num_rects;
841 for (i = 0; i < roi_v1.num_rects; ++i) {
842 cstate->user_roi_list.roi[i] = roi_v1.roi[i];
843 SDE_DEBUG("crtc%d: roi%d: roi (%d,%d) (%d,%d)\n",
844 DRMID(crtc), i,
845 cstate->user_roi_list.roi[i].x1,
846 cstate->user_roi_list.roi[i].y1,
847 cstate->user_roi_list.roi[i].x2,
848 cstate->user_roi_list.roi[i].y2);
849 }
850
851 return 0;
852}
853
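/**
 * _sde_crtc_setup_is_3dmux_dsc - check if any connector uses 3dmerge dsc
 * @state: Pointer to drm crtc state
 * return: True if a connected topology is SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC
 */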
Ingrid Gallardo83532222017-06-02 16:48:51 -0700854static bool _sde_crtc_setup_is_3dmux_dsc(struct drm_crtc_state *state)
855{
856 int i;
857 struct sde_crtc_state *cstate;
858 bool is_3dmux_dsc = false;
859
860 cstate = to_sde_crtc_state(state);
861
862 for (i = 0; i < cstate->num_connectors; i++) {
863 struct drm_connector *conn = cstate->connectors[i];
864
865 if (sde_connector_get_topology_name(conn) ==
866 SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC)
867 is_3dmux_dsc = true;
868 }
869
870 return is_3dmux_dsc;
871}
872
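/**
 * _sde_crtc_set_crtc_roi - merge connector user rois into the crtc roi
 * @crtc: Pointer to drm crtc structure
 * @state: Pointer to drm crtc state
 * return: 0 if success; error code otherwise
 */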
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400873static int _sde_crtc_set_crtc_roi(struct drm_crtc *crtc,
874 struct drm_crtc_state *state)
875{
876 struct drm_connector *conn;
877 struct drm_connector_state *conn_state;
878 struct sde_crtc *sde_crtc;
879 struct sde_crtc_state *crtc_state;
880 struct sde_rect *crtc_roi;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400881 int i, num_attached_conns = 0;
882
883 if (!crtc || !state)
884 return -EINVAL;
885
886 sde_crtc = to_sde_crtc(crtc);
887 crtc_state = to_sde_crtc_state(state);
888 crtc_roi = &crtc_state->crtc_roi;
889
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400890 for_each_connector_in_state(state->state, conn, conn_state, i) {
891 struct sde_connector_state *sde_conn_state;
892
893 if (!conn_state || conn_state->crtc != crtc)
894 continue;
895
896 if (num_attached_conns) {
897 SDE_ERROR(
898 "crtc%d: unsupported: roi on crtc w/ >1 connectors\n",
899 DRMID(crtc));
900 return -EINVAL;
901 }
902 ++num_attached_conns;
903
904 sde_conn_state = to_sde_connector_state(conn_state);
905
Ingrid Gallardo83532222017-06-02 16:48:51 -0700906 /*
 907 * The current driver only supports matching connector and crtc sizes,
 908 * but if support for different sizes is added, the driver needs to
 909 * check the connector roi here to make sure it is full screen for the
 910 * dsc 3d-mux topology that doesn't support partial update.
911 */
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400912 if (memcmp(&sde_conn_state->rois, &crtc_state->user_roi_list,
913 sizeof(crtc_state->user_roi_list))) {
914 SDE_ERROR("%s: crtc -> conn roi scaling unsupported\n",
915 sde_crtc->name);
916 return -EINVAL;
917 }
918 }
919
Lloyd Atkinsonc2baf412017-04-19 17:53:09 -0400920 sde_kms_rect_merge_rectangles(&crtc_state->user_roi_list, crtc_roi);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400921
922 SDE_DEBUG("%s: crtc roi (%d,%d,%d,%d)\n", sde_crtc->name,
923 crtc_roi->x, crtc_roi->y, crtc_roi->w, crtc_roi->h);
924
925 return 0;
926}
927
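/**
 * _sde_crtc_check_autorefresh - reject partial update combined with autorefresh
 * @crtc: Pointer to drm crtc structure
 * @state: Pointer to drm crtc state
 * return: 0 if success; error code otherwise
 */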
Lloyd Atkinson77382202017-02-01 14:59:43 -0500928static int _sde_crtc_check_autorefresh(struct drm_crtc *crtc,
929 struct drm_crtc_state *state)
930{
931 struct sde_crtc *sde_crtc;
932 struct sde_crtc_state *crtc_state;
933 struct drm_connector *conn;
934 struct drm_connector_state *conn_state;
935 int i;
936
937 if (!crtc || !state)
938 return -EINVAL;
939
940 sde_crtc = to_sde_crtc(crtc);
941 crtc_state = to_sde_crtc_state(state);
942
943 if (sde_kms_rect_is_null(&crtc_state->crtc_roi))
944 return 0;
945
946 /* partial update active, check if autorefresh is also requested */
947 for_each_connector_in_state(state->state, conn, conn_state, i) {
948 uint64_t autorefresh;
949
950 if (!conn_state || conn_state->crtc != crtc)
951 continue;
952
953 autorefresh = sde_connector_get_property(conn_state,
954 CONNECTOR_PROP_AUTOREFRESH);
955 if (autorefresh) {
956 SDE_ERROR(
957 "%s: autorefresh & partial crtc roi incompatible %llu\n",
958 sde_crtc->name, autorefresh);
959 return -EINVAL;
960 }
961 }
962
963 return 0;
964}
965
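/**
 * _sde_crtc_set_lm_roi - derive the layer mixer roi from the crtc roi
 * @crtc: Pointer to drm crtc structure
 * @state: Pointer to drm crtc state
 * @lm_idx: Index of the layer mixer within this crtc
 * return: 0 if success; error code otherwise
 */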
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400966static int _sde_crtc_set_lm_roi(struct drm_crtc *crtc,
967 struct drm_crtc_state *state, int lm_idx)
968{
969 struct sde_crtc *sde_crtc;
970 struct sde_crtc_state *crtc_state;
971 const struct sde_rect *crtc_roi;
972 const struct sde_rect *lm_bounds;
973 struct sde_rect *lm_roi;
974
975 if (!crtc || !state || lm_idx >= ARRAY_SIZE(crtc_state->lm_bounds))
976 return -EINVAL;
977
978 sde_crtc = to_sde_crtc(crtc);
979 crtc_state = to_sde_crtc_state(state);
980 crtc_roi = &crtc_state->crtc_roi;
981 lm_bounds = &crtc_state->lm_bounds[lm_idx];
982 lm_roi = &crtc_state->lm_roi[lm_idx];
983
Lloyd Atkinson73fb8092017-02-08 16:02:55 -0500984 if (sde_kms_rect_is_null(crtc_roi))
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400985 memcpy(lm_roi, lm_bounds, sizeof(*lm_roi));
Lloyd Atkinson73fb8092017-02-08 16:02:55 -0500986 else
987 sde_kms_rect_intersect(crtc_roi, lm_bounds, lm_roi);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400988
989 SDE_DEBUG("%s: lm%d roi (%d,%d,%d,%d)\n", sde_crtc->name, lm_idx,
990 lm_roi->x, lm_roi->y, lm_roi->w, lm_roi->h);
991
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +0530992 /*
993 * partial update is not supported with 3dmux dsc or dest scaler.
994 * hence, crtc roi must match the mixer dimensions.
995 */
996 if (crtc_state->num_ds_enabled ||
997 _sde_crtc_setup_is_3dmux_dsc(state)) {
998 if (memcmp(lm_roi, lm_bounds, sizeof(struct sde_rect))) {
999 SDE_ERROR("Unsupported: Dest scaler/3d mux DSC + PU\n");
1000 return -EINVAL;
1001 }
1002 }
1003
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001004 /* if any dimension is zero, clear all dimensions for clarity */
1005 if (sde_kms_rect_is_null(lm_roi))
1006 memset(lm_roi, 0, sizeof(*lm_roi));
1007
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001008 return 0;
1009}
1010
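/**
 * _sde_crtc_get_displays_affected - get bitmask of displays covered by the rois
 * @crtc: Pointer to drm crtc structure
 * @state: Pointer to drm crtc state
 * return: Bitmask with one bit set per affected physical display
 */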
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001011static u32 _sde_crtc_get_displays_affected(struct drm_crtc *crtc,
1012 struct drm_crtc_state *state)
1013{
1014 struct sde_crtc *sde_crtc;
1015 struct sde_crtc_state *crtc_state;
1016 u32 disp_bitmask = 0;
1017 int i;
1018
1019 sde_crtc = to_sde_crtc(crtc);
1020 crtc_state = to_sde_crtc_state(state);
1021
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05001022 /* pingpong split: one ROI, one LM, two physical displays */
1023 if (crtc_state->is_ppsplit) {
1024 u32 lm_split_width = crtc_state->lm_bounds[0].w / 2;
1025 struct sde_rect *roi = &crtc_state->lm_roi[0];
1026
1027 if (sde_kms_rect_is_null(roi))
1028 disp_bitmask = 0;
1029 else if ((u32)roi->x + (u32)roi->w <= lm_split_width)
1030 disp_bitmask = BIT(0); /* left only */
1031 else if (roi->x >= lm_split_width)
1032 disp_bitmask = BIT(1); /* right only */
1033 else
1034 disp_bitmask = BIT(0) | BIT(1); /* left and right */
1035 } else {
1036 for (i = 0; i < sde_crtc->num_mixers; i++) {
1037 if (!sde_kms_rect_is_null(&crtc_state->lm_roi[i]))
1038 disp_bitmask |= BIT(i);
1039 }
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001040 }
1041
1042 SDE_DEBUG("affected displays 0x%x\n", disp_bitmask);
1043
1044 return disp_bitmask;
1045}
1046
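/**
 * _sde_crtc_check_rois_centered_and_symmetric - validate split roi constraints
 * @crtc: Pointer to drm crtc structure
 * @state: Pointer to drm crtc state
 * return: 0 if success; error code otherwise
 */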
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001047static int _sde_crtc_check_rois_centered_and_symmetric(struct drm_crtc *crtc,
1048 struct drm_crtc_state *state)
1049{
1050 struct sde_crtc *sde_crtc;
1051 struct sde_crtc_state *crtc_state;
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001052 const struct sde_rect *roi[CRTC_DUAL_MIXERS];
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001053
1054 if (!crtc || !state)
1055 return -EINVAL;
1056
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001057 sde_crtc = to_sde_crtc(crtc);
1058 crtc_state = to_sde_crtc_state(state);
1059
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001060 if (sde_crtc->num_mixers > CRTC_DUAL_MIXERS) {
1061 SDE_ERROR("%s: unsupported number of mixers: %d\n",
1062 sde_crtc->name, sde_crtc->num_mixers);
1063 return -EINVAL;
1064 }
1065
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001066 /*
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05001067 * If using pingpong split: one ROI, one LM, two physical displays
1068 * then the ROI must be centered on the panel split boundary and
1069 * be of equal width across the split.
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001070 */
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05001071 if (crtc_state->is_ppsplit) {
1072 u16 panel_split_width;
1073 u32 display_mask;
1074
1075 roi[0] = &crtc_state->lm_roi[0];
1076
1077 if (sde_kms_rect_is_null(roi[0]))
1078 return 0;
1079
1080 display_mask = _sde_crtc_get_displays_affected(crtc, state);
1081 if (display_mask != (BIT(0) | BIT(1)))
1082 return 0;
1083
1084 panel_split_width = crtc_state->lm_bounds[0].w / 2;
1085 if (roi[0]->x + roi[0]->w / 2 != panel_split_width) {
1086 SDE_ERROR("%s: roi x %d w %d split %d\n",
1087 sde_crtc->name, roi[0]->x, roi[0]->w,
1088 panel_split_width);
1089 return -EINVAL;
1090 }
1091
1092 return 0;
1093 }
1094
1095 /*
1096 * On certain HW, if using 2 LM, ROIs must be split evenly between the
1097 * LMs and be of equal width.
1098 */
Clarence Ipffb87422017-06-30 13:37:48 -04001099 if (sde_crtc->num_mixers < 2)
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05001100 return 0;
1101
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001102 roi[0] = &crtc_state->lm_roi[0];
1103 roi[1] = &crtc_state->lm_roi[1];
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001104
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001105 /* if one of the roi is null it's a left/right-only update */
1106 if (sde_kms_rect_is_null(roi[0]) || sde_kms_rect_is_null(roi[1]))
1107 return 0;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001108
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001109 /* check lm rois are equal width & first roi ends at 2nd roi */
1110 if (roi[0]->x + roi[0]->w != roi[1]->x || roi[0]->w != roi[1]->w) {
1111 SDE_ERROR(
1112 "%s: rois not centered and symmetric: roi0 x %d w %d roi1 x %d w %d\n",
1113 sde_crtc->name, roi[0]->x, roi[0]->w,
1114 roi[1]->x, roi[1]->w);
1115 return -EINVAL;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001116 }
1117
1118 return 0;
1119}
1120
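/**
 * _sde_crtc_check_planes_within_crtc_roi - reject planes outside the crtc roi
 * @crtc: Pointer to drm crtc structure
 * @state: Pointer to drm crtc state
 * return: 0 if success; error code otherwise
 */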
1121static int _sde_crtc_check_planes_within_crtc_roi(struct drm_crtc *crtc,
1122 struct drm_crtc_state *state)
1123{
1124 struct sde_crtc *sde_crtc;
1125 struct sde_crtc_state *crtc_state;
1126 const struct sde_rect *crtc_roi;
Veera Sundaram Sankarand916e2a2017-10-12 14:52:26 -07001127 const struct drm_plane_state *pstate;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001128 struct drm_plane *plane;
1129
1130 if (!crtc || !state)
1131 return -EINVAL;
1132
1133 /*
 1134 * Reject the commit if a plane's CRTC destination coordinates fall outside
1135 * the partial CRTC ROI. LM output is determined via connector ROIs,
1136 * if they are specified, not Plane CRTC ROIs.
1137 */
1138
1139 sde_crtc = to_sde_crtc(crtc);
1140 crtc_state = to_sde_crtc_state(state);
1141 crtc_roi = &crtc_state->crtc_roi;
1142
1143 if (sde_kms_rect_is_null(crtc_roi))
1144 return 0;
1145
Veera Sundaram Sankarand916e2a2017-10-12 14:52:26 -07001146 drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001147 struct sde_rect plane_roi, intersection;
1148
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001149 if (IS_ERR_OR_NULL(pstate)) {
1150 int rc = PTR_ERR(pstate);
1151
1152 SDE_ERROR("%s: failed to get plane%d state, %d\n",
1153 sde_crtc->name, plane->base.id, rc);
1154 return rc;
1155 }
1156
1157 plane_roi.x = pstate->crtc_x;
1158 plane_roi.y = pstate->crtc_y;
1159 plane_roi.w = pstate->crtc_w;
1160 plane_roi.h = pstate->crtc_h;
1161 sde_kms_rect_intersect(crtc_roi, &plane_roi, &intersection);
1162 if (!sde_kms_rect_is_equal(&plane_roi, &intersection)) {
1163 SDE_ERROR(
1164 "%s: plane%d crtc roi (%d,%d,%d,%d) outside crtc roi (%d,%d,%d,%d)\n",
1165 sde_crtc->name, plane->base.id,
1166 plane_roi.x, plane_roi.y,
1167 plane_roi.w, plane_roi.h,
1168 crtc_roi->x, crtc_roi->y,
1169 crtc_roi->w, crtc_roi->h);
1170 return -E2BIG;
1171 }
1172 }
1173
1174 return 0;
1175}
1176
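/**
 * _sde_crtc_check_rois - validate crtc, layer mixer and plane roi constraints
 * @crtc: Pointer to drm crtc structure
 * @state: Pointer to drm crtc state
 * return: 0 if success; error code otherwise
 */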
1177static int _sde_crtc_check_rois(struct drm_crtc *crtc,
1178 struct drm_crtc_state *state)
1179{
1180 struct sde_crtc *sde_crtc;
1181 int lm_idx;
1182 int rc;
1183
1184 if (!crtc || !state)
1185 return -EINVAL;
1186
1187 sde_crtc = to_sde_crtc(crtc);
1188
1189 rc = _sde_crtc_set_crtc_roi(crtc, state);
1190 if (rc)
1191 return rc;
1192
Lloyd Atkinson77382202017-02-01 14:59:43 -05001193 rc = _sde_crtc_check_autorefresh(crtc, state);
1194 if (rc)
1195 return rc;
1196
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001197 for (lm_idx = 0; lm_idx < sde_crtc->num_mixers; lm_idx++) {
1198 rc = _sde_crtc_set_lm_roi(crtc, state, lm_idx);
1199 if (rc)
1200 return rc;
1201 }
1202
1203 rc = _sde_crtc_check_rois_centered_and_symmetric(crtc, state);
1204 if (rc)
1205 return rc;
1206
1207 rc = _sde_crtc_check_planes_within_crtc_roi(crtc, state);
1208 if (rc)
1209 return rc;
1210
1211 return 0;
1212}
1213
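/**
 * _sde_crtc_program_lm_output_roi - program mixer output size from the lm rois
 * @crtc: Pointer to drm crtc structure
 * return: None
 */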
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001214static void _sde_crtc_program_lm_output_roi(struct drm_crtc *crtc)
1215{
1216 struct sde_crtc *sde_crtc;
1217 struct sde_crtc_state *crtc_state;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001218 const struct sde_rect *lm_roi;
1219 struct sde_hw_mixer *hw_lm;
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001220 int lm_idx, lm_horiz_position;
1221
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001222 if (!crtc)
1223 return;
1224
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001225 sde_crtc = to_sde_crtc(crtc);
1226 crtc_state = to_sde_crtc_state(crtc->state);
1227
1228 lm_horiz_position = 0;
1229 for (lm_idx = 0; lm_idx < sde_crtc->num_mixers; lm_idx++) {
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001230 struct sde_hw_mixer_cfg cfg;
1231
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001232 lm_roi = &crtc_state->lm_roi[lm_idx];
1233 hw_lm = sde_crtc->mixers[lm_idx].hw_lm;
1234
1235 SDE_EVT32(DRMID(crtc_state->base.crtc), lm_idx,
1236 lm_roi->x, lm_roi->y, lm_roi->w, lm_roi->h);
1237
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001238 if (sde_kms_rect_is_null(lm_roi))
1239 continue;
1240
Ping Lif41c2ef2017-05-04 14:40:45 -07001241 hw_lm->cfg.out_width = lm_roi->w;
1242 hw_lm->cfg.out_height = lm_roi->h;
1243 hw_lm->cfg.right_mixer = lm_horiz_position;
1244
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001245 cfg.out_width = lm_roi->w;
1246 cfg.out_height = lm_roi->h;
1247 cfg.right_mixer = lm_horiz_position++;
1248 cfg.flags = 0;
1249 hw_lm->ops.setup_mixer_out(hw_lm, &cfg);
1250 }
1251}
1252
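/**
 * _sde_crtc_blend_setup_mixer - setup stage, blend and flush config for planes
 * @crtc: Pointer to drm crtc structure
 * @sde_crtc: Pointer to sde crtc
 * @mixer: Pointer to array of crtc mixers
 * return: None
 */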
Dhaval Patel48c76022016-09-01 17:51:23 -07001253static void _sde_crtc_blend_setup_mixer(struct drm_crtc *crtc,
1254 struct sde_crtc *sde_crtc, struct sde_crtc_mixer *mixer)
1255{
1256 struct drm_plane *plane;
Dhaval Patel6c666622017-03-21 23:02:59 -07001257 struct drm_framebuffer *fb;
1258 struct drm_plane_state *state;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001259 struct sde_crtc_state *cstate;
Dhaval Patel48c76022016-09-01 17:51:23 -07001260 struct sde_plane_state *pstate = NULL;
1261 struct sde_format *format;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001262 struct sde_hw_ctl *ctl;
1263 struct sde_hw_mixer *lm;
1264 struct sde_hw_stage_cfg *stage_cfg;
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001265 struct sde_rect plane_crtc_roi;
Dhaval Patel48c76022016-09-01 17:51:23 -07001266
Clarence Ipb776b532017-09-12 18:30:06 -04001267 u32 flush_mask, flush_sbuf;
Dhaval Patel572cfd22017-06-12 19:33:39 -07001268 uint32_t stage_idx, lm_idx;
1269 int zpos_cnt[SDE_STAGE_MAX + 1] = { 0 };
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001270 int i;
Dhaval Patel572cfd22017-06-12 19:33:39 -07001271 bool bg_alpha_enable = false;
Alan Kwong4dd64c82017-02-04 18:41:51 -08001272 u32 prefill = 0;
Dhaval Patel48c76022016-09-01 17:51:23 -07001273
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001274 if (!sde_crtc || !mixer) {
1275 SDE_ERROR("invalid sde_crtc or mixer\n");
1276 return;
1277 }
1278
1279 ctl = mixer->hw_ctl;
1280 lm = mixer->hw_lm;
1281 stage_cfg = &sde_crtc->stage_cfg;
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001282 cstate = to_sde_crtc_state(crtc->state);
Dhaval Patel44f12472016-08-29 12:19:47 -07001283
Clarence Ip7eb90452017-05-23 11:41:19 -04001284 cstate->sbuf_prefill_line = 0;
Clarence Ipb776b532017-09-12 18:30:06 -04001285 sde_crtc->sbuf_flush_mask = 0x0;
Clarence Ip7eb90452017-05-23 11:41:19 -04001286
Dhaval Patel44f12472016-08-29 12:19:47 -07001287 drm_atomic_crtc_for_each_plane(plane, crtc) {
Dhaval Patel6c666622017-03-21 23:02:59 -07001288 state = plane->state;
1289 if (!state)
1290 continue;
Dhaval Patel48c76022016-09-01 17:51:23 -07001291
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001292 plane_crtc_roi.x = state->crtc_x;
1293 plane_crtc_roi.y = state->crtc_y;
1294 plane_crtc_roi.w = state->crtc_w;
1295 plane_crtc_roi.h = state->crtc_h;
1296
Dhaval Patel6c666622017-03-21 23:02:59 -07001297 pstate = to_sde_plane_state(state);
1298 fb = state->fb;
Dhaval Patel44f12472016-08-29 12:19:47 -07001299
Clarence Ipb776b532017-09-12 18:30:06 -04001300 prefill = sde_plane_rot_calc_prefill(plane);
Clarence Ip7eb90452017-05-23 11:41:19 -04001301 if (prefill > cstate->sbuf_prefill_line)
1302 cstate->sbuf_prefill_line = prefill;
Alan Kwong4dd64c82017-02-04 18:41:51 -08001303
Clarence Ipb776b532017-09-12 18:30:06 -04001304 sde_plane_get_ctl_flush(plane, ctl, &flush_mask, &flush_sbuf);
Dhaval Patel44f12472016-08-29 12:19:47 -07001305
Clarence Ipb776b532017-09-12 18:30:06 -04001306 /* save sbuf flush value for later */
1307 sde_crtc->sbuf_flush_mask |= flush_sbuf;
Dhaval Patel48c76022016-09-01 17:51:23 -07001308
1309 SDE_DEBUG("crtc %d stage:%d - plane %d sspp %d fb %d\n",
Clarence Ipd9f9fa62016-09-09 13:42:32 -04001310 crtc->base.id,
Clarence Ipd9f9fa62016-09-09 13:42:32 -04001311 pstate->stage,
1312 plane->base.id,
1313 sde_plane_pipe(plane) - SSPP_VIG0,
Dhaval Patel6c666622017-03-21 23:02:59 -07001314 state->fb ? state->fb->base.id : -1);
Dhaval Patel44f12472016-08-29 12:19:47 -07001315
Dhaval Patel48c76022016-09-01 17:51:23 -07001316 format = to_sde_format(msm_framebuffer_format(pstate->base.fb));
Narendra Muppallaec11a0a2017-06-15 15:35:17 -07001317 if (!format) {
1318 SDE_ERROR("invalid format\n");
1319 return;
1320 }
1321
Dhaval Patel572cfd22017-06-12 19:33:39 -07001322 if (pstate->stage == SDE_STAGE_BASE && format->alpha_enable)
1323 bg_alpha_enable = true;
Dhaval Patel44f12472016-08-29 12:19:47 -07001324
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001325 SDE_EVT32(DRMID(crtc), DRMID(plane),
1326 state->fb ? state->fb->base.id : -1,
1327 state->src_x >> 16, state->src_y >> 16,
1328 state->src_w >> 16, state->src_h >> 16,
1329 state->crtc_x, state->crtc_y,
Clarence Ip7eb90452017-05-23 11:41:19 -04001330 state->crtc_w, state->crtc_h,
Clarence Ipb776b532017-09-12 18:30:06 -04001331 flush_sbuf != 0);
Dhaval Patel6c666622017-03-21 23:02:59 -07001332
Dhaval Patel572cfd22017-06-12 19:33:39 -07001333 stage_idx = zpos_cnt[pstate->stage]++;
1334 stage_cfg->stage[pstate->stage][stage_idx] =
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001335 sde_plane_pipe(plane);
Dhaval Patel572cfd22017-06-12 19:33:39 -07001336 stage_cfg->multirect_index[pstate->stage][stage_idx] =
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001337 pstate->multirect_index;
1338
Dhaval Patel572cfd22017-06-12 19:33:39 -07001339 SDE_EVT32(DRMID(crtc), DRMID(plane), stage_idx,
1340 sde_plane_pipe(plane) - SSPP_VIG0, pstate->stage,
1341 pstate->multirect_index, pstate->multirect_mode,
1342 format->base.pixel_format, fb ? fb->modifier[0] : 0);
1343
1344 /* blend config update */
1345 for (lm_idx = 0; lm_idx < sde_crtc->num_mixers; lm_idx++) {
1346 _sde_crtc_setup_blend_cfg(mixer + lm_idx, pstate,
1347 format);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001348 mixer[lm_idx].flush_mask |= flush_mask;
1349
Dhaval Patel572cfd22017-06-12 19:33:39 -07001350 if (bg_alpha_enable && !format->alpha_enable)
1351 mixer[lm_idx].mixer_op_mode = 0;
1352 else
1353 mixer[lm_idx].mixer_op_mode |=
Dhaval Patel48c76022016-09-01 17:51:23 -07001354 1 << pstate->stage;
Dhaval Patel48c76022016-09-01 17:51:23 -07001355 }
Dhaval Patel44f12472016-08-29 12:19:47 -07001356 }
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001357
1358 if (lm && lm->ops.setup_dim_layer) {
1359 cstate = to_sde_crtc_state(crtc->state);
1360 for (i = 0; i < cstate->num_dim_layers; i++)
1361 _sde_crtc_setup_dim_layer_cfg(crtc, sde_crtc,
1362 mixer, &cstate->dim_layer[i]);
1363 }
Alan Kwong4dd64c82017-02-04 18:41:51 -08001364
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05001365 _sde_crtc_program_lm_output_roi(crtc);
Dhaval Patel44f12472016-08-29 12:19:47 -07001366}
1367
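/**
 * _sde_crtc_swap_mixers_for_right_partial_update - swap mixers for dsc merge
 * @crtc: Pointer to drm crtc structure
 * return: None
 */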
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001368static void _sde_crtc_swap_mixers_for_right_partial_update(
1369 struct drm_crtc *crtc)
1370{
1371 struct sde_crtc *sde_crtc;
1372 struct sde_crtc_state *cstate;
1373 struct drm_encoder *drm_enc;
1374 bool is_right_only;
1375 bool encoder_in_dsc_merge = false;
1376
1377 if (!crtc || !crtc->state)
1378 return;
1379
1380 sde_crtc = to_sde_crtc(crtc);
1381 cstate = to_sde_crtc_state(crtc->state);
1382
1383 if (sde_crtc->num_mixers != CRTC_DUAL_MIXERS)
1384 return;
1385
1386 drm_for_each_encoder(drm_enc, crtc->dev) {
1387 if (drm_enc->crtc == crtc &&
1388 sde_encoder_is_dsc_merge(drm_enc)) {
1389 encoder_in_dsc_merge = true;
1390 break;
1391 }
1392 }
1393
1394 /**
1395 * For right-only partial update with DSC merge, we swap LM0 & LM1.
1396 * This is due to two reasons:
1397 * - On 8996, there is a DSC HW requirement that in DSC Merge Mode,
1398 * the left DSC must be used, right DSC cannot be used alone.
1399 * For right-only partial update, this means swap layer mixers to map
1400 * Left LM to Right INTF. On later HW this was relaxed.
1401 * - In DSC Merge mode, the physical encoder has already registered
1402 * PP0 as the master, to switch to right-only we would have to
1403 * reprogram to be driven by PP1 instead.
1404 * To support both cases, we prefer to support the mixer swap solution.
1405 */
1406 if (!encoder_in_dsc_merge)
1407 return;
1408
1409 is_right_only = sde_kms_rect_is_null(&cstate->lm_roi[0]) &&
1410 !sde_kms_rect_is_null(&cstate->lm_roi[1]);
1411
1412 if (is_right_only && !sde_crtc->mixers_swapped) {
1413 /* right-only update swap mixers */
1414 swap(sde_crtc->mixers[0], sde_crtc->mixers[1]);
1415 sde_crtc->mixers_swapped = true;
1416 } else if (!is_right_only && sde_crtc->mixers_swapped) {
1417 /* left-only or full update, swap back */
1418 swap(sde_crtc->mixers[0], sde_crtc->mixers[1]);
1419 sde_crtc->mixers_swapped = false;
1420 }
1421
1422 SDE_DEBUG("%s: right_only %d swapped %d, mix0->lm%d, mix1->lm%d\n",
1423 sde_crtc->name, is_right_only, sde_crtc->mixers_swapped,
1424 sde_crtc->mixers[0].hw_lm->idx - LM_0,
1425 sde_crtc->mixers[1].hw_lm->idx - LM_0);
1426 SDE_EVT32(DRMID(crtc), is_right_only, sde_crtc->mixers_swapped,
1427 sde_crtc->mixers[0].hw_lm->idx - LM_0,
1428 sde_crtc->mixers[1].hw_lm->idx - LM_0);
1429}
1430
Clarence Ipd9f9fa62016-09-09 13:42:32 -04001431/**
1432 * _sde_crtc_blend_setup - configure crtc mixers
1433 * @crtc: Pointer to drm crtc structure
1434 */
1435static void _sde_crtc_blend_setup(struct drm_crtc *crtc)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001436{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001437 struct sde_crtc *sde_crtc;
1438 struct sde_crtc_state *sde_crtc_state;
1439 struct sde_crtc_mixer *mixer;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001440 struct sde_hw_ctl *ctl;
1441 struct sde_hw_mixer *lm;
Dhaval Patel44f12472016-08-29 12:19:47 -07001442
1443 int i;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001444
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001445 if (!crtc)
1446 return;
1447
1448 sde_crtc = to_sde_crtc(crtc);
1449 sde_crtc_state = to_sde_crtc_state(crtc->state);
1450 mixer = sde_crtc->mixers;
1451
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04001452 SDE_DEBUG("%s\n", sde_crtc->name);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001453
Dhaval Patel48c76022016-09-01 17:51:23 -07001454 if (sde_crtc->num_mixers > CRTC_DUAL_MIXERS) {
1455 SDE_ERROR("invalid number mixers: %d\n", sde_crtc->num_mixers);
1456 return;
1457 }
1458
1459 for (i = 0; i < sde_crtc->num_mixers; i++) {
1460 if (!mixer[i].hw_lm || !mixer[i].hw_ctl) {
1461 SDE_ERROR("invalid lm or ctl assigned to mixer\n");
1462 return;
1463 }
1464 mixer[i].mixer_op_mode = 0;
1465 mixer[i].flush_mask = 0;
Lloyd Atkinsone5ec30d2016-08-23 14:32:32 -04001466 if (mixer[i].hw_ctl->ops.clear_all_blendstages)
1467 mixer[i].hw_ctl->ops.clear_all_blendstages(
1468 mixer[i].hw_ctl);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08001469
1470 /* clear dim_layer settings */
1471 lm = mixer[i].hw_lm;
1472 if (lm->ops.clear_dim_layer)
1473 lm->ops.clear_dim_layer(lm);
Dhaval Patel48c76022016-09-01 17:51:23 -07001474 }
1475
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001476 _sde_crtc_swap_mixers_for_right_partial_update(crtc);
1477
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001478 /* initialize stage cfg */
Clarence Ip8f7366c2016-07-05 12:15:26 -04001479 memset(&sde_crtc->stage_cfg, 0, sizeof(struct sde_hw_stage_cfg));
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001480
Dhaval Patel48c76022016-09-01 17:51:23 -07001481 _sde_crtc_blend_setup_mixer(crtc, sde_crtc, mixer);
1482
Abhijit Kulkarni71002ba2016-06-24 18:36:28 -04001483 for (i = 0; i < sde_crtc->num_mixers; i++) {
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001484 const struct sde_rect *lm_roi = &sde_crtc_state->lm_roi[i];
1485
Abhijit Kulkarni71002ba2016-06-24 18:36:28 -04001486 ctl = mixer[i].hw_ctl;
1487 lm = mixer[i].hw_lm;
Abhijit Kulkarni71002ba2016-06-24 18:36:28 -04001488
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001489 if (sde_kms_rect_is_null(lm_roi)) {
1490 SDE_DEBUG(
1491 "%s: lm%d leave ctl%d mask 0 since null roi\n",
1492 sde_crtc->name, lm->idx - LM_0,
1493 ctl->idx - CTL_0);
1494 continue;
1495 }
1496
Dhaval Patel48c76022016-09-01 17:51:23 -07001497 lm->ops.setup_alpha_out(lm, mixer[i].mixer_op_mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001498
Clarence Ip662698e2017-09-12 18:34:16 -04001499 mixer[i].pipe_mask = mixer[i].flush_mask;
Dhaval Patel48c76022016-09-01 17:51:23 -07001500 mixer[i].flush_mask |= ctl->ops.get_bitmask_mixer(ctl,
Abhijit Kulkarni71002ba2016-06-24 18:36:28 -04001501 mixer[i].hw_lm->idx);
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04001502
1503 /* stage config flush mask */
Dhaval Patel48c76022016-09-01 17:51:23 -07001504 ctl->ops.update_pending_flush(ctl, mixer[i].flush_mask);
1505
Clarence Ip8e69ad02016-12-09 09:43:57 -05001506 SDE_DEBUG("lm %d, op_mode 0x%X, ctl %d, flush mask 0x%x\n",
1507 mixer[i].hw_lm->idx - LM_0,
1508 mixer[i].mixer_op_mode,
1509 ctl->idx - CTL_0,
1510 mixer[i].flush_mask);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001511
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001512 ctl->ops.setup_blendstage(ctl, mixer[i].hw_lm->idx,
Dhaval Patel572cfd22017-06-12 19:33:39 -07001513 &sde_crtc->stage_cfg);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001514 }
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001515
1516 _sde_crtc_program_lm_output_roi(crtc);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04001517}
1518
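/**
 * _sde_crtc_find_plane_fb_modes - count planes per fb translation mode
 * @state: Pointer to drm crtc state
 * @fb_ns: Returns number of planes with non-secure framebuffers
 * @fb_sec: Returns number of planes with secure framebuffers
 * @fb_sec_dir: Returns number of planes with secure-direct framebuffers
 * Returns: 0 on success, negative error code otherwise
 */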
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001519static int _sde_crtc_find_plane_fb_modes(struct drm_crtc_state *state,
1520 uint32_t *fb_ns,
1521 uint32_t *fb_sec,
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001522 uint32_t *fb_sec_dir)
1523{
1524 struct drm_plane *plane;
1525 const struct drm_plane_state *pstate;
1526 struct sde_plane_state *sde_pstate;
1527 uint32_t mode = 0;
1528 int rc;
1529
1530 if (!state) {
1531 SDE_ERROR("invalid state\n");
1532 return -EINVAL;
1533 }
1534
1535 *fb_ns = 0;
1536 *fb_sec = 0;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001537 *fb_sec_dir = 0;
1538 drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
1539 if (IS_ERR_OR_NULL(pstate)) {
1540 rc = PTR_ERR(pstate);
1541 SDE_ERROR("crtc%d failed to get plane%d state%d\n",
1542 state->crtc->base.id,
1543 plane->base.id, rc);
1544 return rc;
1545 }
1546 sde_pstate = to_sde_plane_state(pstate);
1547 mode = sde_plane_get_property(sde_pstate,
1548 PLANE_PROP_FB_TRANSLATION_MODE);
1549 switch (mode) {
1550 case SDE_DRM_FB_NON_SEC:
1551 (*fb_ns)++;
1552 break;
1553 case SDE_DRM_FB_SEC:
1554 (*fb_sec)++;
1555 break;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001556 case SDE_DRM_FB_SEC_DIR_TRANS:
1557 (*fb_sec_dir)++;
1558 break;
1559 default:
 1560			SDE_ERROR("plane:%d has invalid fb_trans_mode:%d\n",
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001561 plane->base.id, mode);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001562 return -EINVAL;
1563 }
1564 }
1565 return 0;
1566}
1567
1568/**
 1569 * sde_crtc_get_secure_transition_ops - determines the operations that
 1570 * need to be performed before transitioning to the secure state.
 1571 * This function should be called after the new state has been swapped in.
 1572 * @crtc: Pointer to drm crtc structure
 1573 * Returns the bitmask of operations that need to be performed, or a
 1574 * negative error code on failure
1575 */
1576int sde_crtc_get_secure_transition_ops(struct drm_crtc *crtc,
1577 struct drm_crtc_state *old_crtc_state,
1578 bool old_valid_fb)
1579{
1580 struct drm_plane *plane;
1581 struct drm_encoder *encoder;
1582 struct sde_crtc *sde_crtc;
1583 struct sde_crtc_state *cstate;
1584 struct sde_crtc_smmu_state_data *smmu_state;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001585 uint32_t translation_mode = 0, secure_level;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001586 int ops = 0;
1587 bool post_commit = false;
1588
1589 if (!crtc || !crtc->state) {
1590 SDE_ERROR("invalid crtc\n");
1591 return -EINVAL;
1592 }
1593
1594 sde_crtc = to_sde_crtc(crtc);
1595 cstate = to_sde_crtc_state(crtc->state);
1596 smmu_state = &sde_crtc->smmu_state;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001597 secure_level = sde_crtc_get_secure_level(crtc, crtc->state);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001598
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001599 SDE_DEBUG("crtc%d, secure_level%d old_valid_fb%d\n",
1600 crtc->base.id, secure_level, old_valid_fb);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001601
Veera Sundaram Sankaranfd792402017-10-13 12:50:41 -07001602 SDE_EVT32_VERBOSE(DRMID(crtc), secure_level, smmu_state->state,
1603 old_valid_fb, SDE_EVTLOG_FUNC_ENTRY);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001604 /**
 1605	 * SMMU operations need to be delayed to post-commit for
 1606	 * video mode panels when switching back to non-secure
 1607	 * mode
1608 */
1609 drm_for_each_encoder(encoder, crtc->dev) {
1610 if (encoder->crtc != crtc)
1611 continue;
1612
Veera Sundaram Sankaranae995e02017-10-13 15:12:32 -07001613 post_commit |= sde_encoder_check_mode(encoder,
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05301614 MSM_DISPLAY_CAP_VID_MODE);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001615 }
1616
1617 drm_atomic_crtc_for_each_plane(plane, crtc) {
1618 if (!plane->state)
1619 continue;
1620
1621 translation_mode = sde_plane_get_property(
1622 to_sde_plane_state(plane->state),
1623 PLANE_PROP_FB_TRANSLATION_MODE);
1624 if (translation_mode > SDE_DRM_FB_SEC_DIR_TRANS) {
1625 SDE_ERROR("crtc%d, invalid translation_mode%d\n",
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001626 crtc->base.id, translation_mode);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001627 return -EINVAL;
1628 }
1629
1630 /**
 1631		 * we can stop iterating once we find a sec_dir
 1632		 * plane
1633 */
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001634 if (translation_mode == SDE_DRM_FB_SEC_DIR_TRANS)
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001635 break;
1636 }
1637
1638 switch (translation_mode) {
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001639 case SDE_DRM_FB_SEC_DIR_TRANS:
1640 /* secure display usecase */
1641 if ((smmu_state->state == ATTACHED) &&
1642 (secure_level == SDE_DRM_SEC_ONLY)) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001643 smmu_state->state = DETACH_ALL_REQ;
1644 smmu_state->transition_type = PRE_COMMIT;
1645 ops |= SDE_KMS_OPS_CRTC_SECURE_STATE_CHANGE;
1646 if (old_valid_fb) {
1647 ops |= (SDE_KMS_OPS_WAIT_FOR_TX_DONE |
1648 SDE_KMS_OPS_CLEANUP_PLANE_FB);
1649 }
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001650 /* secure camera usecase */
1651 } else if (smmu_state->state == ATTACHED) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001652 smmu_state->state = DETACH_SEC_REQ;
1653 smmu_state->transition_type = PRE_COMMIT;
1654 ops |= SDE_KMS_OPS_CRTC_SECURE_STATE_CHANGE;
1655 }
1656 break;
1657 case SDE_DRM_FB_SEC:
1658 case SDE_DRM_FB_NON_SEC:
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001659 if ((smmu_state->state == DETACHED_SEC) ||
1660 (smmu_state->state == DETACH_SEC_REQ)) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001661 smmu_state->state = ATTACH_SEC_REQ;
1662 smmu_state->transition_type = post_commit ?
1663 POST_COMMIT : PRE_COMMIT;
1664 ops |= SDE_KMS_OPS_CRTC_SECURE_STATE_CHANGE;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001665 if (old_valid_fb)
1666 ops |= SDE_KMS_OPS_WAIT_FOR_TX_DONE;
1667 } else if ((smmu_state->state == DETACHED) ||
1668 (smmu_state->state == DETACH_ALL_REQ)) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001669 smmu_state->state = ATTACH_ALL_REQ;
1670 smmu_state->transition_type = post_commit ?
1671 POST_COMMIT : PRE_COMMIT;
Veera Sundaram Sankaran47e718f2017-09-13 16:47:23 -07001672 ops |= SDE_KMS_OPS_CRTC_SECURE_STATE_CHANGE;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001673 if (old_valid_fb)
1674 ops |= (SDE_KMS_OPS_WAIT_FOR_TX_DONE |
1675 SDE_KMS_OPS_CLEANUP_PLANE_FB);
1676 }
1677 break;
1678 default:
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001679 SDE_ERROR("invalid plane fb_mode:%d\n", translation_mode);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001680 ops = 0;
1681 return -EINVAL;
1682 }
1683
1684 SDE_DEBUG("SMMU State:%d, type:%d ops:%x\n", smmu_state->state,
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001685 smmu_state->transition_type, ops);
Veera Sundaram Sankaranfd792402017-10-13 12:50:41 -07001686 /* log only during actual transition times */
1687 if (ops)
1688 SDE_EVT32(DRMID(crtc), secure_level, translation_mode,
1689 smmu_state->state, smmu_state->transition_type,
1690 ops, old_valid_fb, SDE_EVTLOG_FUNC_EXIT);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001691 return ops;
1692}
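
/*
 * Illustrative caller sketch (hypothetical, for documentation only; names
 * such as old_state, old_fb, ops and ret are placeholders and this is not
 * the actual driver call site):
 *
 *	ops = sde_crtc_get_secure_transition_ops(crtc, old_state, old_fb);
 *	if (ops < 0)
 *		return ops;
 *	if (ops & SDE_KMS_OPS_CRTC_SECURE_STATE_CHANGE)
 *		ret = sde_crtc_secure_ctrl(crtc, false);
 */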
1693
1694/**
1695 * _sde_crtc_scm_call - makes secure channel call to switch the VMIDs
 1696 * @vmid: switch the stage 2 translation to this VMID.
1697 */
1698static int _sde_crtc_scm_call(int vmid)
1699{
1700 struct scm_desc desc = {0};
1701 uint32_t num_sids;
1702 uint32_t *sec_sid;
1703 uint32_t mem_protect_sd_ctrl_id = MEM_PROTECT_SD_CTRL_SWITCH;
1704 int ret = 0;
1705
1706 /* This info should be queried from catalog */
1707 num_sids = SEC_SID_CNT;
1708 sec_sid = kcalloc(num_sids, sizeof(uint32_t), GFP_KERNEL);
1709 if (!sec_sid)
1710 return -ENOMEM;
1711
1712 /**
 1713	 * derive this info from the device tree/catalog; it is a combination
 1714	 * of the SMR mask and the SID for the secure context
1715 */
1716 sec_sid[0] = SEC_SID_MASK_0;
1717 sec_sid[1] = SEC_SID_MASK_1;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001718 dmac_flush_range(sec_sid, sec_sid + num_sids);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001719
 1720	SDE_DEBUG("calling scm_call for vmid %d\n", vmid);
1721
1722 desc.arginfo = SCM_ARGS(4, SCM_VAL, SCM_RW, SCM_VAL, SCM_VAL);
1723 desc.args[0] = MDP_DEVICE_ID;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001724 desc.args[1] = SCM_BUFFER_PHYS(sec_sid);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001725 desc.args[2] = sizeof(uint32_t) * num_sids;
1726 desc.args[3] = vmid;
1727
1728 ret = scm_call2(SCM_SIP_FNID(SCM_SVC_MP,
1729 mem_protect_sd_ctrl_id), &desc);
1730 if (ret) {
 1731		SDE_ERROR("scm_call2 failed: vmid (%lld), ret %d\n",
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001732 desc.args[3], ret);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001733 }
Veera Sundaram Sankaranfd792402017-10-13 12:50:41 -07001734 SDE_EVT32(mem_protect_sd_ctrl_id,
1735 desc.args[0], desc.args[3], num_sids,
1736 sec_sid[0], sec_sid[1], ret);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001737
1738 kfree(sec_sid);
1739 return ret;
1740}
1741
1742/**
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301743 * _sde_crtc_set_dest_scaler_lut - Set up the dest scaler LUT
1744 * LUTs are configured only once during boot
1745 * @sde_crtc: Pointer to sde crtc
1746 * @cstate: Pointer to sde crtc state
1747 */
1748static int _sde_crtc_set_dest_scaler_lut(struct sde_crtc *sde_crtc,
1749 struct sde_crtc_state *cstate, uint32_t lut_idx)
1750{
1751 struct sde_hw_scaler3_lut_cfg *cfg;
1752 u32 *lut_data = NULL;
1753 size_t len = 0;
1754 int ret = 0;
1755
1756 if (!sde_crtc || !cstate || !sde_crtc->scl3_lut_cfg) {
1757 SDE_ERROR("invalid args\n");
1758 return -EINVAL;
1759 }
1760
1761 if (sde_crtc->scl3_lut_cfg->is_configured) {
Lloyd Atkinsone08229c2017-10-02 17:53:30 -04001762 SDE_DEBUG("%s: lut already configured\n", sde_crtc->name);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301763 return 0;
1764 }
1765
1766 lut_data = msm_property_get_blob(&sde_crtc->property_info,
1767 &cstate->property_state, &len, lut_idx);
1768 if (!lut_data || !len) {
Lloyd Atkinsone08229c2017-10-02 17:53:30 -04001769 SDE_DEBUG("%s: lut(%d): cleared: %pK, %zu\n", sde_crtc->name,
1770 lut_idx, lut_data, len);
1771 lut_data = NULL;
1772 len = 0;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301773 }
1774
1775 cfg = sde_crtc->scl3_lut_cfg;
1776
1777 switch (lut_idx) {
1778 case CRTC_PROP_DEST_SCALER_LUT_ED:
1779 cfg->dir_lut = lut_data;
1780 cfg->dir_len = len;
1781 break;
1782 case CRTC_PROP_DEST_SCALER_LUT_CIR:
1783 cfg->cir_lut = lut_data;
1784 cfg->cir_len = len;
1785 break;
1786 case CRTC_PROP_DEST_SCALER_LUT_SEP:
1787 cfg->sep_lut = lut_data;
1788 cfg->sep_len = len;
1789 break;
1790 default:
1791 ret = -EINVAL;
 1792		SDE_ERROR("invalid LUT index = %d\n", lut_idx);
1793 break;
1794 }
1795
Lloyd Atkinsone08229c2017-10-02 17:53:30 -04001796 cfg->is_configured = cfg->dir_lut && cfg->cir_lut && cfg->sep_lut;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301797
1798 return ret;
1799}
1800
1801/**
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001802 * sde_crtc_secure_ctrl - Initiates the operations to switch between secure
1803 * and non-secure mode
1804 * @crtc: Pointer to crtc
1805 * @post_commit: if this operation is triggered after commit
1806 */
1807int sde_crtc_secure_ctrl(struct drm_crtc *crtc, bool post_commit)
1808{
1809 struct sde_crtc *sde_crtc;
1810 struct sde_crtc_state *cstate;
1811 struct sde_kms *sde_kms;
1812 struct sde_crtc_smmu_state_data *smmu_state;
1813 int ret = 0;
1814 int old_smmu_state;
1815
1816 if (!crtc || !crtc->state) {
1817 SDE_ERROR("invalid crtc\n");
1818 return -EINVAL;
1819 }
1820
1821 sde_kms = _sde_crtc_get_kms(crtc);
1822 if (!sde_kms) {
1823 SDE_ERROR("invalid kms\n");
1824 return -EINVAL;
1825 }
1826
1827 sde_crtc = to_sde_crtc(crtc);
1828 cstate = to_sde_crtc_state(crtc->state);
1829 smmu_state = &sde_crtc->smmu_state;
1830 old_smmu_state = smmu_state->state;
1831
Veera Sundaram Sankaranfd792402017-10-13 12:50:41 -07001832 SDE_EVT32(DRMID(crtc), smmu_state->state, smmu_state->transition_type,
1833 post_commit, SDE_EVTLOG_FUNC_ENTRY);
1834
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001835 if ((!smmu_state->transition_type) ||
1836 ((smmu_state->transition_type == POST_COMMIT) && !post_commit))
1837 /* Bail out */
1838 return 0;
1839
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001840 /* Secure UI use case enable */
1841 switch (smmu_state->state) {
1842 case DETACH_ALL_REQ:
1843 /* detach_all_contexts */
1844 ret = sde_kms_mmu_detach(sde_kms, false);
1845 if (ret) {
1846 SDE_ERROR("crtc: %d, failed to detach %d\n",
1847 crtc->base.id, ret);
1848 goto error;
1849 }
1850
1851 ret = _sde_crtc_scm_call(VMID_CP_SEC_DISPLAY);
1852 if (ret)
1853 goto error;
1854
1855 smmu_state->state = DETACHED;
1856 break;
1857 /* Secure UI use case disable */
1858 case ATTACH_ALL_REQ:
1859 ret = _sde_crtc_scm_call(VMID_CP_PIXEL);
1860 if (ret)
1861 goto error;
1862
1863 /* attach_all_contexts */
1864 ret = sde_kms_mmu_attach(sde_kms, false);
1865 if (ret) {
1866 SDE_ERROR("crtc: %d, failed to attach %d\n",
1867 crtc->base.id,
1868 ret);
1869 goto error;
1870 }
1871
1872 smmu_state->state = ATTACHED;
1873
1874 break;
1875 /* Secure preview enable */
1876 case DETACH_SEC_REQ:
1877 /* detach secure_context */
1878 ret = sde_kms_mmu_detach(sde_kms, true);
1879 if (ret) {
1880 SDE_ERROR("crtc: %d, failed to detach %d\n",
1881 crtc->base.id,
1882 ret);
1883 goto error;
1884 }
1885
1886 smmu_state->state = DETACHED_SEC;
1887 ret = _sde_crtc_scm_call(VMID_CP_CAMERA_PREVIEW);
1888 if (ret)
1889 goto error;
1890
1891 break;
1892
1893 /* Secure preview disable */
1894 case ATTACH_SEC_REQ:
1895 ret = _sde_crtc_scm_call(VMID_CP_PIXEL);
1896 if (ret)
1897 goto error;
1898
1899 ret = sde_kms_mmu_attach(sde_kms, true);
1900 if (ret) {
1901 SDE_ERROR("crtc: %d, failed to attach %d\n",
1902 crtc->base.id,
1903 ret);
1904 goto error;
1905 }
1906 smmu_state->state = ATTACHED;
1907 break;
1908 default:
1909 break;
1910 }
1911
1912 SDE_DEBUG("crtc: %d, old_state %d new_state %d\n", crtc->base.id,
1913 old_smmu_state,
1914 smmu_state->state);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001915 smmu_state->transition_type = NONE;
1916
1917error:
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001918 smmu_state->transition_error = ret ? true : false;
Veera Sundaram Sankaranfd792402017-10-13 12:50:41 -07001919 SDE_EVT32(DRMID(crtc), smmu_state->state, smmu_state->transition_type,
1920 smmu_state->transition_error, ret,
1921 SDE_EVTLOG_FUNC_EXIT);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001922 return ret;
1923}
1924
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301925/**
1926 * _sde_crtc_dest_scaler_setup - Set up dest scaler block
1927 * @crtc: Pointer to drm crtc
1928 */
1929static void _sde_crtc_dest_scaler_setup(struct drm_crtc *crtc)
1930{
1931 struct sde_crtc *sde_crtc;
1932 struct sde_crtc_state *cstate;
1933 struct sde_hw_mixer *hw_lm;
1934 struct sde_hw_ctl *hw_ctl;
1935 struct sde_hw_ds *hw_ds;
1936 struct sde_hw_ds_cfg *cfg;
1937 struct sde_kms *kms;
1938 u32 flush_mask = 0, op_mode = 0;
1939 u32 lm_idx = 0, num_mixers = 0;
1940 int i, count = 0;
1941
1942 if (!crtc)
1943 return;
1944
1945 sde_crtc = to_sde_crtc(crtc);
1946 cstate = to_sde_crtc_state(crtc->state);
1947 kms = _sde_crtc_get_kms(crtc);
1948 num_mixers = sde_crtc->num_mixers;
1949
1950 SDE_DEBUG("crtc%d\n", crtc->base.id);
1951
1952 if (!cstate->ds_dirty) {
1953 SDE_DEBUG("no change in settings, skip commit\n");
1954 } else if (!kms || !kms->catalog) {
1955 SDE_ERROR("invalid parameters\n");
1956 } else if (!kms->catalog->mdp[0].has_dest_scaler) {
1957 SDE_DEBUG("dest scaler feature not supported\n");
1958 } else if (num_mixers > CRTC_DUAL_MIXERS) {
 1959		SDE_ERROR("invalid number of mixers: %d\n", num_mixers);
1960 } else if (!sde_crtc->scl3_lut_cfg->is_configured) {
1961 SDE_DEBUG("no LUT data available\n");
1962 } else {
1963 count = cstate->num_ds_enabled ? cstate->num_ds : num_mixers;
1964
1965 for (i = 0; i < count; i++) {
1966 cfg = &cstate->ds_cfg[i];
1967
1968 if (!cfg->flags)
1969 continue;
1970
1971 lm_idx = cfg->ndx;
1972 hw_lm = sde_crtc->mixers[lm_idx].hw_lm;
1973 hw_ctl = sde_crtc->mixers[lm_idx].hw_ctl;
1974 hw_ds = sde_crtc->mixers[lm_idx].hw_ds;
1975
1976 /* Setup op mode - Dual/single */
1977 if (cfg->flags & SDE_DRM_DESTSCALER_ENABLE)
1978 op_mode |= BIT(hw_ds->idx - DS_0);
1979
1980 if ((i == count-1) && hw_ds->ops.setup_opmode) {
1981 op_mode |= (cstate->num_ds_enabled ==
1982 CRTC_DUAL_MIXERS) ?
1983 SDE_DS_OP_MODE_DUAL : 0;
1984 hw_ds->ops.setup_opmode(hw_ds, op_mode);
1985 SDE_EVT32(DRMID(crtc), op_mode);
1986 }
1987
1988 /* Setup scaler */
1989 if ((cfg->flags & SDE_DRM_DESTSCALER_SCALE_UPDATE) ||
1990 (cfg->flags &
1991 SDE_DRM_DESTSCALER_ENHANCER_UPDATE)) {
1992 if (hw_ds->ops.setup_scaler)
1993 hw_ds->ops.setup_scaler(hw_ds,
1994 cfg->scl3_cfg,
1995 sde_crtc->scl3_lut_cfg);
1996
1997 /**
 1998				 * Clear the flags so the block is not reprogrammed
 1999				 * on every commit when there are no updates
2000 */
2001 cfg->flags &= ~SDE_DRM_DESTSCALER_SCALE_UPDATE;
2002 cfg->flags &=
2003 ~SDE_DRM_DESTSCALER_ENHANCER_UPDATE;
2004 }
2005
2006 /*
 2007			 * Dest scaler shares the flush bit of the LM it is attached to
2008 */
2009 if (cfg->set_lm_flush && hw_lm && hw_ctl &&
2010 hw_ctl->ops.get_bitmask_mixer) {
2011 flush_mask = hw_ctl->ops.get_bitmask_mixer(
2012 hw_ctl, hw_lm->idx);
 2013				SDE_DEBUG("Set lm[%d] flush = %d\n",
2014 hw_lm->idx, flush_mask);
2015 hw_ctl->ops.update_pending_flush(hw_ctl,
2016 flush_mask);
2017 }
2018 cfg->set_lm_flush = false;
2019 }
2020 cstate->ds_dirty = false;
2021 }
2022}
2023
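/**
 * sde_crtc_prepare_commit - callback to prepare for a new commit
 * @crtc: Pointer to drm crtc structure
 * @old_state: Pointer to drm crtc old state structure
 *
 * Caches the connectors attached to this crtc, prepares their fences,
 * reserves a retire event slot and prepares the main output fence.
 */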
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002024void sde_crtc_prepare_commit(struct drm_crtc *crtc,
2025 struct drm_crtc_state *old_state)
Clarence Ip24f80662016-06-13 19:05:32 -04002026{
2027 struct sde_crtc *sde_crtc;
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002028 struct sde_crtc_state *cstate;
2029 struct drm_connector *conn;
Dhaval Patel5023c3c2017-08-22 12:40:11 -07002030 struct sde_crtc_retire_event *retire_event = NULL;
2031 unsigned long flags;
2032 int i;
Clarence Ip24f80662016-06-13 19:05:32 -04002033
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002034 if (!crtc || !crtc->state) {
Clarence Ip24f80662016-06-13 19:05:32 -04002035 SDE_ERROR("invalid crtc\n");
2036 return;
2037 }
2038
2039 sde_crtc = to_sde_crtc(crtc);
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002040 cstate = to_sde_crtc_state(crtc->state);
Dhaval Patel6c666622017-03-21 23:02:59 -07002041 SDE_EVT32_VERBOSE(DRMID(crtc));
Clarence Ip24f80662016-06-13 19:05:32 -04002042
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002043 /* identify connectors attached to this crtc */
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002044 cstate->num_connectors = 0;
2045
2046 drm_for_each_connector(conn, crtc->dev)
2047 if (conn->state && conn->state->crtc == crtc &&
2048 cstate->num_connectors < MAX_CONNECTORS) {
2049 cstate->connectors[cstate->num_connectors++] = conn;
2050 sde_connector_prepare_fence(conn);
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002051 }
2052
Dhaval Patel5023c3c2017-08-22 12:40:11 -07002053 for (i = 0; i < SDE_CRTC_FRAME_EVENT_SIZE; i++) {
2054 retire_event = &sde_crtc->retire_events[i];
2055 if (list_empty(&retire_event->list))
2056 break;
2057 retire_event = NULL;
2058 }
2059
2060 if (retire_event) {
2061 retire_event->num_connectors = cstate->num_connectors;
2062 for (i = 0; i < cstate->num_connectors; i++)
2063 retire_event->connectors[i] = cstate->connectors[i];
2064
2065 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
2066 list_add_tail(&retire_event->list,
2067 &sde_crtc->retire_event_list);
2068 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
2069 } else {
2070 SDE_ERROR("crtc%d retire event overflow\n", crtc->base.id);
2071 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_ERROR);
2072 }
2073
Clarence Ip0d0e96d2016-10-24 18:13:13 -04002074 /* prepare main output fence */
Clarence Ip24f80662016-06-13 19:05:32 -04002075 sde_fence_prepare(&sde_crtc->output_fence);
2076}
2077
Abhinav Kumarf2e94b52017-02-09 20:27:24 -08002078/**
2079 * _sde_crtc_complete_flip - signal pending page_flip events
2080 * Any pending vblank events are added to the vblank_event_list
2081 * so that the next vblank interrupt shall signal them.
2082 * However PAGE_FLIP events are not handled through the vblank_event_list.
2083 * This API signals any pending PAGE_FLIP events requested through
 2084 * DRM_IOCTL_MODE_PAGE_FLIP, which are cached in sde_crtc->event.
 2085 * If file != NULL, this is the potential cancel-flip path from preclose.
2086 * @crtc: Pointer to drm crtc structure
2087 * @file: Pointer to drm file
2088 */
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002089static void _sde_crtc_complete_flip(struct drm_crtc *crtc,
2090 struct drm_file *file)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002091{
2092 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
2093 struct drm_device *dev = crtc->dev;
2094 struct drm_pending_vblank_event *event;
2095 unsigned long flags;
2096
2097 spin_lock_irqsave(&dev->event_lock, flags);
2098 event = sde_crtc->event;
2099 if (event) {
2100 /* if regular vblank case (!file) or if cancel-flip from
2101 * preclose on file that requested flip, then send the
2102 * event:
2103 */
2104 if (!file || (event->base.file_priv == file)) {
2105 sde_crtc->event = NULL;
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002106 DRM_DEBUG_VBL("%s: send event: %pK\n",
Dhaval Patelec10fad2016-08-22 14:40:48 -07002107 sde_crtc->name, event);
Dhaval Patela5f75952017-07-25 11:17:41 -07002108 SDE_EVT32_VERBOSE(DRMID(crtc));
Lloyd Atkinsonac933642016-09-14 11:52:00 -04002109 drm_crtc_send_vblank_event(crtc, event);
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002110 }
2111 }
2112 spin_unlock_irqrestore(&dev->event_lock, flags);
2113}
2114
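/**
 * sde_crtc_get_intf_mode - query the interface mode of the attached encoder
 * @crtc: Pointer to drm crtc structure
 * Returns: Interface mode of the first encoder attached to this crtc,
 *	INTF_MODE_NONE if no encoder is attached or on error
 */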
Alan Kwong3e985f02017-02-12 15:08:44 -08002115enum sde_intf_mode sde_crtc_get_intf_mode(struct drm_crtc *crtc)
2116{
2117 struct drm_encoder *encoder;
2118
2119 if (!crtc || !crtc->dev) {
2120 SDE_ERROR("invalid crtc\n");
2121 return INTF_MODE_NONE;
2122 }
2123
2124 drm_for_each_encoder(encoder, crtc->dev)
2125 if (encoder->crtc == crtc)
2126 return sde_encoder_get_intf_mode(encoder);
2127
2128 return INTF_MODE_NONE;
2129}
2130
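/**
 * sde_crtc_vblank_cb - vblank callback registered with the encoder
 * @data: Pointer to drm crtc structure, passed as opaque callback data
 *
 * Updates the vblank callback statistics, signals any pending page flip
 * event and forwards the vblank to the drm core.
 */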
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002131static void sde_crtc_vblank_cb(void *data)
2132{
2133 struct drm_crtc *crtc = (struct drm_crtc *)data;
Alan Kwong07da0982016-11-04 12:57:45 -04002134 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
2135
2136 /* keep statistics on vblank callback - with auto reset via debugfs */
2137 if (ktime_equal(sde_crtc->vblank_cb_time, ktime_set(0, 0)))
2138 sde_crtc->vblank_cb_time = ktime_get();
2139 else
2140 sde_crtc->vblank_cb_count++;
Abhinav Kumarf2e94b52017-02-09 20:27:24 -08002141 _sde_crtc_complete_flip(crtc, NULL);
Lloyd Atkinsonac933642016-09-14 11:52:00 -04002142 drm_crtc_handle_vblank(crtc);
Lloyd Atkinson9eabe7a2016-09-14 13:39:15 -04002143 DRM_DEBUG_VBL("crtc%d\n", crtc->base.id);
Dhaval Patel6c666622017-03-21 23:02:59 -07002144 SDE_EVT32_VERBOSE(DRMID(crtc));
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002145}
2146
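/**
 * _sde_crtc_retire_event - signal retire fences for a completed frame
 * @crtc: Pointer to drm crtc structure
 * @ts: Timestamp to attach to the retire fences
 *
 * Dequeues the oldest retire event queued during prepare_commit and
 * completes the commit on each of its cached connectors.
 */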
Dhaval Patel5023c3c2017-08-22 12:40:11 -07002147static void _sde_crtc_retire_event(struct drm_crtc *crtc, ktime_t ts)
2148{
2149 struct sde_crtc_retire_event *retire_event;
2150 struct sde_crtc *sde_crtc;
2151 unsigned long flags;
2152 int i;
2153
2154 if (!crtc) {
2155 SDE_ERROR("invalid param\n");
2156 return;
2157 }
2158
2159 sde_crtc = to_sde_crtc(crtc);
2160 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
2161 retire_event = list_first_entry_or_null(&sde_crtc->retire_event_list,
2162 struct sde_crtc_retire_event, list);
2163 if (retire_event)
2164 list_del_init(&retire_event->list);
2165 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
2166
2167 if (!retire_event) {
2168 SDE_ERROR("crtc%d retire event without kickoff\n",
2169 crtc->base.id);
2170 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_ERROR);
2171 return;
2172 }
2173
2174 SDE_ATRACE_BEGIN("signal_retire_fence");
2175 for (i = 0; (i < retire_event->num_connectors) &&
2176 retire_event->connectors[i]; ++i)
2177 sde_connector_complete_commit(
2178 retire_event->connectors[i], ts);
2179 SDE_ATRACE_END("signal_retire_fence");
2180}
2181
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002182/* _sde_crtc_idle_notify - signal idle timeout to client */
2183static void _sde_crtc_idle_notify(struct sde_crtc *sde_crtc)
2184{
2185 struct drm_crtc *crtc;
2186 struct drm_event event;
2187 int ret = 0;
2188
2189 if (!sde_crtc) {
2190 SDE_ERROR("invalid sde crtc\n");
2191 return;
2192 }
2193
2194 crtc = &sde_crtc->base;
2195 event.type = DRM_EVENT_IDLE_NOTIFY;
2196 event.length = sizeof(u32);
2197 msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
2198 (u8 *)&ret);
2199
2200 SDE_DEBUG("crtc:%d idle timeout notified\n", crtc->base.id);
2201}
2202
2203/*
 2204 * _sde_crtc_handle_event - crtc frame event handler.
2205 * This API must manage only non-IRQ context events.
2206 */
2207static bool _sde_crtc_handle_event(struct sde_crtc *sde_crtc, u32 event)
2208{
2209 bool event_processed = false;
2210
2211 /**
 2212	 * idle events originate from the commit thread and can be processed
 2213	 * in the same context
2214 */
2215 if (event & SDE_ENCODER_FRAME_EVENT_IDLE) {
2216 _sde_crtc_idle_notify(sde_crtc);
2217 event_processed = true;
2218 }
2219
2220 return event_processed;
2221}
2222
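/**
 * sde_crtc_frame_event_work - kthread work handler for crtc frame events
 * @work: Pointer to the kthread work item embedded in the frame event
 *
 * Runs in the crtc event thread. Releases bandwidth once the last pending
 * frame completes, updates core perf, signals release and retire fences
 * and completes the frame_done completion for any waiters.
 */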
Alan Kwong628d19e2016-10-31 13:50:13 -04002223static void sde_crtc_frame_event_work(struct kthread_work *work)
2224{
Alan Kwong67a3f792016-11-01 23:16:53 -04002225 struct msm_drm_private *priv;
Alan Kwong628d19e2016-10-31 13:50:13 -04002226 struct sde_crtc_frame_event *fevent;
2227 struct drm_crtc *crtc;
2228 struct sde_crtc *sde_crtc;
2229 struct sde_kms *sde_kms;
2230 unsigned long flags;
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002231 bool frame_done = false;
Alan Kwong628d19e2016-10-31 13:50:13 -04002232
2233 if (!work) {
2234 SDE_ERROR("invalid work handle\n");
2235 return;
2236 }
2237
2238 fevent = container_of(work, struct sde_crtc_frame_event, work);
Alan Kwonga1939682017-05-05 11:30:08 -07002239 if (!fevent->crtc || !fevent->crtc->state) {
Alan Kwong628d19e2016-10-31 13:50:13 -04002240 SDE_ERROR("invalid crtc\n");
2241 return;
2242 }
2243
2244 crtc = fevent->crtc;
2245 sde_crtc = to_sde_crtc(crtc);
2246
2247 sde_kms = _sde_crtc_get_kms(crtc);
2248 if (!sde_kms) {
2249 SDE_ERROR("invalid kms handle\n");
2250 return;
2251 }
Alan Kwong67a3f792016-11-01 23:16:53 -04002252 priv = sde_kms->dev->dev_private;
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07002253 SDE_ATRACE_BEGIN("crtc_frame_event");
Alan Kwong628d19e2016-10-31 13:50:13 -04002254
2255 SDE_DEBUG("crtc%d event:%u ts:%lld\n", crtc->base.id, fevent->event,
2256 ktime_to_ns(fevent->ts));
2257
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002258 SDE_EVT32_VERBOSE(DRMID(crtc), fevent->event, SDE_EVTLOG_FUNC_ENTRY);
2259
2260 if (fevent->event & (SDE_ENCODER_FRAME_EVENT_DONE
2261 | SDE_ENCODER_FRAME_EVENT_ERROR
2262 | SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
Alan Kwong628d19e2016-10-31 13:50:13 -04002263
2264 if (atomic_read(&sde_crtc->frame_pending) < 1) {
2265 /* this should not happen */
2266 SDE_ERROR("crtc%d ts:%lld invalid frame_pending:%d\n",
2267 crtc->base.id,
2268 ktime_to_ns(fevent->ts),
2269 atomic_read(&sde_crtc->frame_pending));
Dhaval Patel6c666622017-03-21 23:02:59 -07002270 SDE_EVT32(DRMID(crtc), fevent->event,
2271 SDE_EVTLOG_FUNC_CASE1);
Alan Kwong628d19e2016-10-31 13:50:13 -04002272 } else if (atomic_dec_return(&sde_crtc->frame_pending) == 0) {
2273 /* release bandwidth and other resources */
2274 SDE_DEBUG("crtc%d ts:%lld last pending\n",
2275 crtc->base.id,
2276 ktime_to_ns(fevent->ts));
Dhaval Patel6c666622017-03-21 23:02:59 -07002277 SDE_EVT32(DRMID(crtc), fevent->event,
2278 SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07002279 sde_core_perf_crtc_release_bw(crtc);
Alan Kwong628d19e2016-10-31 13:50:13 -04002280 } else {
Dhaval Patel6c666622017-03-21 23:02:59 -07002281 SDE_EVT32_VERBOSE(DRMID(crtc), fevent->event,
2282 SDE_EVTLOG_FUNC_CASE3);
Alan Kwong628d19e2016-10-31 13:50:13 -04002283 }
Alan Kwonga1939682017-05-05 11:30:08 -07002284
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002285 if (fevent->event & SDE_ENCODER_FRAME_EVENT_DONE)
Alan Kwonga1939682017-05-05 11:30:08 -07002286 sde_core_perf_crtc_update(crtc, 0, false);
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002287
2288 if (fevent->event & (SDE_ENCODER_FRAME_EVENT_DONE
2289 | SDE_ENCODER_FRAME_EVENT_ERROR))
2290 frame_done = true;
2291 }
2292
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07002293 if (fevent->event & SDE_ENCODER_FRAME_EVENT_SIGNAL_RELEASE_FENCE) {
2294 SDE_ATRACE_BEGIN("signal_release_fence");
Dhaval Patelfd8f7742017-08-10 13:11:22 -07002295 sde_fence_signal(&sde_crtc->output_fence, fevent->ts, false);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07002296 SDE_ATRACE_END("signal_release_fence");
2297 }
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002298
Dhaval Patel5023c3c2017-08-22 12:40:11 -07002299 if (fevent->event & SDE_ENCODER_FRAME_EVENT_SIGNAL_RETIRE_FENCE)
2300 /* this api should be called without spin_lock */
2301 _sde_crtc_retire_event(crtc, fevent->ts);
Alan Kwong628d19e2016-10-31 13:50:13 -04002302
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05002303 if (fevent->event & SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)
2304 SDE_ERROR("crtc%d ts:%lld received panel dead event\n",
2305 crtc->base.id, ktime_to_ns(fevent->ts));
2306
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002307 if (frame_done)
2308 complete_all(&sde_crtc->frame_done_comp);
2309
Alan Kwong628d19e2016-10-31 13:50:13 -04002310 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
2311 list_add_tail(&fevent->list, &sde_crtc->frame_event_list);
2312 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07002313 SDE_ATRACE_END("crtc_frame_event");
Alan Kwong628d19e2016-10-31 13:50:13 -04002314}
2315
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002316/*
 2317 * sde_crtc_frame_event_cb - crtc frame event callback API. The CRTC module
 2318 * registers this callback with the encoder for all frame events such as
 2319 * release_fence, retire_fence, frame_error, frame_done and idle_timeout.
 2320 * The encoder may invoke it from different contexts - IRQ, user thread,
 2321 * commit thread, etc. Each event should therefore be handled in the proper
 2322 * task context to avoid scheduling delays and to keep the IRQ context's
 2323 * bottom-half processing short.
2324 */
Alan Kwong628d19e2016-10-31 13:50:13 -04002325static void sde_crtc_frame_event_cb(void *data, u32 event)
2326{
2327 struct drm_crtc *crtc = (struct drm_crtc *)data;
2328 struct sde_crtc *sde_crtc;
2329 struct msm_drm_private *priv;
Alan Kwong628d19e2016-10-31 13:50:13 -04002330 struct sde_crtc_frame_event *fevent;
2331 unsigned long flags;
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07002332 u32 crtc_id;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002333 bool event_processed = false;
Alan Kwong628d19e2016-10-31 13:50:13 -04002334
2335 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
2336 SDE_ERROR("invalid parameters\n");
2337 return;
2338 }
2339 sde_crtc = to_sde_crtc(crtc);
2340 priv = crtc->dev->dev_private;
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07002341 crtc_id = drm_crtc_index(crtc);
Alan Kwong628d19e2016-10-31 13:50:13 -04002342
2343 SDE_DEBUG("crtc%d\n", crtc->base.id);
Ingrid Gallardo79b44392017-05-30 16:30:52 -07002344 SDE_EVT32_VERBOSE(DRMID(crtc), event);
Alan Kwong628d19e2016-10-31 13:50:13 -04002345
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002346 /* try to process the event in caller context */
2347 event_processed = _sde_crtc_handle_event(sde_crtc, event);
2348 if (event_processed)
2349 return;
2350
Alan Kwong628d19e2016-10-31 13:50:13 -04002351 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
Lloyd Atkinson78831f82016-12-09 11:24:56 -05002352 fevent = list_first_entry_or_null(&sde_crtc->frame_event_list,
2353 struct sde_crtc_frame_event, list);
2354 if (fevent)
2355 list_del_init(&fevent->list);
Alan Kwong628d19e2016-10-31 13:50:13 -04002356 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
2357
Lloyd Atkinson78831f82016-12-09 11:24:56 -05002358 if (!fevent) {
Alan Kwong628d19e2016-10-31 13:50:13 -04002359 SDE_ERROR("crtc%d event %d overflow\n",
2360 crtc->base.id, event);
2361 SDE_EVT32(DRMID(crtc), event);
2362 return;
2363 }
2364
Alan Kwong628d19e2016-10-31 13:50:13 -04002365 fevent->event = event;
2366 fevent->crtc = crtc;
2367 fevent->ts = ktime_get();
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07002368 kthread_queue_work(&priv->event_thread[crtc_id].worker, &fevent->work);
Alan Kwong628d19e2016-10-31 13:50:13 -04002369}
2370
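/**
 * sde_crtc_complete_commit - callback to finish the current commit
 * @crtc: Pointer to drm crtc structure
 * @old_state: Pointer to drm crtc old state structure
 *
 * Completes any secure transition that was deferred to the post-commit
 * stage.
 */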
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07002371void sde_crtc_complete_commit(struct drm_crtc *crtc,
2372 struct drm_crtc_state *old_state)
2373{
2374 struct sde_crtc *sde_crtc;
2375 struct sde_crtc_smmu_state_data *smmu_state;
2376
2377 if (!crtc || !crtc->state) {
2378 SDE_ERROR("invalid crtc\n");
2379 return;
2380 }
2381
2382 sde_crtc = to_sde_crtc(crtc);
2383 SDE_EVT32_VERBOSE(DRMID(crtc));
2384 smmu_state = &sde_crtc->smmu_state;
2385
2386 /* complete secure transitions if any */
2387 if (smmu_state->transition_type == POST_COMMIT)
2388 sde_crtc_secure_ctrl(crtc, true);
2389}
2390
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002391/* _sde_crtc_set_idle_timeout - update idle timeout wait duration */
2392static void _sde_crtc_set_idle_timeout(struct drm_crtc *crtc, u64 val)
2393{
2394 struct drm_encoder *encoder;
2395
2396 if (!crtc) {
2397 SDE_ERROR("invalid crtc\n");
2398 return;
2399 }
2400
2401 drm_for_each_encoder(encoder, crtc->dev) {
2402 if (encoder->crtc != crtc)
2403 continue;
2404
2405 sde_encoder_set_idle_timeout(encoder, (u32) val);
2406 }
2407}
2408
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002409/**
Clarence Ipcae1bb62016-07-07 12:07:13 -04002410 * _sde_crtc_set_input_fence_timeout - update ns version of input fence timeout
2411 * @cstate: Pointer to sde crtc state
2412 */
2413static void _sde_crtc_set_input_fence_timeout(struct sde_crtc_state *cstate)
2414{
2415 if (!cstate) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07002416 SDE_ERROR("invalid cstate\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002417 return;
2418 }
2419 cstate->input_fence_timeout_ns =
2420 sde_crtc_get_property(cstate, CRTC_PROP_INPUT_FENCE_TIMEOUT);
2421 cstate->input_fence_timeout_ns *= NSEC_PER_MSEC;
2422}
2423
2424/**
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002425 * _sde_crtc_set_dim_layer_v1 - copy dim layer settings from userspace
2426 * @cstate: Pointer to sde crtc state
 2427 * @usr_ptr: User pointer to a sde_drm_dim_layer_v1 struct
2428 */
2429static void _sde_crtc_set_dim_layer_v1(struct sde_crtc_state *cstate,
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302430 void __user *usr_ptr)
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002431{
2432 struct sde_drm_dim_layer_v1 dim_layer_v1;
2433 struct sde_drm_dim_layer_cfg *user_cfg;
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002434 struct sde_hw_dim_layer *dim_layer;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002435 u32 count, i;
2436
2437 if (!cstate) {
2438 SDE_ERROR("invalid cstate\n");
2439 return;
2440 }
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002441 dim_layer = cstate->dim_layer;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002442
2443 if (!usr_ptr) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002444 SDE_DEBUG("dim_layer data removed\n");
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002445 return;
2446 }
2447
2448 if (copy_from_user(&dim_layer_v1, usr_ptr, sizeof(dim_layer_v1))) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002449 SDE_ERROR("failed to copy dim_layer data\n");
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002450 return;
2451 }
2452
2453 count = dim_layer_v1.num_layers;
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002454 if (count > SDE_MAX_DIM_LAYERS) {
 2455		SDE_ERROR("invalid number of dim_layers:%d\n", count);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002456 return;
2457 }
2458
2459 /* populate from user space */
2460 cstate->num_dim_layers = count;
2461 for (i = 0; i < count; i++) {
2462 user_cfg = &dim_layer_v1.layer_cfg[i];
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002463
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002464 dim_layer[i].flags = user_cfg->flags;
2465 dim_layer[i].stage = user_cfg->stage + SDE_STAGE_0;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002466
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002467 dim_layer[i].rect.x = user_cfg->rect.x1;
2468 dim_layer[i].rect.y = user_cfg->rect.y1;
2469 dim_layer[i].rect.w = user_cfg->rect.x2 - user_cfg->rect.x1;
2470 dim_layer[i].rect.h = user_cfg->rect.y2 - user_cfg->rect.y1;
2471
2472 dim_layer[i].color_fill = (struct sde_mdss_color) {
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002473 user_cfg->color_fill.color_0,
2474 user_cfg->color_fill.color_1,
2475 user_cfg->color_fill.color_2,
2476 user_cfg->color_fill.color_3,
2477 };
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002478
2479 SDE_DEBUG("dim_layer[%d] - flags:%d, stage:%d\n",
2480 i, dim_layer[i].flags, dim_layer[i].stage);
2481 SDE_DEBUG(" rect:{%d,%d,%d,%d}, color:{%d,%d,%d,%d}\n",
2482 dim_layer[i].rect.x, dim_layer[i].rect.y,
2483 dim_layer[i].rect.w, dim_layer[i].rect.h,
2484 dim_layer[i].color_fill.color_0,
2485 dim_layer[i].color_fill.color_1,
2486 dim_layer[i].color_fill.color_2,
2487 dim_layer[i].color_fill.color_3);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002488 }
2489}
2490
2491/**
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302492 * _sde_crtc_dest_scaler_init - allocate memory for scaler lut
2493 * @sde_crtc : Pointer to sde crtc
2494 * @catalog : Pointer to mdss catalog info
2495 */
2496static void _sde_crtc_dest_scaler_init(struct sde_crtc *sde_crtc,
2497 struct sde_mdss_cfg *catalog)
2498{
2499 if (!sde_crtc || !catalog)
2500 return;
2501
2502 if (!catalog->mdp[0].has_dest_scaler) {
2503 SDE_DEBUG("dest scaler feature not supported\n");
2504 return;
2505 }
2506
2507 sde_crtc->scl3_lut_cfg = kzalloc(sizeof(struct sde_hw_scaler3_lut_cfg),
2508 GFP_KERNEL);
2509 if (!sde_crtc->scl3_lut_cfg)
 2510		SDE_ERROR("failed to create scale LUT for dest scaler\n");
2511}
2512
2513/**
2514 * _sde_crtc_set_dest_scaler - copy dest scaler settings from userspace
2515 * @sde_crtc : Pointer to sde crtc
2516 * @cstate : Pointer to sde crtc state
2517 * @usr_ptr: User ptr for sde_drm_dest_scaler_data struct
2518 */
2519static int _sde_crtc_set_dest_scaler(struct sde_crtc *sde_crtc,
2520 struct sde_crtc_state *cstate,
2521 void __user *usr_ptr)
2522{
2523 struct sde_drm_dest_scaler_data ds_data;
2524 struct sde_drm_dest_scaler_cfg *ds_cfg_usr;
2525 struct sde_drm_scaler_v2 scaler_v2;
2526 void __user *scaler_v2_usr;
2527 int i, count, ret = 0;
2528
2529 if (!sde_crtc || !cstate) {
2530 SDE_ERROR("invalid sde_crtc/state\n");
2531 return -EINVAL;
2532 }
2533
2534 SDE_DEBUG("crtc %s\n", sde_crtc->name);
2535
2536 cstate->num_ds = 0;
2537 cstate->ds_dirty = false;
2538 if (!usr_ptr) {
2539 SDE_DEBUG("ds data removed\n");
2540 return 0;
2541 }
2542
2543 if (copy_from_user(&ds_data, usr_ptr, sizeof(ds_data))) {
2544 SDE_ERROR("failed to copy dest scaler data from user\n");
2545 return -EINVAL;
2546 }
2547
2548 count = ds_data.num_dest_scaler;
Sravanthi Kollukuduru4ff41642017-10-06 18:17:34 +05302549 if (!count) {
2550 SDE_DEBUG("no ds data available\n");
2551 return 0;
2552 }
2553
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302554 if (!sde_crtc->num_mixers || count > sde_crtc->num_mixers ||
2555 (count && (count != sde_crtc->num_mixers) &&
2556 !(ds_data.ds_cfg[0].flags & SDE_DRM_DESTSCALER_PU_ENABLE))) {
2557 SDE_ERROR("invalid config:num ds(%d), mixers(%d),flags(%d)\n",
2558 count, sde_crtc->num_mixers, ds_data.ds_cfg[0].flags);
2559 return -EINVAL;
2560 }
2561
2562 /* Populate from user space */
2563 for (i = 0; i < count; i++) {
2564 ds_cfg_usr = &ds_data.ds_cfg[i];
2565
2566 cstate->ds_cfg[i].ndx = ds_cfg_usr->index;
2567 cstate->ds_cfg[i].flags = ds_cfg_usr->flags;
2568 cstate->ds_cfg[i].lm_width = ds_cfg_usr->lm_width;
2569 cstate->ds_cfg[i].lm_height = ds_cfg_usr->lm_height;
2570 cstate->ds_cfg[i].scl3_cfg = NULL;
2571
2572 if (ds_cfg_usr->scaler_cfg) {
2573 scaler_v2_usr =
2574 (void __user *)((uintptr_t)ds_cfg_usr->scaler_cfg);
2575
2576 memset(&scaler_v2, 0, sizeof(scaler_v2));
2577
2578 cstate->ds_cfg[i].scl3_cfg =
2579 kzalloc(sizeof(struct sde_hw_scaler3_cfg),
2580 GFP_KERNEL);
2581
2582 if (!cstate->ds_cfg[i].scl3_cfg) {
2583 ret = -ENOMEM;
2584 goto err;
2585 }
2586
2587 if (copy_from_user(&scaler_v2, scaler_v2_usr,
2588 sizeof(scaler_v2))) {
2589 SDE_ERROR("scale data:copy from user failed\n");
2590 ret = -EINVAL;
2591 goto err;
2592 }
2593
2594 sde_set_scaler_v2(cstate->ds_cfg[i].scl3_cfg,
2595 &scaler_v2);
2596
2597 SDE_DEBUG("en(%d)dir(%d)de(%d) src(%dx%d) dst(%dx%d)\n",
2598 scaler_v2.enable, scaler_v2.dir_en,
2599 scaler_v2.de.enable, scaler_v2.src_width[0],
2600 scaler_v2.src_height[0], scaler_v2.dst_width,
2601 scaler_v2.dst_height);
2602 SDE_EVT32_VERBOSE(DRMID(&sde_crtc->base),
2603 scaler_v2.enable, scaler_v2.dir_en,
2604 scaler_v2.de.enable, scaler_v2.src_width[0],
2605 scaler_v2.src_height[0], scaler_v2.dst_width,
2606 scaler_v2.dst_height);
2607 }
2608
2609 SDE_DEBUG("ds cfg[%d]-ndx(%d) flags(%d) lm(%dx%d)\n",
2610 i, ds_cfg_usr->index, ds_cfg_usr->flags,
2611 ds_cfg_usr->lm_width, ds_cfg_usr->lm_height);
2612 SDE_EVT32_VERBOSE(DRMID(&sde_crtc->base), i, ds_cfg_usr->index,
2613 ds_cfg_usr->flags, ds_cfg_usr->lm_width,
2614 ds_cfg_usr->lm_height);
2615 }
2616
2617 cstate->num_ds = count;
2618 cstate->ds_dirty = true;
2619 return 0;
2620
2621err:
2622 for (; i >= 0; i--)
2623 kfree(cstate->ds_cfg[i].scl3_cfg);
2624
2625 return ret;
2626}
2627
2628/**
2629 * _sde_crtc_check_dest_scaler_data - validate the dest scaler data
2630 * @crtc : Pointer to drm crtc
2631 * @state : Pointer to drm crtc state
2632 */
2633static int _sde_crtc_check_dest_scaler_data(struct drm_crtc *crtc,
2634 struct drm_crtc_state *state)
2635{
2636 struct sde_crtc *sde_crtc;
2637 struct sde_crtc_state *cstate;
2638 struct drm_display_mode *mode;
2639 struct sde_kms *kms;
2640 struct sde_hw_ds *hw_ds;
2641 struct sde_hw_ds_cfg *cfg;
2642 u32 i, ret = 0, lm_idx;
2643 u32 num_ds_enable = 0;
2644 u32 max_in_width = 0, max_out_width = 0;
2645 u32 prev_lm_width = 0, prev_lm_height = 0;
2646
2647 if (!crtc || !state)
2648 return -EINVAL;
2649
2650 sde_crtc = to_sde_crtc(crtc);
2651 cstate = to_sde_crtc_state(state);
2652 kms = _sde_crtc_get_kms(crtc);
2653 mode = &state->adjusted_mode;
2654
2655 SDE_DEBUG("crtc%d\n", crtc->base.id);
2656
2657 if (!cstate->ds_dirty && !cstate->num_ds_enabled) {
2658 SDE_DEBUG("dest scaler property not set, skip validation\n");
2659 return 0;
2660 }
2661
2662 if (!kms || !kms->catalog) {
2663 SDE_ERROR("invalid parameters\n");
2664 return -EINVAL;
2665 }
2666
2667 if (!kms->catalog->mdp[0].has_dest_scaler) {
2668 SDE_DEBUG("dest scaler feature not supported\n");
2669 return 0;
2670 }
2671
2672 if (!sde_crtc->num_mixers) {
2673 SDE_ERROR("mixers not allocated\n");
2674 return -EINVAL;
2675 }
2676
2677 /**
2678 * Check if sufficient hw resources are
2679 * available as per target caps & topology
2680 */
2681 if (sde_crtc->num_mixers > CRTC_DUAL_MIXERS) {
2682 SDE_ERROR("invalid config: mixers(%d) max(%d)\n",
2683 sde_crtc->num_mixers, CRTC_DUAL_MIXERS);
2684 ret = -EINVAL;
2685 goto err;
2686 }
2687
2688 for (i = 0; i < sde_crtc->num_mixers; i++) {
2689 if (!sde_crtc->mixers[i].hw_lm || !sde_crtc->mixers[i].hw_ds) {
2690 SDE_ERROR("insufficient HW resources allocated\n");
2691 ret = -EINVAL;
2692 goto err;
2693 }
2694 }
2695
2696 /**
2697 * Check if DS needs to be enabled or disabled
2698 * In case of enable, validate the data
2699 */
2700 if (!cstate->ds_dirty || !cstate->num_ds ||
2701 !(cstate->ds_cfg[0].flags & SDE_DRM_DESTSCALER_ENABLE)) {
2702 SDE_DEBUG("disable dest scaler,dirty(%d)num(%d)flags(%d)\n",
2703 cstate->ds_dirty, cstate->num_ds,
2704 cstate->ds_cfg[0].flags);
2705 goto disable;
2706 }
2707
2708 /**
 2709	 * The number of dest scalers shouldn't exceed the hw ds block count
 2710	 * and should also match the number of mixers, unless it is a partial
 2711	 * update left-only/right-only use case (currently PU + DS is not supported)
2712 */
2713 if (cstate->num_ds > kms->catalog->ds_count ||
2714 ((cstate->num_ds != sde_crtc->num_mixers) &&
2715 !(cstate->ds_cfg[0].flags & SDE_DRM_DESTSCALER_PU_ENABLE))) {
2716 SDE_ERROR("invalid cfg: num_ds(%d), hw_ds_cnt(%d) flags(%d)\n",
2717 cstate->num_ds, kms->catalog->ds_count,
2718 cstate->ds_cfg[0].flags);
2719 ret = -EINVAL;
2720 goto err;
2721 }
2722
2723 /* Validate the DS data */
2724 for (i = 0; i < cstate->num_ds; i++) {
2725 cfg = &cstate->ds_cfg[i];
2726 lm_idx = cfg->ndx;
2727
2728 /**
2729 * Validate against topology
 2730		 * The number of dest scalers should match the number of mixers
 2731		 * unless it is a partial update left-only/right-only use case
2732 */
2733 if (lm_idx >= sde_crtc->num_mixers || (i != lm_idx &&
2734 !(cfg->flags & SDE_DRM_DESTSCALER_PU_ENABLE))) {
2735 SDE_ERROR("invalid user data(%d):idx(%d), flags(%d)\n",
2736 i, lm_idx, cfg->flags);
2737 ret = -EINVAL;
2738 goto err;
2739 }
2740
2741 hw_ds = sde_crtc->mixers[lm_idx].hw_ds;
2742
2743 if (!max_in_width && !max_out_width) {
2744 max_in_width = hw_ds->scl->top->maxinputwidth;
2745 max_out_width = hw_ds->scl->top->maxoutputwidth;
2746
2747 if (cstate->num_ds == CRTC_DUAL_MIXERS)
2748 max_in_width -= SDE_DS_OVERFETCH_SIZE;
2749
2750 SDE_DEBUG("max DS width [%d,%d] for num_ds = %d\n",
2751 max_in_width, max_out_width, cstate->num_ds);
2752 }
2753
2754 /* Check LM width and height */
2755 if (cfg->lm_width > (mode->hdisplay/sde_crtc->num_mixers) ||
2756 cfg->lm_height > mode->vdisplay ||
2757 !cfg->lm_width || !cfg->lm_height) {
2758 SDE_ERROR("invalid lm size[%d,%d] display [%d,%d]\n",
2759 cfg->lm_width,
2760 cfg->lm_height,
2761 mode->hdisplay/sde_crtc->num_mixers,
2762 mode->vdisplay);
2763 ret = -E2BIG;
2764 goto err;
2765 }
2766
2767 if (!prev_lm_width && !prev_lm_height) {
2768 prev_lm_width = cfg->lm_width;
2769 prev_lm_height = cfg->lm_height;
2770 } else {
2771 if (cfg->lm_width != prev_lm_width ||
2772 cfg->lm_height != prev_lm_height) {
2773 SDE_ERROR("lm size:left[%d,%d], right[%d %d]\n",
2774 cfg->lm_width, cfg->lm_height,
2775 prev_lm_width, prev_lm_height);
2776 ret = -EINVAL;
2777 goto err;
2778 }
2779 }
2780
2781 /* Check scaler data */
2782 if (cfg->flags & SDE_DRM_DESTSCALER_SCALE_UPDATE ||
2783 cfg->flags & SDE_DRM_DESTSCALER_ENHANCER_UPDATE) {
2784 if (!cfg->scl3_cfg) {
2785 ret = -EINVAL;
2786 SDE_ERROR("null scale data\n");
2787 goto err;
2788 }
2789 if (cfg->scl3_cfg->src_width[0] > max_in_width ||
2790 cfg->scl3_cfg->dst_width > max_out_width ||
2791 !cfg->scl3_cfg->src_width[0] ||
2792 !cfg->scl3_cfg->dst_width) {
2793 SDE_ERROR("scale width(%d %d) for ds-%d:\n",
2794 cfg->scl3_cfg->src_width[0],
2795 cfg->scl3_cfg->dst_width,
2796 hw_ds->idx - DS_0);
2797 SDE_ERROR("scale_en = %d, DE_en =%d\n",
2798 cfg->scl3_cfg->enable,
2799 cfg->scl3_cfg->de.enable);
2800
2801 cfg->flags &=
2802 ~SDE_DRM_DESTSCALER_SCALE_UPDATE;
2803 cfg->flags &=
2804 ~SDE_DRM_DESTSCALER_ENHANCER_UPDATE;
2805
2806 ret = -EINVAL;
2807 goto err;
2808 }
2809 }
2810
2811 if (cfg->flags & SDE_DRM_DESTSCALER_ENABLE)
2812 num_ds_enable++;
2813
2814 /**
 2815		 * Validation successful; mark that an LM flush needs to be issued
2816 */
2817 cfg->set_lm_flush = true;
2818
2819 SDE_DEBUG("ds[%d]: flags = 0x%X\n",
2820 hw_ds->idx - DS_0, cfg->flags);
2821 }
2822
2823disable:
 2824	SDE_DEBUG("dest scaler enable status, old = %d, new = %d\n",
2825 cstate->num_ds_enabled, num_ds_enable);
2826 SDE_EVT32(DRMID(crtc), cstate->num_ds_enabled, num_ds_enable,
2827 cstate->ds_dirty);
2828
2829 if (cstate->num_ds_enabled != num_ds_enable) {
2830 /* Disabling destination scaler */
2831 if (!num_ds_enable) {
2832 for (i = 0; i < sde_crtc->num_mixers; i++) {
2833 cfg = &cstate->ds_cfg[i];
2834 cfg->ndx = i;
2835 /* Update scaler settings in disable case */
2836 cfg->flags = SDE_DRM_DESTSCALER_SCALE_UPDATE;
2837 cfg->scl3_cfg->enable = 0;
2838 cfg->scl3_cfg->de.enable = 0;
2839 cfg->set_lm_flush = true;
2840 }
2841 }
2842 cstate->num_ds_enabled = num_ds_enable;
2843 cstate->ds_dirty = true;
2844 }
2845
2846 return 0;
2847
2848err:
2849 cstate->ds_dirty = false;
2850 return ret;
2851}
2852
2853/**
Clarence Ipcae1bb62016-07-07 12:07:13 -04002854 * _sde_crtc_wait_for_fences - wait for incoming framebuffer sync fences
2855 * @crtc: Pointer to CRTC object
2856 */
2857static void _sde_crtc_wait_for_fences(struct drm_crtc *crtc)
2858{
2859 struct drm_plane *plane = NULL;
2860 uint32_t wait_ms = 1;
Clarence Ip8dedc232016-09-09 16:41:00 -04002861 ktime_t kt_end, kt_wait;
Dhaval Patel39323d42017-03-01 23:48:24 -08002862 int rc = 0;
Clarence Ipcae1bb62016-07-07 12:07:13 -04002863
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002864 SDE_DEBUG("\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002865
2866 if (!crtc || !crtc->state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07002867 SDE_ERROR("invalid crtc/state %pK\n", crtc);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002868 return;
2869 }
2870
2871 /* use monotonic timer to limit total fence wait time */
Clarence Ip8dedc232016-09-09 16:41:00 -04002872 kt_end = ktime_add_ns(ktime_get(),
2873 to_sde_crtc_state(crtc->state)->input_fence_timeout_ns);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002874
2875 /*
2876 * Wait for fences sequentially, as all of them need to be signalled
2877 * before we can proceed.
2878 *
2879 * Limit total wait time to INPUT_FENCE_TIMEOUT, but still call
2880 * sde_plane_wait_input_fence with wait_ms == 0 after the timeout so
2881 * that each plane can check its fence status and react appropriately
Dhaval Patel39323d42017-03-01 23:48:24 -08002882	 * if its fence has timed out. Retry the input fence wait if the wait
 2883	 * is interrupted by a signal (-ERESTARTSYS).
Clarence Ipcae1bb62016-07-07 12:07:13 -04002884 */
Narendra Muppalla77b32932017-05-10 13:53:11 -07002885 SDE_ATRACE_BEGIN("plane_wait_input_fence");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002886 drm_atomic_crtc_for_each_plane(plane, crtc) {
Dhaval Patel39323d42017-03-01 23:48:24 -08002887 do {
Clarence Ip8dedc232016-09-09 16:41:00 -04002888 kt_wait = ktime_sub(kt_end, ktime_get());
2889 if (ktime_compare(kt_wait, ktime_set(0, 0)) >= 0)
2890 wait_ms = ktime_to_ms(kt_wait);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002891 else
2892 wait_ms = 0;
Dhaval Patel39323d42017-03-01 23:48:24 -08002893
2894 rc = sde_plane_wait_input_fence(plane, wait_ms);
2895 } while (wait_ms && rc == -ERESTARTSYS);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002896 }
Narendra Muppalla77b32932017-05-10 13:53:11 -07002897 SDE_ATRACE_END("plane_wait_input_fence");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002898}
2899
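/**
 * _sde_crtc_setup_mixer_for_encoder - populate mixers reserved by an encoder
 * @crtc: Pointer to drm crtc structure
 * @enc: Pointer to drm encoder attached to this crtc
 *
 * Walks the resource manager reservations made for the encoder and fills
 * the crtc mixer array with the corresponding LM, CTL, DSPP and DS blocks.
 */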
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002900static void _sde_crtc_setup_mixer_for_encoder(
2901 struct drm_crtc *crtc,
2902 struct drm_encoder *enc)
2903{
2904 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002905 struct sde_kms *sde_kms = _sde_crtc_get_kms(crtc);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002906 struct sde_rm *rm = &sde_kms->rm;
2907 struct sde_crtc_mixer *mixer;
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002908 struct sde_hw_ctl *last_valid_ctl = NULL;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002909 int i;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302910 struct sde_rm_hw_iter lm_iter, ctl_iter, dspp_iter, ds_iter;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002911
2912 sde_rm_init_hw_iter(&lm_iter, enc->base.id, SDE_HW_BLK_LM);
2913 sde_rm_init_hw_iter(&ctl_iter, enc->base.id, SDE_HW_BLK_CTL);
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07002914 sde_rm_init_hw_iter(&dspp_iter, enc->base.id, SDE_HW_BLK_DSPP);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302915 sde_rm_init_hw_iter(&ds_iter, enc->base.id, SDE_HW_BLK_DS);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002916
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002917 /* Set up all the mixers and ctls reserved by this encoder */
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002918 for (i = sde_crtc->num_mixers; i < ARRAY_SIZE(sde_crtc->mixers); i++) {
2919 mixer = &sde_crtc->mixers[i];
2920
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002921 if (!sde_rm_get_hw(rm, &lm_iter))
2922 break;
2923 mixer->hw_lm = (struct sde_hw_mixer *)lm_iter.hw;
2924
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002925 /* CTL may be <= LMs, if <, multiple LMs controlled by 1 CTL */
2926 if (!sde_rm_get_hw(rm, &ctl_iter)) {
2927 SDE_DEBUG("no ctl assigned to lm %d, using previous\n",
Clarence Ip8e69ad02016-12-09 09:43:57 -05002928 mixer->hw_lm->idx - LM_0);
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002929 mixer->hw_ctl = last_valid_ctl;
2930 } else {
2931 mixer->hw_ctl = (struct sde_hw_ctl *)ctl_iter.hw;
2932 last_valid_ctl = mixer->hw_ctl;
2933 }
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002934
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002935 /* Shouldn't happen, mixers are always >= ctls */
2936 if (!mixer->hw_ctl) {
2937 SDE_ERROR("no valid ctls found for lm %d\n",
Clarence Ip8e69ad02016-12-09 09:43:57 -05002938 mixer->hw_lm->idx - LM_0);
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04002939 return;
2940 }
2941
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07002942 /* Dspp may be null */
2943 (void) sde_rm_get_hw(rm, &dspp_iter);
2944 mixer->hw_dspp = (struct sde_hw_dspp *)dspp_iter.hw;
2945
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302946 /* DS may be null */
2947 (void) sde_rm_get_hw(rm, &ds_iter);
2948 mixer->hw_ds = (struct sde_hw_ds *)ds_iter.hw;
2949
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002950 mixer->encoder = enc;
2951
2952 sde_crtc->num_mixers++;
Clarence Ipd9f9fa62016-09-09 13:42:32 -04002953 SDE_DEBUG("setup mixer %d: lm %d\n",
2954 i, mixer->hw_lm->idx - LM_0);
2955 SDE_DEBUG("setup mixer %d: ctl %d\n",
2956 i, mixer->hw_ctl->idx - CTL_0);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302957 if (mixer->hw_ds)
2958 SDE_DEBUG("setup mixer %d: ds %d\n",
2959 i, mixer->hw_ds->idx - DS_0);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002960 }
2961}
2962
2963static void _sde_crtc_setup_mixers(struct drm_crtc *crtc)
2964{
2965 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
2966 struct drm_encoder *enc;
2967
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002968 sde_crtc->num_mixers = 0;
Lloyd Atkinson94710bc2017-09-14 14:10:09 -04002969 sde_crtc->mixers_swapped = false;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002970 memset(sde_crtc->mixers, 0, sizeof(sde_crtc->mixers));
2971
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07002972 mutex_lock(&sde_crtc->crtc_lock);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002973 /* Check for mixers on all encoders attached to this crtc */
2974 list_for_each_entry(enc, &crtc->dev->mode_config.encoder_list, head) {
2975 if (enc->crtc != crtc)
2976 continue;
2977
2978 _sde_crtc_setup_mixer_for_encoder(crtc, enc);
2979 }
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05002980
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07002981 mutex_unlock(&sde_crtc->crtc_lock);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002982}
2983
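/*
 * _sde_crtc_setup_is_ppsplit - mark the crtc state as ppsplit if any of its
 * connectors uses the PPSPLIT topology
 */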
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05002984static void _sde_crtc_setup_is_ppsplit(struct drm_crtc_state *state)
2985{
2986 int i;
2987 struct sde_crtc_state *cstate;
2988
2989 cstate = to_sde_crtc_state(state);
2990
2991 cstate->is_ppsplit = false;
2992 for (i = 0; i < cstate->num_connectors; i++) {
2993 struct drm_connector *conn = cstate->connectors[i];
2994
2995 if (sde_connector_get_topology_name(conn) ==
2996 SDE_RM_TOPOLOGY_PPSPLIT)
2997 cstate->is_ppsplit = true;
2998 }
2999}
3000
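/*
 * _sde_crtc_setup_lm_bounds - set the per-mixer bounds and default ROIs from
 * the adjusted mode and the number of mixers assigned to this crtc
 */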
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05003001static void _sde_crtc_setup_lm_bounds(struct drm_crtc *crtc,
3002 struct drm_crtc_state *state)
3003{
3004 struct sde_crtc *sde_crtc;
3005 struct sde_crtc_state *cstate;
3006 struct drm_display_mode *adj_mode;
3007 u32 crtc_split_width;
3008 int i;
3009
3010 if (!crtc || !state) {
3011 SDE_ERROR("invalid args\n");
3012 return;
3013 }
3014
3015 sde_crtc = to_sde_crtc(crtc);
3016 cstate = to_sde_crtc_state(state);
3017
3018 adj_mode = &state->adjusted_mode;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05303019 crtc_split_width = sde_crtc_get_mixer_width(sde_crtc, cstate, adj_mode);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05003020
3021 for (i = 0; i < sde_crtc->num_mixers; i++) {
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003022 cstate->lm_bounds[i].x = crtc_split_width * i;
3023 cstate->lm_bounds[i].y = 0;
3024 cstate->lm_bounds[i].w = crtc_split_width;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05303025 cstate->lm_bounds[i].h =
3026 sde_crtc_get_mixer_height(sde_crtc, cstate, adj_mode);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003027 memcpy(&cstate->lm_roi[i], &cstate->lm_bounds[i],
3028 sizeof(cstate->lm_roi[i]));
Dhaval Patela5f75952017-07-25 11:17:41 -07003029 SDE_EVT32_VERBOSE(DRMID(crtc), i,
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003030 cstate->lm_bounds[i].x, cstate->lm_bounds[i].y,
3031 cstate->lm_bounds[i].w, cstate->lm_bounds[i].h);
3032 SDE_DEBUG("%s: lm%d bnd&roi (%d,%d,%d,%d)\n", sde_crtc->name, i,
3033 cstate->lm_roi[i].x, cstate->lm_roi[i].y,
3034 cstate->lm_roi[i].w, cstate->lm_roi[i].h);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05003035 }
3036
3037 drm_mode_debug_printmodeline(adj_mode);
3038}
3039
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003040static void sde_crtc_atomic_begin(struct drm_crtc *crtc,
Clarence Ip0d0e96d2016-10-24 18:13:13 -04003041 struct drm_crtc_state *old_state)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003042{
Clarence Ipcae1bb62016-07-07 12:07:13 -04003043 struct sde_crtc *sde_crtc;
Dhaval Patel0e558f42017-04-30 00:51:40 -07003044 struct drm_encoder *encoder;
Clarence Ipcae1bb62016-07-07 12:07:13 -04003045 struct drm_device *dev;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003046 unsigned long flags;
Abhijit Kulkarni12cef9c2017-07-13 11:19:03 -07003047 struct sde_crtc_smmu_state_data *smmu_state;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003048
Clarence Ipcae1bb62016-07-07 12:07:13 -04003049 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003050 SDE_ERROR("invalid crtc\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04003051 return;
3052 }
3053
Alan Kwong163d2612016-11-03 00:56:56 -04003054 if (!crtc->state->enable) {
3055 SDE_DEBUG("crtc%d -> enable %d, skip atomic_begin\n",
3056 crtc->base.id, crtc->state->enable);
3057 return;
3058 }
3059
3060 SDE_DEBUG("crtc%d\n", crtc->base.id);
3061
Clarence Ipcae1bb62016-07-07 12:07:13 -04003062 sde_crtc = to_sde_crtc(crtc);
3063 dev = crtc->dev;
Abhijit Kulkarni12cef9c2017-07-13 11:19:03 -07003064 smmu_state = &sde_crtc->smmu_state;
Clarence Ipcae1bb62016-07-07 12:07:13 -04003065
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003066 if (!sde_crtc->num_mixers) {
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003067 _sde_crtc_setup_mixers(crtc);
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05003068 _sde_crtc_setup_is_ppsplit(crtc->state);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003069 _sde_crtc_setup_lm_bounds(crtc, crtc->state);
3070 }
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05003071
Lloyd Atkinson265d2212016-05-30 13:12:01 -04003072 if (sde_crtc->event) {
3073 WARN_ON(sde_crtc->event);
3074 } else {
3075 spin_lock_irqsave(&dev->event_lock, flags);
3076 sde_crtc->event = crtc->state->event;
3077 spin_unlock_irqrestore(&dev->event_lock, flags);
3078 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003079
Dhaval Patel0e558f42017-04-30 00:51:40 -07003080 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
3081 if (encoder->crtc != crtc)
3082 continue;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003083
Dhaval Patel0e558f42017-04-30 00:51:40 -07003084 /* encoder will trigger pending mask now */
3085 sde_encoder_trigger_kickoff_pending(encoder);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003086 }
3087
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003088 /*
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003089 * If no mixers have been allocated in sde_crtc_atomic_check(),
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003090 * it means we are trying to flush a CRTC whose state is disabled:
3091 * nothing else needs to be done.
3092 */
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003093 if (unlikely(!sde_crtc->num_mixers))
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003094 return;
3095
Clarence Ipd9f9fa62016-09-09 13:42:32 -04003096 _sde_crtc_blend_setup(crtc);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05303097 _sde_crtc_dest_scaler_setup(crtc);
Abhijit Kulkarni12cef9c2017-07-13 11:19:03 -07003098
3099 /*
3100	 * Since CP properties are programmed in hardware through an
3101	 * AXI buffer, check whether the SMMU context bank is in the
3102	 * attached state, and apply the color processing properties
3103	 * only while the SMMU state is attached.
3105 */
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07003106 if ((smmu_state->state != DETACHED) &&
Abhijit Kulkarni12cef9c2017-07-13 11:19:03 -07003107 (smmu_state->state != DETACH_ALL_REQ))
3108 sde_cp_crtc_apply_properties(crtc);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003109
3110 /*
3111 * PP_DONE irq is only used by command mode for now.
3112	 * It is better to request it as pending before the FLUSH and
3113	 * START triggers to make sure no pp_done irq is missed.
3114 * This is safe because no pp_done will happen before SW trigger
3115 * in command mode.
3116 */
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003117}
3118
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003119static void sde_crtc_atomic_flush(struct drm_crtc *crtc,
3120 struct drm_crtc_state *old_crtc_state)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003121{
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003122 struct drm_encoder *encoder;
Clarence Ipcae1bb62016-07-07 12:07:13 -04003123 struct sde_crtc *sde_crtc;
3124 struct drm_device *dev;
Lloyd Atkinson265d2212016-05-30 13:12:01 -04003125 struct drm_plane *plane;
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303126 struct msm_drm_private *priv;
3127 struct msm_drm_thread *event_thread;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003128 unsigned long flags;
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003129 struct sde_crtc_state *cstate;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003130
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303131 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003132 SDE_ERROR("invalid crtc\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04003133 return;
3134 }
3135
Alan Kwong163d2612016-11-03 00:56:56 -04003136 if (!crtc->state->enable) {
3137 SDE_DEBUG("crtc%d -> enable %d, skip atomic_flush\n",
3138 crtc->base.id, crtc->state->enable);
3139 return;
3140 }
3141
3142 SDE_DEBUG("crtc%d\n", crtc->base.id);
Clarence Ipcae1bb62016-07-07 12:07:13 -04003143
3144 sde_crtc = to_sde_crtc(crtc);
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003145 cstate = to_sde_crtc_state(crtc->state);
Clarence Ipcae1bb62016-07-07 12:07:13 -04003146 dev = crtc->dev;
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303147 priv = dev->dev_private;
3148
3149 if (crtc->index >= ARRAY_SIZE(priv->event_thread)) {
3150 SDE_ERROR("invalid crtc index[%d]\n", crtc->index);
3151 return;
3152 }
3153
3154 event_thread = &priv->event_thread[crtc->index];
Clarence Ipcae1bb62016-07-07 12:07:13 -04003155
Lloyd Atkinson265d2212016-05-30 13:12:01 -04003156 if (sde_crtc->event) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003157 SDE_DEBUG("already received sde_crtc->event\n");
Lloyd Atkinson265d2212016-05-30 13:12:01 -04003158 } else {
Lloyd Atkinson265d2212016-05-30 13:12:01 -04003159 spin_lock_irqsave(&dev->event_lock, flags);
3160 sde_crtc->event = crtc->state->event;
3161 spin_unlock_irqrestore(&dev->event_lock, flags);
3162 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003163
3164 /*
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003165	 * If no mixers have been allocated in sde_crtc_atomic_check(),
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003166 * it means we are trying to flush a CRTC whose state is disabled:
3167 * nothing else needs to be done.
3168 */
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003169 if (unlikely(!sde_crtc->num_mixers))
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003170 return;
3171
Alan Kwong346223e2017-06-30 15:29:22 -04003172 /*
3173	 * For planes without a commit update, the drm framework will not
3174	 * add those planes to the current state since a hardware update is
3175	 * not required. However, if those planes were power collapsed since
3176	 * the last commit cycle, the driver has to restore their hardware
3177	 * state explicitly here prior to the plane flush.
3178 */
3179 drm_atomic_crtc_for_each_plane(plane, crtc)
3180 sde_plane_restore(plane);
3181
Clarence Ipcae1bb62016-07-07 12:07:13 -04003182 /* wait for acquire fences before anything else is done */
3183 _sde_crtc_wait_for_fences(crtc);
3184
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003185 if (!cstate->rsc_update) {
3186 drm_for_each_encoder(encoder, dev) {
3187 if (encoder->crtc != crtc)
3188 continue;
3189
3190 cstate->rsc_client =
Dhaval Patel30fae8a2017-04-21 18:42:41 -07003191 sde_encoder_get_rsc_client(encoder);
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003192 }
3193 cstate->rsc_update = true;
3194 }
3195
Alan Kwong9aa061c2016-11-06 21:17:12 -05003196 /* update performance setting before crtc kickoff */
3197 sde_core_perf_crtc_update(crtc, 1, false);
3198
Clarence Ipcae1bb62016-07-07 12:07:13 -04003199 /*
3200 * Final plane updates: Give each plane a chance to complete all
3201 * required writes/flushing before crtc's "flush
3202 * everything" call below.
3203 */
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07003204 drm_atomic_crtc_for_each_plane(plane, crtc) {
3205 if (sde_crtc->smmu_state.transition_error)
3206 sde_plane_set_error(plane, true);
Clarence Ipcae1bb62016-07-07 12:07:13 -04003207 sde_plane_flush(plane);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07003208 }
Clarence Ipcae1bb62016-07-07 12:07:13 -04003209
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003210 /* Kickoff will be scheduled by outer layer */
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003211}
3212
Clarence Ip7a753bb2016-07-07 11:47:44 -04003213/**
3214 * sde_crtc_destroy_state - state destroy hook
3215 * @crtc: drm CRTC
3216 * @state: CRTC state object to release
3217 */
3218static void sde_crtc_destroy_state(struct drm_crtc *crtc,
3219 struct drm_crtc_state *state)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003220{
Clarence Ip7a753bb2016-07-07 11:47:44 -04003221 struct sde_crtc *sde_crtc;
3222 struct sde_crtc_state *cstate;
3223
3224 if (!crtc || !state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003225 SDE_ERROR("invalid argument(s)\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003226 return;
3227 }
3228
3229 sde_crtc = to_sde_crtc(crtc);
3230 cstate = to_sde_crtc_state(state);
3231
Alan Kwong163d2612016-11-03 00:56:56 -04003232 SDE_DEBUG("crtc%d\n", crtc->base.id);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003233
Alan Kwongcdb2f282017-03-18 13:42:06 -07003234 _sde_crtc_rp_destroy(&cstate->rp);
3235
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07003236 __drm_atomic_helper_crtc_destroy_state(state);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003237
3238 /* destroy value helper */
3239 msm_property_destroy_state(&sde_crtc->property_info, cstate,
Clarence Ip4a2955d2017-07-04 18:04:33 -04003240 &cstate->property_state);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003241}
3242
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003243static int _sde_crtc_wait_for_frame_done(struct drm_crtc *crtc)
3244{
3245 struct sde_crtc *sde_crtc;
Veera Sundaram Sankaran6d21d4c2017-09-30 10:07:15 -07003246 int ret, rc = 0, i;
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003247
3248 if (!crtc) {
3249 SDE_ERROR("invalid argument\n");
3250 return -EINVAL;
3251 }
3252 sde_crtc = to_sde_crtc(crtc);
3253
3254 if (!atomic_read(&sde_crtc->frame_pending)) {
3255 SDE_DEBUG("no frames pending\n");
3256 return 0;
3257 }
3258
Veera Sundaram Sankaran6d21d4c2017-09-30 10:07:15 -07003259 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_FUNC_ENTRY);
3260
3261 /*
3262 * flush all the event thread work to make sure all the
3263 * FRAME_EVENTS from encoder are propagated to crtc
3264 */
3265 for (i = 0; i < ARRAY_SIZE(sde_crtc->frame_events); i++) {
3266 if (list_empty(&sde_crtc->frame_events[i].list))
3267 kthread_flush_work(&sde_crtc->frame_events[i].work);
3268 }
3269
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003270 ret = wait_for_completion_timeout(&sde_crtc->frame_done_comp,
3271 msecs_to_jiffies(SDE_FRAME_DONE_TIMEOUT));
3272 if (!ret) {
3273 SDE_ERROR("frame done completion wait timed out, ret:%d\n",
3274 ret);
3275 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_FATAL);
3276 rc = -ETIMEDOUT;
3277 }
Dhaval Patela5f75952017-07-25 11:17:41 -07003278 SDE_EVT32_VERBOSE(DRMID(crtc), SDE_EVTLOG_FUNC_EXIT);
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003279
3280 return rc;
3281}
3282
Clarence Ip8ee49952017-09-20 11:10:50 -04003283static int _sde_crtc_commit_kickoff_rot(struct drm_crtc *crtc,
Clarence Ip95f530b2017-09-06 17:31:41 -04003284 struct sde_crtc_state *cstate)
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003285{
Clarence Ipeb39cce2017-07-19 14:12:43 -04003286 struct drm_plane *plane;
Clarence Ip95f530b2017-09-06 17:31:41 -04003287 struct sde_crtc *sde_crtc;
3288 struct sde_hw_ctl *ctl, *master_ctl;
Clarence Ipb776b532017-09-12 18:30:06 -04003289 u32 flush_mask;
Clarence Ip8ee49952017-09-20 11:10:50 -04003290 int i, rc = 0;
Clarence Ip95f530b2017-09-06 17:31:41 -04003291
3292 if (!crtc || !cstate)
Clarence Ip8ee49952017-09-20 11:10:50 -04003293 return -EINVAL;
Clarence Ip95f530b2017-09-06 17:31:41 -04003294
3295 sde_crtc = to_sde_crtc(crtc);
3296
Clarence Ipb776b532017-09-12 18:30:06 -04003297 /*
3298 * Update sbuf configuration and flush bits if a flush
3299 * mask has been defined for either the current or
3300 * previous commit.
3301 *
3302 * Updates are also required for the first commit after
3303 * sbuf_flush_mask becomes 0x0, to properly transition
3304 * the hardware out of sbuf mode.
3305 */
3306 if (!sde_crtc->sbuf_flush_mask_old && !sde_crtc->sbuf_flush_mask)
Clarence Ip8ee49952017-09-20 11:10:50 -04003307 return 0;
Clarence Ipb776b532017-09-12 18:30:06 -04003308
3309 flush_mask = sde_crtc->sbuf_flush_mask_old | sde_crtc->sbuf_flush_mask;
3310 sde_crtc->sbuf_flush_mask_old = sde_crtc->sbuf_flush_mask;
3311
Clarence Ip95f530b2017-09-06 17:31:41 -04003312 SDE_ATRACE_BEGIN("crtc_kickoff_rot");
3313
Clarence Ipb776b532017-09-12 18:30:06 -04003314 if (cstate->sbuf_cfg.rot_op_mode != SDE_CTL_ROT_OP_MODE_OFFLINE) {
3315 drm_atomic_crtc_for_each_plane(plane, crtc) {
Clarence Ip8ee49952017-09-20 11:10:50 -04003316 rc = sde_plane_kickoff_rot(plane);
3317 if (rc) {
3318 SDE_ERROR("crtc%d cancelling inline rotation\n",
3319 crtc->base.id);
3320 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_ERROR);
3321
3322 /* revert to offline on errors */
3323 cstate->sbuf_cfg.rot_op_mode =
3324 SDE_CTL_ROT_OP_MODE_OFFLINE;
3325 break;
3326 }
Clarence Ipb776b532017-09-12 18:30:06 -04003327 }
3328 }
Clarence Ip95f530b2017-09-06 17:31:41 -04003329
3330 master_ctl = NULL;
3331 for (i = 0; i < sde_crtc->num_mixers; i++) {
3332 ctl = sde_crtc->mixers[i].hw_ctl;
Clarence Ip8ee49952017-09-20 11:10:50 -04003333 if (!ctl)
Clarence Ip95f530b2017-09-06 17:31:41 -04003334 continue;
3335
3336 if (!master_ctl || master_ctl->idx > ctl->idx)
3337 master_ctl = ctl;
Clarence Ip95f530b2017-09-06 17:31:41 -04003338 }
3339
Clarence Ip8ee49952017-09-20 11:10:50 -04003340 /* only update sbuf_cfg and flush for master ctl */
3341 if (master_ctl && master_ctl->ops.setup_sbuf_cfg &&
3342 master_ctl->ops.update_pending_flush) {
3343 master_ctl->ops.setup_sbuf_cfg(master_ctl, &cstate->sbuf_cfg);
3344 master_ctl->ops.update_pending_flush(master_ctl, flush_mask);
3345
3346 /* explicitly trigger rotator for async modes */
3347 if (cstate->sbuf_cfg.rot_op_mode ==
3348 SDE_CTL_ROT_OP_MODE_INLINE_ASYNC &&
3349 master_ctl->ops.trigger_rot_start) {
3350 master_ctl->ops.trigger_rot_start(master_ctl);
3351 SDE_EVT32(DRMID(crtc), master_ctl->idx - CTL_0);
3352 }
3353 }
Clarence Ip95f530b2017-09-06 17:31:41 -04003354
3355 SDE_ATRACE_END("crtc_kickoff_rot");
Clarence Ip8ee49952017-09-20 11:10:50 -04003356 return rc;
Clarence Ip95f530b2017-09-06 17:31:41 -04003357}
3358
Clarence Ip662698e2017-09-12 18:34:16 -04003359/**
3360 * _sde_crtc_remove_pipe_flush - remove staged pipes from flush mask
3361 * @sde_crtc: Pointer to sde crtc structure
3362 */
3363static void _sde_crtc_remove_pipe_flush(struct sde_crtc *sde_crtc)
3364{
3365 struct sde_crtc_mixer *mixer;
3366 struct sde_hw_ctl *ctl;
3367 u32 i, flush_mask;
3368
3369 if (!sde_crtc)
3370 return;
3371
3372 mixer = sde_crtc->mixers;
3373 for (i = 0; i < sde_crtc->num_mixers; i++) {
3374 ctl = mixer[i].hw_ctl;
3375 if (!ctl || !ctl->ops.get_pending_flush ||
3376 !ctl->ops.clear_pending_flush ||
3377 !ctl->ops.update_pending_flush)
3378 continue;
3379
3380 flush_mask = ctl->ops.get_pending_flush(ctl);
3381 flush_mask &= ~mixer[i].pipe_mask;
3382 ctl->ops.clear_pending_flush(ctl);
3383 ctl->ops.update_pending_flush(ctl, flush_mask);
3384 }
3385}
3386
Clarence Ip95f530b2017-09-06 17:31:41 -04003387void sde_crtc_commit_kickoff(struct drm_crtc *crtc)
3388{
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003389 struct drm_encoder *encoder;
3390 struct drm_device *dev;
Alan Kwong628d19e2016-10-31 13:50:13 -04003391 struct sde_crtc *sde_crtc;
Alan Kwong67a3f792016-11-01 23:16:53 -04003392 struct msm_drm_private *priv;
3393 struct sde_kms *sde_kms;
Alan Kwong4aacd532017-02-04 18:51:33 -08003394 struct sde_crtc_state *cstate;
Clarence Ip662698e2017-09-12 18:34:16 -04003395 bool is_error;
Clarence Ip95f530b2017-09-06 17:31:41 -04003396 int ret;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003397
3398 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003399 SDE_ERROR("invalid argument\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003400 return;
3401 }
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003402 dev = crtc->dev;
Alan Kwong628d19e2016-10-31 13:50:13 -04003403 sde_crtc = to_sde_crtc(crtc);
Alan Kwong67a3f792016-11-01 23:16:53 -04003404 sde_kms = _sde_crtc_get_kms(crtc);
Clarence Ip662698e2017-09-12 18:34:16 -04003405 is_error = false;
Narendra Muppallaec11a0a2017-06-15 15:35:17 -07003406
3407 if (!sde_kms || !sde_kms->dev || !sde_kms->dev->dev_private) {
3408 SDE_ERROR("invalid argument\n");
3409 return;
3410 }
3411
Alan Kwong67a3f792016-11-01 23:16:53 -04003412 priv = sde_kms->dev->dev_private;
Alan Kwong4aacd532017-02-04 18:51:33 -08003413 cstate = to_sde_crtc_state(crtc->state);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003414
Clarence Ip90b282d2017-05-04 10:00:32 -07003415 /*
3416	 * If no mixers have been allocated in sde_crtc_atomic_check(),
3417 * it means we are trying to start a CRTC whose state is disabled:
3418 * nothing else needs to be done.
3419 */
3420 if (unlikely(!sde_crtc->num_mixers))
3421 return;
3422
Narendra Muppalla77b32932017-05-10 13:53:11 -07003423 SDE_ATRACE_BEGIN("crtc_commit");
Clarence Ip95f530b2017-09-06 17:31:41 -04003424
3425 /* default to ASYNC mode for inline rotation */
Clarence Ipb776b532017-09-12 18:30:06 -04003426 cstate->sbuf_cfg.rot_op_mode = sde_crtc->sbuf_flush_mask ?
Clarence Ip95f530b2017-09-06 17:31:41 -04003427 SDE_CTL_ROT_OP_MODE_INLINE_ASYNC : SDE_CTL_ROT_OP_MODE_OFFLINE;
3428
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003429 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
Alan Kwong4aacd532017-02-04 18:51:33 -08003430 struct sde_encoder_kickoff_params params = { 0 };
3431
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003432 if (encoder->crtc != crtc)
3433 continue;
3434
3435 /*
3436 * Encoder will flush/start now, unless it has a tx pending.
3437 * If so, it may delay and flush at an irq event (e.g. ppdone)
3438 */
Alan Kwong4aacd532017-02-04 18:51:33 -08003439 params.inline_rotate_prefill = cstate->sbuf_prefill_line;
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003440 params.affected_displays = _sde_crtc_get_displays_affected(crtc,
3441 crtc->state);
Alan Kwong4aacd532017-02-04 18:51:33 -08003442 sde_encoder_prepare_for_kickoff(encoder, &params);
Clarence Ip95f530b2017-09-06 17:31:41 -04003443
3444 /*
3445 * For inline ASYNC modes, the flush bits are not written
3446	 * to hardware atomically, so avoid ASYNC mode if a video
3447 * mode encoder is active on this CRTC.
3448 */
3449 if (cstate->sbuf_cfg.rot_op_mode ==
3450 SDE_CTL_ROT_OP_MODE_INLINE_ASYNC &&
3451 sde_encoder_get_intf_mode(encoder) ==
3452 INTF_MODE_VIDEO)
3453 cstate->sbuf_cfg.rot_op_mode =
3454 SDE_CTL_ROT_OP_MODE_INLINE_SYNC;
Alan Kwong628d19e2016-10-31 13:50:13 -04003455 }
3456
Clarence Ip95f530b2017-09-06 17:31:41 -04003457 /*
3458 * For ASYNC inline modes, kick off the rotator now so that the H/W
3459 * can start as soon as it's ready.
3460 */
3461 if (cstate->sbuf_cfg.rot_op_mode == SDE_CTL_ROT_OP_MODE_INLINE_ASYNC)
Clarence Ip8ee49952017-09-20 11:10:50 -04003462 if (_sde_crtc_commit_kickoff_rot(crtc, cstate))
3463 is_error = true;
Clarence Ip95f530b2017-09-06 17:31:41 -04003464
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003465 /* wait for frame_event_done completion */
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003466 SDE_ATRACE_BEGIN("wait_for_frame_done_event");
3467 ret = _sde_crtc_wait_for_frame_done(crtc);
3468 SDE_ATRACE_END("wait_for_frame_done_event");
3469 if (ret) {
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003470 SDE_ERROR("crtc%d wait for frame done failed;frame_pending%d\n",
3471 crtc->base.id,
3472 atomic_read(&sde_crtc->frame_pending));
Clarence Ip662698e2017-09-12 18:34:16 -04003473
3474 is_error = true;
3475
3476 /* force offline rotation mode since the commit has no pipes */
3477 cstate->sbuf_cfg.rot_op_mode = SDE_CTL_ROT_OP_MODE_OFFLINE;
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003478 }
3479
3480 if (atomic_inc_return(&sde_crtc->frame_pending) == 1) {
Alan Kwong628d19e2016-10-31 13:50:13 -04003481 /* acquire bandwidth and other resources */
3482 SDE_DEBUG("crtc%d first commit\n", crtc->base.id);
Clarence Ip95f530b2017-09-06 17:31:41 -04003483 SDE_EVT32(DRMID(crtc), cstate->sbuf_cfg.rot_op_mode,
3484 SDE_EVTLOG_FUNC_CASE1);
Alan Kwong628d19e2016-10-31 13:50:13 -04003485 } else {
3486 SDE_DEBUG("crtc%d commit\n", crtc->base.id);
Clarence Ip95f530b2017-09-06 17:31:41 -04003487 SDE_EVT32(DRMID(crtc), cstate->sbuf_cfg.rot_op_mode,
3488 SDE_EVTLOG_FUNC_CASE2);
Alan Kwong628d19e2016-10-31 13:50:13 -04003489 }
Dhaval Pateld67cf4a2017-06-14 18:08:32 -07003490 sde_crtc->play_count++;
Alan Kwong628d19e2016-10-31 13:50:13 -04003491
Clarence Ip95f530b2017-09-06 17:31:41 -04003492 /*
3493 * For SYNC inline modes, delay the kick off until after the
3494 * wait for frame done in case the wait times out.
Clarence Ipb776b532017-09-12 18:30:06 -04003495 *
3496 * Also perform a final kickoff when transitioning back to
3497 * offline mode.
Clarence Ip95f530b2017-09-06 17:31:41 -04003498 */
Clarence Ipb776b532017-09-12 18:30:06 -04003499 if (cstate->sbuf_cfg.rot_op_mode != SDE_CTL_ROT_OP_MODE_INLINE_ASYNC)
Clarence Ip8ee49952017-09-20 11:10:50 -04003500 if (_sde_crtc_commit_kickoff_rot(crtc, cstate))
3501 is_error = true;
Clarence Ipf6b530a2017-08-21 19:39:18 -04003502
Clarence Ip980405d2017-08-08 18:33:44 -04003503 sde_vbif_clear_errors(sde_kms);
3504
Clarence Ip662698e2017-09-12 18:34:16 -04003505 if (is_error)
3506 _sde_crtc_remove_pipe_flush(sde_crtc);
3507
Alan Kwong628d19e2016-10-31 13:50:13 -04003508 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
3509 if (encoder->crtc != crtc)
3510 continue;
3511
Clarence Ip662698e2017-09-12 18:34:16 -04003512 sde_encoder_kickoff(encoder, is_error);
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003513 }
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003514
Dhaval Patelb9850c02017-08-07 22:55:47 -07003515 reinit_completion(&sde_crtc->frame_done_comp);
Narendra Muppalla77b32932017-05-10 13:53:11 -07003516 SDE_ATRACE_END("crtc_commit");
3517 return;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003518}
3519
Clarence Ip7a753bb2016-07-07 11:47:44 -04003520/**
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003521 * _sde_crtc_vblank_enable_no_lock - update power resource and vblank request
Clarence Ip7f70ce42017-03-20 06:53:46 -07003522 * @sde_crtc: Pointer to sde crtc structure
3523 * @enable: Whether to enable/disable vblanks
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003524 *
3525 * @Return: error code
Clarence Ip7f70ce42017-03-20 06:53:46 -07003526 */
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003527static int _sde_crtc_vblank_enable_no_lock(
Clarence Ip7f70ce42017-03-20 06:53:46 -07003528 struct sde_crtc *sde_crtc, bool enable)
3529{
3530 struct drm_device *dev;
3531 struct drm_crtc *crtc;
3532 struct drm_encoder *enc;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003533
3534 if (!sde_crtc) {
3535 SDE_ERROR("invalid crtc\n");
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003536 return -EINVAL;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003537 }
3538
3539 crtc = &sde_crtc->base;
3540 dev = crtc->dev;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003541
3542 if (enable) {
Lloyd Atkinson2c554eb2017-05-24 16:22:39 -04003543 int ret;
3544
3545 /* drop lock since power crtc cb may try to re-acquire lock */
3546 mutex_unlock(&sde_crtc->crtc_lock);
3547 ret = _sde_crtc_power_enable(sde_crtc, true);
3548 mutex_lock(&sde_crtc->crtc_lock);
3549 if (ret)
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003550 return ret;
Dhaval Patelf9245d62017-03-28 16:24:00 -07003551
Clarence Ip7f70ce42017-03-20 06:53:46 -07003552 list_for_each_entry(enc, &dev->mode_config.encoder_list, head) {
3553 if (enc->crtc != crtc)
3554 continue;
3555
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003556 SDE_EVT32(DRMID(&sde_crtc->base), DRMID(enc), enable,
3557 sde_crtc->enabled,
3558 sde_crtc->suspend,
3559 sde_crtc->vblank_requested);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003560
3561 sde_encoder_register_vblank_callback(enc,
3562 sde_crtc_vblank_cb, (void *)crtc);
3563 }
3564 } else {
3565 list_for_each_entry(enc, &dev->mode_config.encoder_list, head) {
3566 if (enc->crtc != crtc)
3567 continue;
3568
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003569 SDE_EVT32(DRMID(&sde_crtc->base), DRMID(enc), enable,
3570 sde_crtc->enabled,
3571 sde_crtc->suspend,
3572 sde_crtc->vblank_requested);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003573
3574 sde_encoder_register_vblank_callback(enc, NULL, NULL);
3575 }
Lloyd Atkinson2c554eb2017-05-24 16:22:39 -04003576
3577 /* drop lock since power crtc cb may try to re-acquire lock */
3578 mutex_unlock(&sde_crtc->crtc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003579 _sde_crtc_power_enable(sde_crtc, false);
Lloyd Atkinson2c554eb2017-05-24 16:22:39 -04003580 mutex_lock(&sde_crtc->crtc_lock);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003581 }
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003582
3583 return 0;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003584}
3585
3586/**
3587 * _sde_crtc_set_suspend - notify crtc of suspend enable/disable
3588 * @crtc: Pointer to drm crtc object
3589 * @enable: true to enable suspend, false to indicate resume
3590 */
3591static void _sde_crtc_set_suspend(struct drm_crtc *crtc, bool enable)
3592{
3593 struct sde_crtc *sde_crtc;
3594 struct msm_drm_private *priv;
3595 struct sde_kms *sde_kms;
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003596 int ret = 0;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003597
3598 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
3599 SDE_ERROR("invalid crtc\n");
3600 return;
3601 }
3602 sde_crtc = to_sde_crtc(crtc);
3603 priv = crtc->dev->dev_private;
3604
3605 if (!priv->kms) {
3606 SDE_ERROR("invalid crtc kms\n");
3607 return;
3608 }
3609 sde_kms = to_sde_kms(priv->kms);
3610
3611 SDE_DEBUG("crtc%d suspend = %d\n", crtc->base.id, enable);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003612 SDE_EVT32_VERBOSE(DRMID(crtc), enable);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003613
3614 mutex_lock(&sde_crtc->crtc_lock);
3615
Clarence Ip2f9beeb2017-03-16 11:04:53 -04003616 /*
Lloyd Atkinsonb2be0c42017-07-17 16:41:00 -04003617 * If the vblank is enabled, release a power reference on suspend
3618 * and take it back during resume (if it is still enabled).
Clarence Ip7f70ce42017-03-20 06:53:46 -07003619 */
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003620 SDE_EVT32(DRMID(&sde_crtc->base), enable, sde_crtc->enabled,
3621 sde_crtc->suspend, sde_crtc->vblank_requested);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003622 if (sde_crtc->suspend == enable)
3623 SDE_DEBUG("crtc%d suspend already set to %d, ignoring update\n",
3624 crtc->base.id, enable);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003625 else if (sde_crtc->enabled && sde_crtc->vblank_requested) {
3626 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, !enable);
3627 if (ret)
3628 SDE_ERROR("%s vblank enable failed: %d\n",
3629 sde_crtc->name, ret);
3630 }
Clarence Ip7f70ce42017-03-20 06:53:46 -07003631
3632 sde_crtc->suspend = enable;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003633 mutex_unlock(&sde_crtc->crtc_lock);
3634}
3635
3636/**
Clarence Ip7a753bb2016-07-07 11:47:44 -04003637 * sde_crtc_duplicate_state - state duplicate hook
3638 * @crtc: Pointer to drm crtc structure
3639 * @Returns: Pointer to new drm_crtc_state structure
3640 */
3641static struct drm_crtc_state *sde_crtc_duplicate_state(struct drm_crtc *crtc)
3642{
3643 struct sde_crtc *sde_crtc;
3644 struct sde_crtc_state *cstate, *old_cstate;
3645
3646 if (!crtc || !crtc->state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003647 SDE_ERROR("invalid argument(s)\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003648 return NULL;
3649 }
3650
3651 sde_crtc = to_sde_crtc(crtc);
3652 old_cstate = to_sde_crtc_state(crtc->state);
3653 cstate = msm_property_alloc_state(&sde_crtc->property_info);
3654 if (!cstate) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003655 SDE_ERROR("failed to allocate state\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003656 return NULL;
3657 }
3658
3659 /* duplicate value helper */
3660 msm_property_duplicate_state(&sde_crtc->property_info,
3661 old_cstate, cstate,
Clarence Ip4a2955d2017-07-04 18:04:33 -04003662 &cstate->property_state, cstate->property_values);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003663
3664 /* duplicate base helper */
3665 __drm_atomic_helper_crtc_duplicate_state(crtc, &cstate->base);
3666
Alan Kwongcdb2f282017-03-18 13:42:06 -07003667 _sde_crtc_rp_duplicate(&old_cstate->rp, &cstate->rp);
3668
Clarence Ip7a753bb2016-07-07 11:47:44 -04003669 return &cstate->base;
3670}
3671
3672/**
3673 * sde_crtc_reset - reset hook for CRTCs
3674 * Resets the atomic state for @crtc by freeing the state pointer (which might
3675 * be NULL, e.g. at driver load time) and allocating a new empty state object.
3676 * @crtc: Pointer to drm crtc structure
3677 */
3678static void sde_crtc_reset(struct drm_crtc *crtc)
3679{
3680 struct sde_crtc *sde_crtc;
3681 struct sde_crtc_state *cstate;
3682
3683 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003684 SDE_ERROR("invalid crtc\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003685 return;
3686 }
3687
Clarence Ip7f70ce42017-03-20 06:53:46 -07003688 /* revert suspend actions, if necessary */
Veera Sundaram Sankarandb43e282017-09-19 18:32:52 -07003689 if (sde_kms_is_suspend_state(crtc->dev)) {
Clarence Ip7f70ce42017-03-20 06:53:46 -07003690 _sde_crtc_set_suspend(crtc, false);
3691
Veera Sundaram Sankarandb43e282017-09-19 18:32:52 -07003692 if (!sde_crtc_is_reset_required(crtc)) {
3693 SDE_DEBUG("avoiding reset for crtc:%d\n",
3694 crtc->base.id);
3695 return;
3696 }
3697 }
3698
Clarence Ip7a753bb2016-07-07 11:47:44 -04003699 /* remove previous state, if present */
3700 if (crtc->state) {
3701 sde_crtc_destroy_state(crtc, crtc->state);
3702 crtc->state = 0;
3703 }
3704
3705 sde_crtc = to_sde_crtc(crtc);
3706 cstate = msm_property_alloc_state(&sde_crtc->property_info);
3707 if (!cstate) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003708 SDE_ERROR("failed to allocate state\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003709 return;
3710 }
3711
3712 /* reset value helper */
3713 msm_property_reset_state(&sde_crtc->property_info, cstate,
Clarence Ip4a2955d2017-07-04 18:04:33 -04003714 &cstate->property_state,
3715 cstate->property_values);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003716
Clarence Ipcae1bb62016-07-07 12:07:13 -04003717 _sde_crtc_set_input_fence_timeout(cstate);
3718
Alan Kwong310e9b02017-08-03 02:04:07 -04003719 _sde_crtc_rp_reset(&cstate->rp, &sde_crtc->rp_lock,
3720 &sde_crtc->rp_head);
Alan Kwongcdb2f282017-03-18 13:42:06 -07003721
Clarence Ip7a753bb2016-07-07 11:47:44 -04003722 cstate->base.crtc = crtc;
3723 crtc->state = &cstate->base;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003724}
3725
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003726static void sde_crtc_handle_power_event(u32 event_type, void *arg)
3727{
3728 struct drm_crtc *crtc = arg;
3729 struct sde_crtc *sde_crtc;
Dhaval Patel010f5172017-08-01 22:40:09 -07003730 struct drm_plane *plane;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003731 struct drm_encoder *encoder;
Dhaval Patel010f5172017-08-01 22:40:09 -07003732 struct sde_crtc_mixer *m;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07003733 u32 i, misr_status;
Ping Licc868fc2017-08-11 16:56:44 -07003734 unsigned long flags;
3735 struct sde_crtc_irq_info *node = NULL;
3736 int ret = 0;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003737
3738 if (!crtc) {
3739 SDE_ERROR("invalid crtc\n");
3740 return;
3741 }
3742 sde_crtc = to_sde_crtc(crtc);
3743
3744 mutex_lock(&sde_crtc->crtc_lock);
3745
3746 SDE_EVT32(DRMID(crtc), event_type);
3747
Dhaval Patel010f5172017-08-01 22:40:09 -07003748 switch (event_type) {
3749 case SDE_POWER_EVENT_POST_ENABLE:
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003750 /* restore encoder; crtc will be programmed during commit */
3751 drm_for_each_encoder(encoder, crtc->dev) {
3752 if (encoder->crtc != crtc)
3753 continue;
3754
3755 sde_encoder_virt_restore(encoder);
3756 }
Ping Licc868fc2017-08-11 16:56:44 -07003757
3758 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
3759 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
3760 ret = 0;
3761 if (node->func)
3762 ret = node->func(crtc, true, &node->irq);
3763 if (ret)
3764 SDE_ERROR("%s failed to enable event %x\n",
3765 sde_crtc->name, node->event);
3766 }
3767 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
3768
Ping Lie505f3b2017-06-19 14:19:08 -07003769 sde_cp_crtc_post_ipc(crtc);
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003770
Dhaval Patel010f5172017-08-01 22:40:09 -07003771 for (i = 0; i < sde_crtc->num_mixers; ++i) {
3772 m = &sde_crtc->mixers[i];
3773 if (!m->hw_lm || !m->hw_lm->ops.setup_misr ||
3774 !sde_crtc->misr_enable)
3775 continue;
3776
3777 m->hw_lm->ops.setup_misr(m->hw_lm, true,
3778 sde_crtc->misr_frame_count);
3779 }
3780 break;
3781 case SDE_POWER_EVENT_PRE_DISABLE:
3782 for (i = 0; i < sde_crtc->num_mixers; ++i) {
3783 m = &sde_crtc->mixers[i];
3784 if (!m->hw_lm || !m->hw_lm->ops.collect_misr ||
3785 !sde_crtc->misr_enable)
3786 continue;
3787
3788 misr_status = m->hw_lm->ops.collect_misr(m->hw_lm);
3789 sde_crtc->misr_data[i] = misr_status ? misr_status :
3790 sde_crtc->misr_data[i];
3791 }
Ping Licc868fc2017-08-11 16:56:44 -07003792
3793 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
3794 node = NULL;
3795 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
3796 ret = 0;
3797 if (node->func)
3798 ret = node->func(crtc, false, &node->irq);
3799 if (ret)
3800 SDE_ERROR("%s failed to disable event %x\n",
3801 sde_crtc->name, node->event);
3802 }
3803 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
3804
Dhaval Patel010f5172017-08-01 22:40:09 -07003805 sde_cp_crtc_pre_ipc(crtc);
3806 break;
3807 case SDE_POWER_EVENT_POST_DISABLE:
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003808 /*
3809	 * set revalidate flag in planes, so they will be re-programmed
3810 * in the next frame update
3811 */
3812 drm_atomic_crtc_for_each_plane(plane, crtc)
3813 sde_plane_set_revalidate(plane, true);
Alan Kwong8a9b38a2017-06-22 11:30:52 -04003814
Gopikrishnaiah Anandandb90fa12017-05-09 17:56:08 -07003815 sde_cp_crtc_suspend(crtc);
Dhaval Patel010f5172017-08-01 22:40:09 -07003816 break;
3817 default:
3818 SDE_DEBUG("event:%d not handled\n", event_type);
3819 break;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003820 }
3821
3822 mutex_unlock(&sde_crtc->crtc_lock);
3823}
3824
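/*
 * sde_crtc_disable - drm crtc disable hook: waits for pending frames, drops
 * the vblank/power references, notifies user events and releases the mixer
 * and bandwidth resources held by this crtc.
 */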
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003825static void sde_crtc_disable(struct drm_crtc *crtc)
3826{
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003827 struct sde_crtc *sde_crtc;
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003828 struct sde_crtc_state *cstate;
Alan Kwong07da0982016-11-04 12:57:45 -04003829 struct drm_encoder *encoder;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003830 struct msm_drm_private *priv;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07003831 unsigned long flags;
3832 struct sde_crtc_irq_info *node = NULL;
Ping Lic5c2e0b2017-08-02 15:17:59 -07003833 struct drm_event event;
3834 u32 power_on;
Dhaval Patelfd8f7742017-08-10 13:11:22 -07003835 int ret, i;
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003836
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003837 if (!crtc || !crtc->dev || !crtc->dev->dev_private || !crtc->state) {
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04003838 SDE_ERROR("invalid crtc\n");
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003839 return;
3840 }
3841 sde_crtc = to_sde_crtc(crtc);
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003842 cstate = to_sde_crtc_state(crtc->state);
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003843 priv = crtc->dev->dev_private;
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003844
Alan Kwong163d2612016-11-03 00:56:56 -04003845 SDE_DEBUG("crtc%d\n", crtc->base.id);
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003846
Sandeep Panda98d6ab22017-09-05 08:03:16 +05303847 for (i = 0; i < cstate->num_connectors; i++)
3848 sde_connector_schedule_status_work(cstate->connectors[i],
3849 false);
3850
Clarence Ipd86f6e42017-08-08 18:31:00 -04003851 if (sde_kms_is_suspend_state(crtc->dev))
Clarence Ip7f70ce42017-03-20 06:53:46 -07003852 _sde_crtc_set_suspend(crtc, true);
3853
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07003854 mutex_lock(&sde_crtc->crtc_lock);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003855 SDE_EVT32_VERBOSE(DRMID(crtc));
Alan Kwong628d19e2016-10-31 13:50:13 -04003856
Ping Lic5c2e0b2017-08-02 15:17:59 -07003857 /* update color processing on suspend */
3858 event.type = DRM_EVENT_CRTC_POWER;
3859 event.length = sizeof(u32);
3860 sde_cp_crtc_suspend(crtc);
3861 power_on = 0;
3862 msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
3863 (u8 *)&power_on);
3864
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003865 /* wait for frame_event_done completion */
3866 if (_sde_crtc_wait_for_frame_done(crtc))
3867 SDE_ERROR("crtc%d wait for frame done failed;frame_pending%d\n",
3868 crtc->base.id,
3869 atomic_read(&sde_crtc->frame_pending));
3870
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003871 SDE_EVT32(DRMID(crtc), sde_crtc->enabled, sde_crtc->suspend,
Alan Kwong8f43c012017-10-06 08:59:00 -04003872 sde_crtc->vblank_requested,
3873 crtc->state->active, crtc->state->enable);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003874 if (sde_crtc->enabled && !sde_crtc->suspend &&
3875 sde_crtc->vblank_requested) {
3876 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, false);
3877 if (ret)
3878 SDE_ERROR("%s vblank enable failed: %d\n",
3879 sde_crtc->name, ret);
Alan Kwong07da0982016-11-04 12:57:45 -04003880 }
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003881 sde_crtc->enabled = false;
Alan Kwong07da0982016-11-04 12:57:45 -04003882
Alan Kwong628d19e2016-10-31 13:50:13 -04003883 if (atomic_read(&sde_crtc->frame_pending)) {
Dhaval Patel6c666622017-03-21 23:02:59 -07003884 SDE_EVT32(DRMID(crtc), atomic_read(&sde_crtc->frame_pending),
3885 SDE_EVTLOG_FUNC_CASE2);
Alan Kwong9aa061c2016-11-06 21:17:12 -05003886 sde_core_perf_crtc_release_bw(crtc);
Alan Kwong628d19e2016-10-31 13:50:13 -04003887 atomic_set(&sde_crtc->frame_pending, 0);
3888 }
3889
Ping Li6d5bf542017-06-27 11:40:28 -07003890 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
3891 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
3892 ret = 0;
3893 if (node->func)
3894 ret = node->func(crtc, false, &node->irq);
3895 if (ret)
3896 SDE_ERROR("%s failed to disable event %x\n",
3897 sde_crtc->name, node->event);
3898 }
3899 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
3900
Alan Kwong9aa061c2016-11-06 21:17:12 -05003901 sde_core_perf_crtc_update(crtc, 0, true);
3902
Alan Kwong628d19e2016-10-31 13:50:13 -04003903 drm_for_each_encoder(encoder, crtc->dev) {
3904 if (encoder->crtc != crtc)
3905 continue;
3906 sde_encoder_register_frame_event_callback(encoder, NULL, NULL);
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003907 cstate->rsc_client = NULL;
3908 cstate->rsc_update = false;
Alan Kwong628d19e2016-10-31 13:50:13 -04003909 }
3910
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003911 if (sde_crtc->power_event)
3912 sde_power_handle_unregister_event(&priv->phandle,
3913 sde_crtc->power_event);
3914
Dhaval Patelfd8f7742017-08-10 13:11:22 -07003915 /**
3916 * All callbacks are unregistered and frame done waits are complete
3917 * at this point. No buffers are accessed by hardware.
Alan Kwong8f43c012017-10-06 08:59:00 -04003918	 * Reset the fence timeline if the crtc will not be enabled for this commit.
Dhaval Patelfd8f7742017-08-10 13:11:22 -07003919 */
Alan Kwong8f43c012017-10-06 08:59:00 -04003920 if (!crtc->state->active || !crtc->state->enable) {
3921 sde_fence_signal(&sde_crtc->output_fence, ktime_get(), true);
3922 for (i = 0; i < cstate->num_connectors; ++i)
3923 sde_connector_commit_reset(cstate->connectors[i],
3924 ktime_get());
3925 }
Dhaval Patelfd8f7742017-08-10 13:11:22 -07003926
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003927 memset(sde_crtc->mixers, 0, sizeof(sde_crtc->mixers));
3928 sde_crtc->num_mixers = 0;
Lloyd Atkinson94710bc2017-09-14 14:10:09 -04003929 sde_crtc->mixers_swapped = false;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07003930
Alan Kwong8411a9112017-06-06 19:29:01 -04003931 /* disable clk & bw control until clk & bw properties are set */
3932 cstate->bw_control = false;
Alan Kwong0230a102017-05-16 11:36:44 -07003933 cstate->bw_split_vote = false;
Alan Kwong8411a9112017-06-06 19:29:01 -04003934
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07003935 mutex_unlock(&sde_crtc->crtc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003936}
3937
3938static void sde_crtc_enable(struct drm_crtc *crtc)
3939{
Clarence Ipcae1bb62016-07-07 12:07:13 -04003940 struct sde_crtc *sde_crtc;
Alan Kwong628d19e2016-10-31 13:50:13 -04003941 struct drm_encoder *encoder;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003942 struct msm_drm_private *priv;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07003943 unsigned long flags;
3944 struct sde_crtc_irq_info *node = NULL;
Ping Lic5c2e0b2017-08-02 15:17:59 -07003945 struct drm_event event;
3946 u32 power_on;
Sandeep Panda98d6ab22017-09-05 08:03:16 +05303947 int ret, i;
3948 struct sde_crtc_state *cstate;
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -04003949
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003950 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003951 SDE_ERROR("invalid crtc\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04003952 return;
3953 }
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003954 priv = crtc->dev->dev_private;
Sandeep Panda98d6ab22017-09-05 08:03:16 +05303955 cstate = to_sde_crtc_state(crtc->state);
Clarence Ipcae1bb62016-07-07 12:07:13 -04003956
Alan Kwong163d2612016-11-03 00:56:56 -04003957 SDE_DEBUG("crtc%d\n", crtc->base.id);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003958 SDE_EVT32_VERBOSE(DRMID(crtc));
Clarence Ipcae1bb62016-07-07 12:07:13 -04003959 sde_crtc = to_sde_crtc(crtc);
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -04003960
Veera Sundaram Sankaran97dc5152017-10-10 20:24:48 -07003961 mutex_lock(&sde_crtc->crtc_lock);
3962 SDE_EVT32(DRMID(crtc), sde_crtc->enabled, sde_crtc->suspend,
3963 sde_crtc->vblank_requested);
3964
3965 /* return early if crtc is already enabled */
3966 if (sde_crtc->enabled) {
3967 if (msm_is_mode_seamless_dms(&crtc->state->adjusted_mode))
3968 SDE_DEBUG("%s extra crtc enable expected during DMS\n",
3969 sde_crtc->name);
3970 else
3971 WARN(1, "%s unexpected crtc enable\n", sde_crtc->name);
3972
3973 mutex_unlock(&sde_crtc->crtc_lock);
3974 return;
3975 }
3976
Alan Kwong628d19e2016-10-31 13:50:13 -04003977 drm_for_each_encoder(encoder, crtc->dev) {
3978 if (encoder->crtc != crtc)
3979 continue;
3980 sde_encoder_register_frame_event_callback(encoder,
3981 sde_crtc_frame_event_cb, (void *)crtc);
3982 }
3983
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003984 if (!sde_crtc->enabled && !sde_crtc->suspend &&
3985 sde_crtc->vblank_requested) {
3986 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, true);
3987 if (ret)
3988 SDE_ERROR("%s vblank enable failed: %d\n",
3989 sde_crtc->name, ret);
Lloyd Atkinsonb2be0c42017-07-17 16:41:00 -04003990 }
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003991 sde_crtc->enabled = true;
Ping Lic5c2e0b2017-08-02 15:17:59 -07003992
3993 /* update color processing on resume */
3994 event.type = DRM_EVENT_CRTC_POWER;
3995 event.length = sizeof(u32);
3996 sde_cp_crtc_resume(crtc);
3997 power_on = 1;
3998 msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
3999 (u8 *)&power_on);
4000
Lloyd Atkinsonb2be0c42017-07-17 16:41:00 -04004001 mutex_unlock(&sde_crtc->crtc_lock);
4002
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07004003 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
4004 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
4005 ret = 0;
4006 if (node->func)
4007 ret = node->func(crtc, true, &node->irq);
4008 if (ret)
4009 SDE_ERROR("%s failed to enable event %x\n",
4010 sde_crtc->name, node->event);
4011 }
4012 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004013
4014 sde_crtc->power_event = sde_power_handle_register_event(
4015 &priv->phandle,
Ping Lie505f3b2017-06-19 14:19:08 -07004016 SDE_POWER_EVENT_POST_ENABLE | SDE_POWER_EVENT_POST_DISABLE |
4017 SDE_POWER_EVENT_PRE_DISABLE,
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07004018 sde_crtc_handle_power_event, crtc, sde_crtc->name);
Sandeep Panda98d6ab22017-09-05 08:03:16 +05304019
4020 for (i = 0; i < cstate->num_connectors; i++)
4021 sde_connector_schedule_status_work(cstate->connectors[i], true);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004022}
4023
4024struct plane_state {
Dhaval Patelec10fad2016-08-22 14:40:48 -07004025 struct sde_plane_state *sde_pstate;
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004026 const struct drm_plane_state *drm_pstate;
Clarence Ipc47a0692016-10-11 10:54:17 -04004027 int stage;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004028 u32 pipe_id;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004029};
4030
Clarence Ipc47a0692016-10-11 10:54:17 -04004031static int pstate_cmp(const void *a, const void *b)
4032{
4033 struct plane_state *pa = (struct plane_state *)a;
4034 struct plane_state *pb = (struct plane_state *)b;
4035 int rc = 0;
4036 int pa_zpos, pb_zpos;
4037
4038 pa_zpos = sde_plane_get_property(pa->sde_pstate, PLANE_PROP_ZPOS);
4039 pb_zpos = sde_plane_get_property(pb->sde_pstate, PLANE_PROP_ZPOS);
4040
4041 if (pa_zpos != pb_zpos)
4042 rc = pa_zpos - pb_zpos;
4043 else
4044 rc = pa->drm_pstate->crtc_x - pb->drm_pstate->crtc_x;
4045
4046 return rc;
4047}
4048
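/**
 * _sde_crtc_excl_rect_overlap_check - check that an exclusion rect is covered
 * @pstates: Plane states sorted by z-order
 * @cnt: Total number of plane states
 * @curr_cnt: Index of the first plane to check against
 * @excl_rect: Exclusion rectangle that must be fully covered
 * @z_pos: Stage of the plane owning the exclusion rectangle
 *
 * @Returns: zero if a plane at a different stage fully covers @excl_rect,
 *	-EINVAL otherwise
 */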
Dhaval Patela8d6bc62017-05-10 17:40:18 -07004049static int _sde_crtc_excl_rect_overlap_check(struct plane_state pstates[],
4050 int cnt, int curr_cnt, struct sde_rect *excl_rect, int z_pos)
4051{
4052 struct sde_rect dst_rect, intersect;
4053 int i, rc = -EINVAL;
4054 const struct drm_plane_state *pstate;
4055
4056 /* start checking from next plane */
4057 for (i = curr_cnt; i < cnt; i++) {
4058 pstate = pstates[i].drm_pstate;
4059 POPULATE_RECT(&dst_rect, pstate->crtc_x, pstate->crtc_y,
Veera Sundaram Sankaran9d9ff912017-06-20 10:41:21 -07004060 pstate->crtc_w, pstate->crtc_h, false);
Dhaval Patela8d6bc62017-05-10 17:40:18 -07004061 sde_kms_rect_intersect(&dst_rect, excl_rect, &intersect);
4062
4063 if (intersect.w == excl_rect->w && intersect.h == excl_rect->h
4064 /* next plane may be on same z-order */
4065 && z_pos != pstates[i].stage) {
4066 rc = 0;
4067 goto end;
4068 }
4069 }
4070
4071	SDE_ERROR("no higher z-order plane fully covers the excl rect\n");
4072end:
4073 return rc;
4074}
4075
4076/* no input validation - caller API has all the checks */
4077static int _sde_crtc_excl_dim_layer_check(struct drm_crtc_state *state,
4078 struct plane_state pstates[], int cnt)
4079{
4080 struct sde_crtc_state *cstate = to_sde_crtc_state(state);
4081 struct drm_display_mode *mode = &state->adjusted_mode;
4082 const struct drm_plane_state *pstate;
4083 struct sde_plane_state *sde_pstate;
4084 int rc = 0, i;
4085
4086 /* Check dim layer rect bounds and stage */
4087 for (i = 0; i < cstate->num_dim_layers; i++) {
4088 if ((CHECK_LAYER_BOUNDS(cstate->dim_layer[i].rect.y,
4089 cstate->dim_layer[i].rect.h, mode->vdisplay)) ||
4090 (CHECK_LAYER_BOUNDS(cstate->dim_layer[i].rect.x,
4091 cstate->dim_layer[i].rect.w, mode->hdisplay)) ||
4092 (cstate->dim_layer[i].stage >= SDE_STAGE_MAX) ||
4093 (!cstate->dim_layer[i].rect.w) ||
4094 (!cstate->dim_layer[i].rect.h)) {
4095 SDE_ERROR("invalid dim_layer:{%d,%d,%d,%d}, stage:%d\n",
4096 cstate->dim_layer[i].rect.x,
4097 cstate->dim_layer[i].rect.y,
4098 cstate->dim_layer[i].rect.w,
4099 cstate->dim_layer[i].rect.h,
4100 cstate->dim_layer[i].stage);
4101 SDE_ERROR("display: %dx%d\n", mode->hdisplay,
4102 mode->vdisplay);
4103 rc = -E2BIG;
4104 goto end;
4105 }
4106 }
4107
4108	/* this traverses the pstates sorted by z-order */
4109 for (i = 0; i < cnt; i++) {
4110 pstate = pstates[i].drm_pstate;
4111 sde_pstate = to_sde_plane_state(pstate);
4112 if (sde_pstate->excl_rect.w && sde_pstate->excl_rect.h) {
4113 /* check overlap on all top z-order */
4114 rc = _sde_crtc_excl_rect_overlap_check(pstates, cnt,
4115 i + 1, &sde_pstate->excl_rect, pstates[i].stage);
4116 if (rc)
4117 goto end;
4118 }
4119 }
4120
4121end:
4122 return rc;
4123}
4124
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004125static int _sde_crtc_check_secure_state(struct drm_crtc *crtc,
4126 struct drm_crtc_state *state)
4127{
4128 struct drm_encoder *encoder;
4129 struct sde_crtc_state *cstate;
4130 uint32_t secure;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004131 uint32_t fb_ns = 0, fb_sec = 0, fb_sec_dir = 0;
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004132 int encoder_cnt = 0;
4133 int rc;
4134
4135 if (!crtc || !state) {
4136 SDE_ERROR("invalid arguments\n");
4137 return -EINVAL;
4138 }
4139
4140 cstate = to_sde_crtc_state(state);
4141
4142 secure = sde_crtc_get_property(cstate,
4143 CRTC_PROP_SECURITY_LEVEL);
4144
4145 rc = _sde_crtc_find_plane_fb_modes(state,
4146 &fb_ns,
4147 &fb_sec,
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004148 &fb_sec_dir);
4149 if (rc)
4150 return rc;
4151
4152 /**
4153 * validate planes
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004154	 * fb_sec_dir is for the secure camera preview and secure display use cases,
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004155	 * fb_sec is for secure video playback,
4156	 * fb_ns is for normal, non-secure use cases.
4157 */
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004158 if ((secure == SDE_DRM_SEC_ONLY) &&
4159 (fb_ns || fb_sec || (fb_sec && fb_sec_dir))) {
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004160 SDE_ERROR(
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004161 "crtc%d: invalid planes fb_modes Sec:%d, NS:%d, Sec_Dir:%d\n",
4162 crtc->base.id, fb_sec, fb_ns, fb_sec_dir);
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004163 return -EINVAL;
4164 }
4165
4166 /**
4167	 * secure_crtc is not allowed in a shared topology
4168 * across different encoders.
4169 */
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004170 if (fb_sec_dir) {
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004171 drm_for_each_encoder(encoder, crtc->dev)
4172 if (encoder->crtc == crtc)
4173 encoder_cnt++;
4174
4175 if (encoder_cnt >
4176 MAX_ALLOWED_ENCODER_CNT_PER_SECURE_CRTC) {
4177 SDE_ERROR(
4178				"crtc%d, invalid virtual encoder count %d\n",
4179 crtc->base.id,
4180 encoder_cnt);
4181 return -EINVAL;
4182
4183 }
4184 }
4185 SDE_DEBUG("crtc:%d Secure validation successful\n", crtc->base.id);
4186 return 0;
4187}
4188
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004189static int sde_crtc_atomic_check(struct drm_crtc *crtc,
4190 struct drm_crtc_state *state)
4191{
Clarence Ipcae1bb62016-07-07 12:07:13 -04004192 struct sde_crtc *sde_crtc;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004193 struct plane_state pstates[SDE_STAGE_MAX * 4];
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004194 struct sde_crtc_state *cstate;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004195
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004196 const struct drm_plane_state *pstate;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004197 struct drm_plane *plane;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004198 struct drm_display_mode *mode;
4199
4200 int cnt = 0, rc = 0, mixer_width, i, z_pos;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004201
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004202 struct sde_multirect_plane_states multirect_plane[SDE_STAGE_MAX * 2];
4203 int multirect_count = 0;
4204 const struct drm_plane_state *pipe_staged[SSPP_MAX];
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004205 int left_zpos_cnt = 0, right_zpos_cnt = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004206
Clarence Ipcae1bb62016-07-07 12:07:13 -04004207 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07004208 SDE_ERROR("invalid crtc\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04004209 return -EINVAL;
4210 }
4211
Alan Kwongcdb2f282017-03-18 13:42:06 -07004212 sde_crtc = to_sde_crtc(crtc);
4213 cstate = to_sde_crtc_state(state);
4214
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004215 if (!state->enable || !state->active) {
4216 SDE_DEBUG("crtc%d -> enable %d, active %d, skip atomic_check\n",
4217 crtc->base.id, state->enable, state->active);
Alan Kwongcdb2f282017-03-18 13:42:06 -07004218 goto end;
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004219 }
4220
Dhaval Patelec10fad2016-08-22 14:40:48 -07004221 mode = &state->adjusted_mode;
4222	SDE_DEBUG("%s: check\n", sde_crtc->name);
Clarence Ipcae1bb62016-07-07 12:07:13 -04004223
Clarence Ip90b282d2017-05-04 10:00:32 -07004224 /* force a full mode set if active state changed */
4225 if (state->active_changed)
4226 state->mode_changed = true;
4227
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004228 memset(pipe_staged, 0, sizeof(pipe_staged));
4229
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05304230 rc = _sde_crtc_check_dest_scaler_data(crtc, state);
4231 if (rc) {
4232 SDE_ERROR("crtc%d failed dest scaler check %d\n",
4233 crtc->base.id, rc);
4234 goto end;
4235 }
4236
4237 mixer_width = sde_crtc_get_mixer_width(sde_crtc, cstate, mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004238
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05004239 _sde_crtc_setup_is_ppsplit(state);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05004240 _sde_crtc_setup_lm_bounds(crtc, state);
4241
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004242 rc = _sde_crtc_check_secure_state(crtc, state);
4243 if (rc)
4244 return rc;
4245
Dhaval Patelec10fad2016-08-22 14:40:48 -07004246 /* get plane state for all drm planes associated with crtc state */
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004247 drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
Clarence Ipc47a0692016-10-11 10:54:17 -04004248 if (IS_ERR_OR_NULL(pstate)) {
4249 rc = PTR_ERR(pstate);
4250 SDE_ERROR("%s: failed to get plane%d state, %d\n",
4251 sde_crtc->name, plane->base.id, rc);
Alan Kwong85767282016-10-03 18:03:37 -04004252 goto end;
4253 }
Clarence Ipc47a0692016-10-11 10:54:17 -04004254 if (cnt >= ARRAY_SIZE(pstates))
4255 continue;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004256
Dhaval Patelec10fad2016-08-22 14:40:48 -07004257 pstates[cnt].sde_pstate = to_sde_plane_state(pstate);
4258 pstates[cnt].drm_pstate = pstate;
Clarence Ipc47a0692016-10-11 10:54:17 -04004259 pstates[cnt].stage = sde_plane_get_property(
4260 pstates[cnt].sde_pstate, PLANE_PROP_ZPOS);
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004261 pstates[cnt].pipe_id = sde_plane_pipe(plane);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004262
4263 /* check dim layer stage with every plane */
4264 for (i = 0; i < cstate->num_dim_layers; i++) {
Veera Sundaram Sankaranb9ed6bd2017-07-11 19:18:03 -07004265 if (cstate->dim_layer[i].stage
4266 == (pstates[cnt].stage + SDE_STAGE_0)) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07004267 SDE_ERROR(
4268 "plane:%d/dim_layer:%i-same stage:%d\n",
4269 plane->base.id, i,
4270 cstate->dim_layer[i].stage);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004271 rc = -EINVAL;
4272 goto end;
4273 }
4274 }
4275
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004276 if (pipe_staged[pstates[cnt].pipe_id]) {
4277 multirect_plane[multirect_count].r0 =
4278 pipe_staged[pstates[cnt].pipe_id];
4279 multirect_plane[multirect_count].r1 = pstate;
4280 multirect_count++;
4281
4282 pipe_staged[pstates[cnt].pipe_id] = NULL;
4283 } else {
4284 pipe_staged[pstates[cnt].pipe_id] = pstate;
4285 }
4286
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004287 cnt++;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004288
4289 if (CHECK_LAYER_BOUNDS(pstate->crtc_y, pstate->crtc_h,
4290 mode->vdisplay) ||
4291 CHECK_LAYER_BOUNDS(pstate->crtc_x, pstate->crtc_w,
4292 mode->hdisplay)) {
4293 SDE_ERROR("invalid vertical/horizontal destination\n");
4294 SDE_ERROR("y:%d h:%d vdisp:%d x:%d w:%d hdisp:%d\n",
4295 pstate->crtc_y, pstate->crtc_h, mode->vdisplay,
4296 pstate->crtc_x, pstate->crtc_w, mode->hdisplay);
4297 rc = -E2BIG;
4298 goto end;
4299 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004300 }
4301
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004302 for (i = 1; i < SSPP_MAX; i++) {
Jeykumar Sankarane964dc72017-05-10 19:26:43 -07004303 if (pipe_staged[i]) {
4304 sde_plane_clear_multirect(pipe_staged[i]);
4305
4306 if (is_sde_plane_virtual(pipe_staged[i]->plane)) {
Veera Sundaram Sankaran372596d2017-06-21 17:57:25 -07004307 SDE_ERROR(
4308 "r1 only virt plane:%d not supported\n",
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004309 pipe_staged[i]->plane->base.id);
Veera Sundaram Sankaran372596d2017-06-21 17:57:25 -07004310 rc = -EINVAL;
Jeykumar Sankarane964dc72017-05-10 19:26:43 -07004311 goto end;
4312 }
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004313 }
4314 }
4315
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004316 /* assign mixer stages based on sorted zpos property */
4317 sort(pstates, cnt, sizeof(pstates[0]), pstate_cmp, NULL);
4318
Dhaval Patela8d6bc62017-05-10 17:40:18 -07004319 rc = _sde_crtc_excl_dim_layer_check(state, pstates, cnt);
4320 if (rc)
4321 goto end;
4322
Clarence Ipc47a0692016-10-11 10:54:17 -04004323 if (!sde_is_custom_client()) {
4324 int stage_old = pstates[0].stage;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004325
Clarence Ipc47a0692016-10-11 10:54:17 -04004326 z_pos = 0;
4327 for (i = 0; i < cnt; i++) {
4328 if (stage_old != pstates[i].stage)
4329 ++z_pos;
4330 stage_old = pstates[i].stage;
4331 pstates[i].stage = z_pos;
4332 }
4333 }
4334
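	/*
	 * Illustrative example, not compiled: for non-custom clients the
	 * sorted zpos values are collapsed into contiguous stages above,
	 * e.g. a requested zpos set of {1, 1, 4, 7} becomes stages
	 * {0, 0, 1, 2}, so gaps in client-supplied zpos values do not
	 * waste blend stages.
	 */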
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004335 z_pos = -1;
Clarence Ipc47a0692016-10-11 10:54:17 -04004336 for (i = 0; i < cnt; i++) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004337 /* reset counts at every new blend stage */
4338 if (pstates[i].stage != z_pos) {
4339 left_zpos_cnt = 0;
4340 right_zpos_cnt = 0;
4341 z_pos = pstates[i].stage;
4342 }
Clarence Ipc47a0692016-10-11 10:54:17 -04004343
4344 /* verify z_pos setting before using it */
Clarence Ip649989a2016-10-21 14:28:34 -04004345 if (z_pos >= SDE_STAGE_MAX - SDE_STAGE_0) {
Clarence Ipc47a0692016-10-11 10:54:17 -04004346 SDE_ERROR("> %d plane stages assigned\n",
4347 SDE_STAGE_MAX - SDE_STAGE_0);
4348 rc = -EINVAL;
4349 goto end;
4350 } else if (pstates[i].drm_pstate->crtc_x < mixer_width) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004351 if (left_zpos_cnt == 2) {
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004352 SDE_ERROR("> 2 planes @ stage %d on left\n",
Dhaval Patelec10fad2016-08-22 14:40:48 -07004353 z_pos);
4354 rc = -EINVAL;
4355 goto end;
4356 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004357 left_zpos_cnt++;
4358
Dhaval Patelec10fad2016-08-22 14:40:48 -07004359 } else {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004360 if (right_zpos_cnt == 2) {
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004361 SDE_ERROR("> 2 planes @ stage %d on right\n",
Dhaval Patelec10fad2016-08-22 14:40:48 -07004362 z_pos);
4363 rc = -EINVAL;
4364 goto end;
4365 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004366 right_zpos_cnt++;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004367 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004368
Clarence Ipc47a0692016-10-11 10:54:17 -04004369 pstates[i].sde_pstate->stage = z_pos + SDE_STAGE_0;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004370		SDE_DEBUG("%s: zpos %d\n", sde_crtc->name, z_pos);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004371 }
4372
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004373 for (i = 0; i < multirect_count; i++) {
4374 if (sde_plane_validate_multirect_v2(&multirect_plane[i])) {
4375 SDE_ERROR(
4376 "multirect validation failed for planes (%d - %d)\n",
4377 multirect_plane[i].r0->plane->base.id,
4378 multirect_plane[i].r1->plane->base.id);
4379 rc = -EINVAL;
Alan Kwong9aa061c2016-11-06 21:17:12 -05004380 goto end;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004381 }
4382 }
4383
Alan Kwong9aa061c2016-11-06 21:17:12 -05004384 rc = sde_core_perf_crtc_check(crtc, state);
4385 if (rc) {
4386 SDE_ERROR("crtc%d failed performance check %d\n",
4387 crtc->base.id, rc);
4388 goto end;
4389 }
4390
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004391 /* validate source split:
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004392 * use pstates sorted by stage to check planes on same stage
4393	 * we assume that all pipes are in source split so it's valid to compare
4394 * without taking into account left/right mixer placement
4395 */
4396 for (i = 1; i < cnt; i++) {
4397 struct plane_state *prv_pstate, *cur_pstate;
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004398 struct sde_rect left_rect, right_rect;
4399 int32_t left_pid, right_pid;
4400 int32_t stage;
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004401
4402 prv_pstate = &pstates[i - 1];
4403 cur_pstate = &pstates[i];
4404 if (prv_pstate->stage != cur_pstate->stage)
4405 continue;
4406
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004407 stage = cur_pstate->stage;
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004408
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004409 left_pid = prv_pstate->sde_pstate->base.plane->base.id;
4410 POPULATE_RECT(&left_rect, prv_pstate->drm_pstate->crtc_x,
4411 prv_pstate->drm_pstate->crtc_y,
4412 prv_pstate->drm_pstate->crtc_w,
4413 prv_pstate->drm_pstate->crtc_h, false);
4414
4415 right_pid = cur_pstate->sde_pstate->base.plane->base.id;
4416 POPULATE_RECT(&right_rect, cur_pstate->drm_pstate->crtc_x,
4417 cur_pstate->drm_pstate->crtc_y,
4418 cur_pstate->drm_pstate->crtc_w,
4419 cur_pstate->drm_pstate->crtc_h, false);
4420
4421 if (right_rect.x < left_rect.x) {
4422 swap(left_pid, right_pid);
4423 swap(left_rect, right_rect);
4424 }
4425
4426 /**
4427 * - planes are enumerated in pipe-priority order such that
4428 * planes with lower drm_id must be left-most in a shared
4429 * blend-stage when using source split.
4430 * - planes in source split must be contiguous in width
4431 * - planes in source split must have same dest yoff and height
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004432 */
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004433 if (right_pid < left_pid) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004434 SDE_ERROR(
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004435 "invalid src split cfg. priority mismatch. stage: %d left: %d right: %d\n",
4436 stage, left_pid, right_pid);
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004437 rc = -EINVAL;
4438 goto end;
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004439 } else if (right_rect.x != (left_rect.x + left_rect.w)) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004440 SDE_ERROR(
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004441 "non-contiguous coordinates for src split. stage: %d left: %d - %d right: %d - %d\n",
4442 stage, left_rect.x, left_rect.w,
4443 right_rect.x, right_rect.w);
4444 rc = -EINVAL;
4445 goto end;
4446 } else if ((left_rect.y != right_rect.y) ||
4447 (left_rect.h != right_rect.h)) {
4448 SDE_ERROR(
4449 "source split at stage: %d. invalid yoff/height: l_y: %d r_y: %d l_h: %d r_h: %d\n",
4450 stage, left_rect.y, right_rect.y,
4451 left_rect.h, right_rect.h);
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004452 rc = -EINVAL;
4453 goto end;
4454 }
4455 }
4456
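	/*
	 * Illustrative example, not compiled: with two pipes sharing one
	 * blend stage in source split on a 1920x1080 mode, the checks above
	 * accept left = {x:0, y:0, w:960, h:1080} paired with
	 * right = {x:960, y:0, w:960, h:1080} when the left plane has the
	 * lower drm_id, but reject any x gap/overlap or y/height mismatch.
	 */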
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04004457 rc = _sde_crtc_check_rois(crtc, state);
4458 if (rc) {
4459 SDE_ERROR("crtc%d failed roi check %d\n", crtc->base.id, rc);
4460 goto end;
4461 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004462
Dhaval Patelec10fad2016-08-22 14:40:48 -07004463end:
Alan Kwongcdb2f282017-03-18 13:42:06 -07004464 _sde_crtc_rp_free_unused(&cstate->rp);
Dhaval Patelec10fad2016-08-22 14:40:48 -07004465 return rc;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004466}
4467
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -04004468int sde_crtc_vblank(struct drm_crtc *crtc, bool en)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004469{
Clarence Ip7f70ce42017-03-20 06:53:46 -07004470 struct sde_crtc *sde_crtc;
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004471 int ret;
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -04004472
Clarence Ip7f70ce42017-03-20 06:53:46 -07004473 if (!crtc) {
4474 SDE_ERROR("invalid crtc\n");
4475 return -EINVAL;
4476 }
4477 sde_crtc = to_sde_crtc(crtc);
4478
4479 mutex_lock(&sde_crtc->crtc_lock);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004480 SDE_EVT32(DRMID(&sde_crtc->base), en, sde_crtc->enabled,
4481 sde_crtc->suspend, sde_crtc->vblank_requested);
4482 if (sde_crtc->enabled && !sde_crtc->suspend) {
4483 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, en);
4484 if (ret)
4485 SDE_ERROR("%s vblank enable failed: %d\n",
4486 sde_crtc->name, ret);
4487 }
4488 sde_crtc->vblank_requested = en;
Clarence Ip7f70ce42017-03-20 06:53:46 -07004489 mutex_unlock(&sde_crtc->crtc_lock);
Clarence Ip9728a1d2017-04-18 22:22:13 -04004490
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004491 return 0;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004492}
4493
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004494void sde_crtc_cancel_pending_flip(struct drm_crtc *crtc, struct drm_file *file)
4495{
4496 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
4497
Alan Kwong163d2612016-11-03 00:56:56 -04004498 SDE_DEBUG("%s: cancel: %p\n", sde_crtc->name, file);
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04004499 _sde_crtc_complete_flip(crtc, file);
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004500}
4501
Lloyd Atkinsone08229c2017-10-02 17:53:30 -04004502int sde_crtc_helper_reset_custom_properties(struct drm_crtc *crtc,
4503 struct drm_crtc_state *crtc_state)
4504{
4505 struct sde_crtc *sde_crtc;
4506 struct sde_crtc_state *cstate;
4507 struct drm_property *drm_prop;
4508 enum msm_mdp_crtc_property prop_idx;
4509
4510 if (!crtc || !crtc_state) {
4511 SDE_ERROR("invalid params\n");
4512 return -EINVAL;
4513 }
4514
4515 sde_crtc = to_sde_crtc(crtc);
4516 cstate = to_sde_crtc_state(crtc_state);
4517
4518 for (prop_idx = 0; prop_idx < CRTC_PROP_COUNT; prop_idx++) {
4519 uint64_t val = cstate->property_values[prop_idx].value;
4520 uint64_t def;
4521 int ret;
4522
4523 drm_prop = msm_property_index_to_drm_property(
4524 &sde_crtc->property_info, prop_idx);
4525 if (!drm_prop) {
4526 /* not all props will be installed, based on caps */
4527 SDE_DEBUG("%s: invalid property index %d\n",
4528 sde_crtc->name, prop_idx);
4529 continue;
4530 }
4531
4532 def = msm_property_get_default(&sde_crtc->property_info,
4533 prop_idx);
4534 if (val == def)
4535 continue;
4536
4537 SDE_DEBUG("%s: set prop %s idx %d from %llu to %llu\n",
4538 sde_crtc->name, drm_prop->name, prop_idx, val,
4539 def);
4540
4541 ret = drm_atomic_crtc_set_property(crtc, crtc_state, drm_prop,
4542 def);
4543 if (ret) {
4544 SDE_ERROR("%s: set property failed, idx %d ret %d\n",
4545 sde_crtc->name, prop_idx, ret);
4546 continue;
4547 }
4548 }
4549
4550 return 0;
4551}
4552
Clarence Ip7a753bb2016-07-07 11:47:44 -04004553/**
4554 * sde_crtc_install_properties - install all drm properties for crtc
4555 * @crtc: Pointer to drm crtc structure
4556 */
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004557static void sde_crtc_install_properties(struct drm_crtc *crtc,
4558 struct sde_mdss_cfg *catalog)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004559{
Clarence Ip7a753bb2016-07-07 11:47:44 -04004560 struct sde_crtc *sde_crtc;
4561 struct drm_device *dev;
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004562 struct sde_kms_info *info;
Alan Kwong9aa061c2016-11-06 21:17:12 -05004563 struct sde_kms *sde_kms;
Abhijit Kulkarni50d69442017-04-11 19:50:47 -07004564 static const struct drm_prop_enum_list e_secure_level[] = {
4565 {SDE_DRM_SEC_NON_SEC, "sec_and_non_sec"},
4566 {SDE_DRM_SEC_ONLY, "sec_only"},
4567 };
Clarence Ip7a753bb2016-07-07 11:47:44 -04004568
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04004569 SDE_DEBUG("\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004570
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004571 if (!crtc || !catalog) {
4572 SDE_ERROR("invalid crtc or catalog\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004573 return;
4574 }
4575
4576 sde_crtc = to_sde_crtc(crtc);
4577 dev = crtc->dev;
Alan Kwong9aa061c2016-11-06 21:17:12 -05004578 sde_kms = _sde_crtc_get_kms(crtc);
Clarence Ip7a753bb2016-07-07 11:47:44 -04004579
Narendra Muppallaec11a0a2017-06-15 15:35:17 -07004580 if (!sde_kms) {
4581 SDE_ERROR("invalid argument\n");
4582 return;
4583 }
4584
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004585 info = kzalloc(sizeof(struct sde_kms_info), GFP_KERNEL);
4586 if (!info) {
4587 SDE_ERROR("failed to allocate info memory\n");
4588 return;
4589 }
4590
Clarence Ip7a753bb2016-07-07 11:47:44 -04004591 /* range properties */
4592 msm_property_install_range(&sde_crtc->property_info,
Dhaval Patel4e574842016-08-23 15:11:37 -07004593 "input_fence_timeout", 0x0, 0, SDE_CRTC_MAX_INPUT_FENCE_TIMEOUT,
4594 SDE_CRTC_INPUT_FENCE_TIMEOUT, CRTC_PROP_INPUT_FENCE_TIMEOUT);
4595
4596 msm_property_install_range(&sde_crtc->property_info, "output_fence",
Lloyd Atkinsone08229c2017-10-02 17:53:30 -04004597 0x0, 0, INR_OPEN_MAX, 0, CRTC_PROP_OUTPUT_FENCE);
Clarence Ip1d9728b2016-09-01 11:10:54 -04004598
4599 msm_property_install_range(&sde_crtc->property_info,
4600 "output_fence_offset", 0x0, 0, 1, 0,
4601 CRTC_PROP_OUTPUT_FENCE_OFFSET);
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004602
Alan Kwong9aa061c2016-11-06 21:17:12 -05004603 msm_property_install_range(&sde_crtc->property_info,
4604 "core_clk", 0x0, 0, U64_MAX,
4605 sde_kms->perf.max_core_clk_rate,
4606 CRTC_PROP_CORE_CLK);
4607 msm_property_install_range(&sde_crtc->property_info,
4608 "core_ab", 0x0, 0, U64_MAX,
Alan Kwongff30f4a2017-05-23 12:02:00 -07004609 catalog->perf.max_bw_high * 1000ULL,
Alan Kwong9aa061c2016-11-06 21:17:12 -05004610 CRTC_PROP_CORE_AB);
4611 msm_property_install_range(&sde_crtc->property_info,
4612 "core_ib", 0x0, 0, U64_MAX,
Alan Kwongff30f4a2017-05-23 12:02:00 -07004613 catalog->perf.max_bw_high * 1000ULL,
Alan Kwong9aa061c2016-11-06 21:17:12 -05004614 CRTC_PROP_CORE_IB);
Alan Kwong4aacd532017-02-04 18:51:33 -08004615 msm_property_install_range(&sde_crtc->property_info,
Alan Kwong0230a102017-05-16 11:36:44 -07004616 "llcc_ab", 0x0, 0, U64_MAX,
Alan Kwongff30f4a2017-05-23 12:02:00 -07004617 catalog->perf.max_bw_high * 1000ULL,
Alan Kwong0230a102017-05-16 11:36:44 -07004618 CRTC_PROP_LLCC_AB);
Alan Kwong8c176bf2017-02-09 19:34:32 -08004619 msm_property_install_range(&sde_crtc->property_info,
Alan Kwong0230a102017-05-16 11:36:44 -07004620 "llcc_ib", 0x0, 0, U64_MAX,
Alan Kwongff30f4a2017-05-23 12:02:00 -07004621 catalog->perf.max_bw_high * 1000ULL,
Alan Kwong0230a102017-05-16 11:36:44 -07004622 CRTC_PROP_LLCC_IB);
4623 msm_property_install_range(&sde_crtc->property_info,
4624 "dram_ab", 0x0, 0, U64_MAX,
4625 catalog->perf.max_bw_high * 1000ULL,
4626 CRTC_PROP_DRAM_AB);
4627 msm_property_install_range(&sde_crtc->property_info,
4628 "dram_ib", 0x0, 0, U64_MAX,
4629 catalog->perf.max_bw_high * 1000ULL,
4630 CRTC_PROP_DRAM_IB);
Alan Kwong8c176bf2017-02-09 19:34:32 -08004631 msm_property_install_range(&sde_crtc->property_info,
Alan Kwong4aacd532017-02-04 18:51:33 -08004632 "rot_prefill_bw", 0, 0, U64_MAX,
4633 catalog->perf.max_bw_high * 1000ULL,
4634 CRTC_PROP_ROT_PREFILL_BW);
Alan Kwong8c176bf2017-02-09 19:34:32 -08004635 msm_property_install_range(&sde_crtc->property_info,
4636 "rot_clk", 0, 0, U64_MAX,
4637 sde_kms->perf.max_core_clk_rate,
4638 CRTC_PROP_ROT_CLK);
Alan Kwong9aa061c2016-11-06 21:17:12 -05004639
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05304640 msm_property_install_range(&sde_crtc->property_info,
Sravanthi Kollukudurubf4c7b82017-09-22 18:04:30 +05304641 "idle_time", IDLE_TIMEOUT, 0, U64_MAX, 0,
Dhaval Patele17e0ee2017-08-23 18:01:42 -07004642 CRTC_PROP_IDLE_TIMEOUT);
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05304643
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004644 msm_property_install_blob(&sde_crtc->property_info, "capabilities",
4645 DRM_MODE_PROP_IMMUTABLE, CRTC_PROP_INFO);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004646
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04004647 msm_property_install_volatile_range(&sde_crtc->property_info,
4648 "sde_drm_roi_v1", 0x0, 0, ~0, 0, CRTC_PROP_ROI_V1);
4649
Abhijit Kulkarni50d69442017-04-11 19:50:47 -07004650 msm_property_install_enum(&sde_crtc->property_info, "security_level",
4651 0x0, 0, e_secure_level,
4652 ARRAY_SIZE(e_secure_level),
4653 CRTC_PROP_SECURITY_LEVEL);
4654
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004655 sde_kms_info_reset(info);
4656
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07004657 if (catalog->has_dim_layer) {
4658 msm_property_install_volatile_range(&sde_crtc->property_info,
4659 "dim_layer_v1", 0x0, 0, ~0, 0, CRTC_PROP_DIM_LAYER_V1);
4660 sde_kms_info_add_keyint(info, "dim_layer_v1_max_layers",
4661 SDE_MAX_DIM_LAYERS);
4662 }
4663
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004664 sde_kms_info_add_keyint(info, "hw_version", catalog->hwversion);
4665 sde_kms_info_add_keyint(info, "max_linewidth",
4666 catalog->max_mixer_width);
4667 sde_kms_info_add_keyint(info, "max_blendstages",
4668 catalog->max_mixer_blendstages);
4669 if (catalog->qseed_type == SDE_SSPP_SCALER_QSEED2)
4670 sde_kms_info_add_keystr(info, "qseed_type", "qseed2");
4671 if (catalog->qseed_type == SDE_SSPP_SCALER_QSEED3)
4672 sde_kms_info_add_keystr(info, "qseed_type", "qseed3");
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004673
4674 if (sde_is_custom_client()) {
4675 if (catalog->smart_dma_rev == SDE_SSPP_SMART_DMA_V1)
4676 sde_kms_info_add_keystr(info,
4677 "smart_dma_rev", "smart_dma_v1");
4678 if (catalog->smart_dma_rev == SDE_SSPP_SMART_DMA_V2)
4679 sde_kms_info_add_keystr(info,
4680 "smart_dma_rev", "smart_dma_v2");
4681 }
4682
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05304683 if (catalog->mdp[0].has_dest_scaler) {
4684 sde_kms_info_add_keyint(info, "has_dest_scaler",
4685 catalog->mdp[0].has_dest_scaler);
4686 sde_kms_info_add_keyint(info, "dest_scaler_count",
4687 catalog->ds_count);
4688
4689 if (catalog->ds[0].top) {
4690 sde_kms_info_add_keyint(info,
4691 "max_dest_scaler_input_width",
4692 catalog->ds[0].top->maxinputwidth);
4693 sde_kms_info_add_keyint(info,
4694 "max_dest_scaler_output_width",
4695 catalog->ds[0].top->maxinputwidth);
4696 sde_kms_info_add_keyint(info, "max_dest_scale_up",
4697 catalog->ds[0].top->maxupscale);
4698 }
4699
4700 if (catalog->ds[0].features & BIT(SDE_SSPP_SCALER_QSEED3)) {
4701 msm_property_install_volatile_range(
4702 &sde_crtc->property_info, "dest_scaler",
4703 0x0, 0, ~0, 0, CRTC_PROP_DEST_SCALER);
4704 msm_property_install_blob(&sde_crtc->property_info,
4705 "ds_lut_ed", 0,
4706 CRTC_PROP_DEST_SCALER_LUT_ED);
4707 msm_property_install_blob(&sde_crtc->property_info,
4708 "ds_lut_cir", 0,
4709 CRTC_PROP_DEST_SCALER_LUT_CIR);
4710 msm_property_install_blob(&sde_crtc->property_info,
4711 "ds_lut_sep", 0,
4712 CRTC_PROP_DEST_SCALER_LUT_SEP);
4713 }
4714 }
4715
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004716 sde_kms_info_add_keyint(info, "has_src_split", catalog->has_src_split);
Alan Kwong2f84f8a2016-12-29 13:07:47 -05004717 if (catalog->perf.max_bw_low)
4718 sde_kms_info_add_keyint(info, "max_bandwidth_low",
Alan Kwong6259a382017-04-04 06:18:02 -07004719 catalog->perf.max_bw_low * 1000LL);
Alan Kwong2f84f8a2016-12-29 13:07:47 -05004720 if (catalog->perf.max_bw_high)
4721 sde_kms_info_add_keyint(info, "max_bandwidth_high",
Alan Kwong6259a382017-04-04 06:18:02 -07004722 catalog->perf.max_bw_high * 1000LL);
Narendra Muppallaa50934b2017-08-15 19:43:37 -07004723 if (catalog->perf.min_core_ib)
4724 sde_kms_info_add_keyint(info, "min_core_ib",
4725 catalog->perf.min_core_ib * 1000LL);
4726 if (catalog->perf.min_llcc_ib)
4727 sde_kms_info_add_keyint(info, "min_llcc_ib",
4728 catalog->perf.min_llcc_ib * 1000LL);
4729 if (catalog->perf.min_dram_ib)
4730 sde_kms_info_add_keyint(info, "min_dram_ib",
4731 catalog->perf.min_dram_ib * 1000LL);
Alan Kwong2f84f8a2016-12-29 13:07:47 -05004732 if (sde_kms->perf.max_core_clk_rate)
4733 sde_kms_info_add_keyint(info, "max_mdp_clk",
4734 sde_kms->perf.max_core_clk_rate);
Alan Kwong6259a382017-04-04 06:18:02 -07004735 sde_kms_info_add_keystr(info, "core_ib_ff",
4736 catalog->perf.core_ib_ff);
4737 sde_kms_info_add_keystr(info, "core_clk_ff",
4738 catalog->perf.core_clk_ff);
4739 sde_kms_info_add_keystr(info, "comp_ratio_rt",
4740 catalog->perf.comp_ratio_rt);
4741 sde_kms_info_add_keystr(info, "comp_ratio_nrt",
4742 catalog->perf.comp_ratio_nrt);
4743 sde_kms_info_add_keyint(info, "dest_scale_prefill_lines",
4744 catalog->perf.dest_scale_prefill_lines);
4745 sde_kms_info_add_keyint(info, "undersized_prefill_lines",
4746 catalog->perf.undersized_prefill_lines);
4747 sde_kms_info_add_keyint(info, "macrotile_prefill_lines",
4748 catalog->perf.macrotile_prefill_lines);
4749 sde_kms_info_add_keyint(info, "yuv_nv12_prefill_lines",
4750 catalog->perf.yuv_nv12_prefill_lines);
4751 sde_kms_info_add_keyint(info, "linear_prefill_lines",
4752 catalog->perf.linear_prefill_lines);
4753 sde_kms_info_add_keyint(info, "downscaling_prefill_lines",
4754 catalog->perf.downscaling_prefill_lines);
4755 sde_kms_info_add_keyint(info, "xtra_prefill_lines",
4756 catalog->perf.xtra_prefill_lines);
4757 sde_kms_info_add_keyint(info, "amortizable_threshold",
4758 catalog->perf.amortizable_threshold);
4759 sde_kms_info_add_keyint(info, "min_prefill_lines",
4760 catalog->perf.min_prefill_lines);
4761
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004762 msm_property_set_blob(&sde_crtc->property_info, &sde_crtc->blob_info,
Narendra Muppalla22d17252017-05-31 15:13:39 -07004763 info->data, SDE_KMS_INFO_DATALEN(info), CRTC_PROP_INFO);
Dhaval Patele4a5dda2016-10-13 19:29:30 -07004764
4765 kfree(info);
Clarence Ip7a753bb2016-07-07 11:47:44 -04004766}
4767
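/*
 * Illustrative sketch of the exported data, based on the calls above: the
 * immutable "capabilities" blob (CRTC_PROP_INFO) carries key/value entries
 * such as hw_version, max_linewidth, max_blendstages, qseed_type and the
 * perf bandwidth/prefill limits, encoded by the sde_kms_info_add_key*()
 * helpers for userspace (e.g. a hardware composer) to parse at init time.
 */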
4768/**
4769 * sde_crtc_atomic_set_property - atomically set a crtc drm property
4770 * @crtc: Pointer to drm crtc structure
4771 * @state: Pointer to drm crtc state structure
4772 * @property: Pointer to targeted drm property
4773 * @val: Updated property value
4774 * @Returns: Zero on success
4775 */
4776static int sde_crtc_atomic_set_property(struct drm_crtc *crtc,
4777 struct drm_crtc_state *state,
4778 struct drm_property *property,
4779 uint64_t val)
4780{
4781 struct sde_crtc *sde_crtc;
4782 struct sde_crtc_state *cstate;
Clarence Ipcae1bb62016-07-07 12:07:13 -04004783 int idx, ret = -EINVAL;
Clarence Ip7a753bb2016-07-07 11:47:44 -04004784
4785 if (!crtc || !state || !property) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07004786 SDE_ERROR("invalid argument(s)\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004787 } else {
4788 sde_crtc = to_sde_crtc(crtc);
4789 cstate = to_sde_crtc_state(state);
4790 ret = msm_property_atomic_set(&sde_crtc->property_info,
Clarence Ip4a2955d2017-07-04 18:04:33 -04004791 &cstate->property_state, property, val);
Clarence Ipcae1bb62016-07-07 12:07:13 -04004792 if (!ret) {
4793 idx = msm_property_index(&sde_crtc->property_info,
4794 property);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004795 switch (idx) {
4796 case CRTC_PROP_INPUT_FENCE_TIMEOUT:
Clarence Ipcae1bb62016-07-07 12:07:13 -04004797 _sde_crtc_set_input_fence_timeout(cstate);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004798 break;
4799 case CRTC_PROP_DIM_LAYER_V1:
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05304800 _sde_crtc_set_dim_layer_v1(cstate,
4801 (void __user *)val);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004802 break;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04004803 case CRTC_PROP_ROI_V1:
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05304804 ret = _sde_crtc_set_roi_v1(state,
4805 (void __user *)val);
4806 break;
4807 case CRTC_PROP_DEST_SCALER:
4808 ret = _sde_crtc_set_dest_scaler(sde_crtc,
4809 cstate, (void __user *)val);
4810 break;
4811 case CRTC_PROP_DEST_SCALER_LUT_ED:
4812 case CRTC_PROP_DEST_SCALER_LUT_CIR:
4813 case CRTC_PROP_DEST_SCALER_LUT_SEP:
4814 ret = _sde_crtc_set_dest_scaler_lut(sde_crtc,
4815 cstate, idx);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04004816 break;
Alan Kwong8411a9112017-06-06 19:29:01 -04004817 case CRTC_PROP_CORE_CLK:
Alan Kwongff30f4a2017-05-23 12:02:00 -07004818 case CRTC_PROP_CORE_AB:
4819 case CRTC_PROP_CORE_IB:
Alan Kwongff30f4a2017-05-23 12:02:00 -07004820 cstate->bw_control = true;
4821 break;
Alan Kwong0230a102017-05-16 11:36:44 -07004822 case CRTC_PROP_LLCC_AB:
4823 case CRTC_PROP_LLCC_IB:
4824 case CRTC_PROP_DRAM_AB:
4825 case CRTC_PROP_DRAM_IB:
4826 cstate->bw_control = true;
4827 cstate->bw_split_vote = true;
4828 break;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07004829 case CRTC_PROP_IDLE_TIMEOUT:
4830				_sde_crtc_set_idle_timeout(crtc, val);
				break;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004831 default:
4832 /* nothing to do */
4833 break;
4834 }
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07004835 } else {
4836 ret = sde_cp_crtc_set_property(crtc,
4837 property, val);
Clarence Ipcae1bb62016-07-07 12:07:13 -04004838 }
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07004839 if (ret)
4840 DRM_ERROR("failed to set the property\n");
Alan Kwongcdb2f282017-03-18 13:42:06 -07004841
4842 SDE_DEBUG("crtc%d %s[%d] <= 0x%llx ret=%d\n", crtc->base.id,
4843 property->name, property->base.id, val, ret);
Clarence Ip7a753bb2016-07-07 11:47:44 -04004844 }
4845
4846 return ret;
4847}
4848
4849/**
4850 * sde_crtc_set_property - set a crtc drm property
4851 * @crtc: Pointer to drm crtc structure
4852 * @property: Pointer to targeted drm property
4853 * @val: Updated property value
4854 * @Returns: Zero on success
4855 */
4856static int sde_crtc_set_property(struct drm_crtc *crtc,
4857 struct drm_property *property, uint64_t val)
4858{
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04004859 SDE_DEBUG("\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004860
4861 return sde_crtc_atomic_set_property(crtc, crtc->state, property, val);
4862}
4863
4864/**
4865 * sde_crtc_atomic_get_property - retrieve a crtc drm property
4866 * @crtc: Pointer to drm crtc structure
4867 * @state: Pointer to drm crtc state structure
4868 * @property: Pointer to targeted drm property
4869 * @val: Pointer to variable for receiving property value
4870 * @Returns: Zero on success
4871 */
4872static int sde_crtc_atomic_get_property(struct drm_crtc *crtc,
4873 const struct drm_crtc_state *state,
4874 struct drm_property *property,
4875 uint64_t *val)
4876{
4877 struct sde_crtc *sde_crtc;
4878 struct sde_crtc_state *cstate;
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004879 struct drm_encoder *encoder;
Clarence Ip24f80662016-06-13 19:05:32 -04004880 int i, ret = -EINVAL;
Dhaval Patel5cb59be2017-04-20 20:00:56 -07004881 bool conn_offset = 0;
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004882 bool is_cmd = true;
Clarence Ip7a753bb2016-07-07 11:47:44 -04004883
4884 if (!crtc || !state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07004885 SDE_ERROR("invalid argument(s)\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004886 } else {
4887 sde_crtc = to_sde_crtc(crtc);
4888 cstate = to_sde_crtc_state(state);
Dhaval Patel5cb59be2017-04-20 20:00:56 -07004889
4890 for (i = 0; i < cstate->num_connectors; ++i) {
4891 conn_offset = sde_connector_needs_offset(
4892 cstate->connectors[i]);
4893 if (conn_offset)
4894 break;
4895 }
4896
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004897 /**
4898 * set the cmd flag only when all the encoders attached
4899 * to the crtc are in cmd mode. Consider all other cases
4900 * as video mode.
4901 */
4902 drm_for_each_encoder(encoder, crtc->dev) {
4903 if (encoder->crtc == crtc)
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05304904 is_cmd = sde_encoder_check_mode(encoder,
4905 MSM_DISPLAY_CAP_CMD_MODE);
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004906 }
4907
Clarence Ip24f80662016-06-13 19:05:32 -04004908 i = msm_property_index(&sde_crtc->property_info, property);
4909 if (i == CRTC_PROP_OUTPUT_FENCE) {
Dhaval Patel39323d42017-03-01 23:48:24 -08004910 uint32_t offset = sde_crtc_get_property(cstate,
Clarence Ip1d9728b2016-09-01 11:10:54 -04004911 CRTC_PROP_OUTPUT_FENCE_OFFSET);
4912
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004913 /**
4914 * set the offset to 0 only for cmd mode panels, so
4915 * the release fence for the current frame can be
4916 * triggered right after PP_DONE interrupt.
4917 */
4918 offset = is_cmd ? 0 : (offset + conn_offset);
4919
Dhaval Patel5cb59be2017-04-20 20:00:56 -07004920 ret = sde_fence_create(&sde_crtc->output_fence, val,
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004921 offset);
Clarence Ip1d9728b2016-09-01 11:10:54 -04004922 if (ret)
4923 SDE_ERROR("fence create failed\n");
Clarence Ip24f80662016-06-13 19:05:32 -04004924 } else {
4925 ret = msm_property_atomic_get(&sde_crtc->property_info,
Clarence Ip4a2955d2017-07-04 18:04:33 -04004926 &cstate->property_state,
4927 property, val);
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07004928 if (ret)
4929 ret = sde_cp_crtc_get_property(crtc,
4930 property, val);
Clarence Ip24f80662016-06-13 19:05:32 -04004931 }
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07004932 if (ret)
4933 DRM_ERROR("get property failed\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004934 }
Clarence Ip7a753bb2016-07-07 11:47:44 -04004935 return ret;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004936}
4937
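/*
 * Illustrative flow, not authoritative: a userspace GET on "output_fence"
 * lands in sde_crtc_atomic_get_property() above, which creates a release
 * fence for the upcoming frame via sde_fence_create() and returns it in the
 * property value; the offset is forced to 0 for command-mode panels so the
 * fence can signal right after the PP_DONE interrupt, while video-mode
 * paths add the connector offset on top of the requested one.
 */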
Alan Kwong67a3f792016-11-01 23:16:53 -04004938#ifdef CONFIG_DEBUG_FS
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004939static int _sde_debugfs_status_show(struct seq_file *s, void *data)
Clarence Ip8f7366c2016-07-05 12:15:26 -04004940{
4941 struct sde_crtc *sde_crtc;
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004942 struct sde_plane_state *pstate = NULL;
Clarence Ip8f7366c2016-07-05 12:15:26 -04004943 struct sde_crtc_mixer *m;
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004944
4945 struct drm_crtc *crtc;
4946 struct drm_plane *plane;
4947 struct drm_display_mode *mode;
4948 struct drm_framebuffer *fb;
4949 struct drm_plane_state *state;
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07004950 struct sde_crtc_state *cstate;
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004951
4952 int i, out_width;
Clarence Ip8f7366c2016-07-05 12:15:26 -04004953
4954 if (!s || !s->private)
4955 return -EINVAL;
4956
4957 sde_crtc = s->private;
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004958 crtc = &sde_crtc->base;
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07004959 cstate = to_sde_crtc_state(crtc->state);
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004960
4961 mutex_lock(&sde_crtc->crtc_lock);
4962 mode = &crtc->state->adjusted_mode;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05304963 out_width = sde_crtc_get_mixer_width(sde_crtc, cstate, mode);
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004964
4965 seq_printf(s, "crtc:%d width:%d height:%d\n", crtc->base.id,
4966 mode->hdisplay, mode->vdisplay);
4967
4968 seq_puts(s, "\n");
4969
Clarence Ip8f7366c2016-07-05 12:15:26 -04004970 for (i = 0; i < sde_crtc->num_mixers; ++i) {
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04004971 m = &sde_crtc->mixers[i];
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004972 if (!m->hw_lm)
4973 seq_printf(s, "\tmixer[%d] has no lm\n", i);
4974 else if (!m->hw_ctl)
4975 seq_printf(s, "\tmixer[%d] has no ctl\n", i);
4976 else
4977 seq_printf(s, "\tmixer:%d ctl:%d width:%d height:%d\n",
4978 m->hw_lm->idx - LM_0, m->hw_ctl->idx - CTL_0,
4979 out_width, mode->vdisplay);
Clarence Ip8f7366c2016-07-05 12:15:26 -04004980 }
Dhaval Patel44f12472016-08-29 12:19:47 -07004981
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07004982 seq_puts(s, "\n");
Dhaval Patel48c76022016-09-01 17:51:23 -07004983
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07004984 for (i = 0; i < cstate->num_dim_layers; i++) {
4985 struct sde_hw_dim_layer *dim_layer = &cstate->dim_layer[i];
4986
4987 seq_printf(s, "\tdim_layer:%d] stage:%d flags:%d\n",
4988 i, dim_layer->stage, dim_layer->flags);
4989 seq_printf(s, "\tdst_x:%d dst_y:%d dst_w:%d dst_h:%d\n",
4990 dim_layer->rect.x, dim_layer->rect.y,
4991 dim_layer->rect.w, dim_layer->rect.h);
4992 seq_printf(s,
4993 "\tcolor_0:%d color_1:%d color_2:%d color_3:%d\n",
4994 dim_layer->color_fill.color_0,
4995 dim_layer->color_fill.color_1,
4996 dim_layer->color_fill.color_2,
4997 dim_layer->color_fill.color_3);
4998 seq_puts(s, "\n");
4999 }
5000
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005001 drm_atomic_crtc_for_each_plane(plane, crtc) {
5002 pstate = to_sde_plane_state(plane->state);
5003 state = plane->state;
5004
5005 if (!pstate || !state)
5006 continue;
5007
5008 seq_printf(s, "\tplane:%u stage:%d\n", plane->base.id,
5009 pstate->stage);
5010
5011 if (plane->state->fb) {
5012 fb = plane->state->fb;
5013
5014 seq_printf(s, "\tfb:%d image format:%4.4s wxh:%ux%u bpp:%d\n",
5015 fb->base.id, (char *) &fb->pixel_format,
5016 fb->width, fb->height, fb->bits_per_pixel);
5017
5018 seq_puts(s, "\t");
5019 for (i = 0; i < ARRAY_SIZE(fb->modifier); i++)
5020 seq_printf(s, "modifier[%d]:%8llu ", i,
5021 fb->modifier[i]);
5022 seq_puts(s, "\n");
5023
5024 seq_puts(s, "\t");
5025 for (i = 0; i < ARRAY_SIZE(fb->pitches); i++)
5026 seq_printf(s, "pitches[%d]:%8u ", i,
5027 fb->pitches[i]);
5028 seq_puts(s, "\n");
5029
5030 seq_puts(s, "\t");
5031 for (i = 0; i < ARRAY_SIZE(fb->offsets); i++)
5032 seq_printf(s, "offsets[%d]:%8u ", i,
5033 fb->offsets[i]);
Dhaval Patel48c76022016-09-01 17:51:23 -07005034 seq_puts(s, "\n");
5035 }
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005036
5037 seq_printf(s, "\tsrc_x:%4d src_y:%4d src_w:%4d src_h:%4d\n",
5038 state->src_x, state->src_y, state->src_w, state->src_h);
5039
5040 seq_printf(s, "\tdst x:%4d dst_y:%4d dst_w:%4d dst_h:%4d\n",
5041 state->crtc_x, state->crtc_y, state->crtc_w,
5042 state->crtc_h);
Jeykumar Sankarane964dc72017-05-10 19:26:43 -07005043 seq_printf(s, "\tmultirect: mode: %d index: %d\n",
5044 pstate->multirect_mode, pstate->multirect_index);
Veera Sundaram Sankaran58e12812017-05-05 11:51:09 -07005045
5046 seq_printf(s, "\texcl_rect: x:%4d y:%4d w:%4d h:%4d\n",
5047 pstate->excl_rect.x, pstate->excl_rect.y,
5048 pstate->excl_rect.w, pstate->excl_rect.h);
5049
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005050 seq_puts(s, "\n");
Clarence Ip8f7366c2016-07-05 12:15:26 -04005051 }
Alan Kwong07da0982016-11-04 12:57:45 -04005052
5053 if (sde_crtc->vblank_cb_count) {
5054 ktime_t diff = ktime_sub(ktime_get(), sde_crtc->vblank_cb_time);
5055 s64 diff_ms = ktime_to_ms(diff);
5056 s64 fps = diff_ms ? DIV_ROUND_CLOSEST(
5057 sde_crtc->vblank_cb_count * 1000, diff_ms) : 0;
5058
5059 seq_printf(s,
Dhaval Pateld67cf4a2017-06-14 18:08:32 -07005060 "vblank fps:%lld count:%u total:%llums total_framecount:%llu\n",
5061 fps, sde_crtc->vblank_cb_count,
5062 ktime_to_ms(diff), sde_crtc->play_count);
Alan Kwong07da0982016-11-04 12:57:45 -04005063
5064 /* reset time & count for next measurement */
5065 sde_crtc->vblank_cb_count = 0;
5066 sde_crtc->vblank_cb_time = ktime_set(0, 0);
5067 }
5068
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04005069 seq_printf(s, "vblank_enable:%d\n", sde_crtc->vblank_requested);
Alan Kwong07da0982016-11-04 12:57:45 -04005070
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005071 mutex_unlock(&sde_crtc->crtc_lock);
5072
Clarence Ip8f7366c2016-07-05 12:15:26 -04005073 return 0;
5074}
5075
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005076static int _sde_debugfs_status_open(struct inode *inode, struct file *file)
Clarence Ip8f7366c2016-07-05 12:15:26 -04005077{
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005078 return single_open(file, _sde_debugfs_status_show, inode->i_private);
Clarence Ip8f7366c2016-07-05 12:15:26 -04005079}
5080
Dhaval Patelf9245d62017-03-28 16:24:00 -07005081static ssize_t _sde_crtc_misr_setup(struct file *file,
5082 const char __user *user_buf, size_t count, loff_t *ppos)
5083{
5084 struct sde_crtc *sde_crtc;
5085 struct sde_crtc_mixer *m;
5086 int i = 0, rc;
5087 char buf[MISR_BUFF_SIZE + 1];
5088 u32 frame_count, enable;
5089 size_t buff_copy;
5090
5091 if (!file || !file->private_data)
5092 return -EINVAL;
5093
5094 sde_crtc = file->private_data;
5095 buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
5096 if (copy_from_user(buf, user_buf, buff_copy)) {
5097 SDE_ERROR("buffer copy failed\n");
5098 return -EINVAL;
5099 }
5100
5101 buf[buff_copy] = 0; /* end of string */
5102
5103 if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
5104 return -EINVAL;
5105
5106 rc = _sde_crtc_power_enable(sde_crtc, true);
5107 if (rc)
5108 return rc;
5109
5110 mutex_lock(&sde_crtc->crtc_lock);
5111 sde_crtc->misr_enable = enable;
Dhaval Patel010f5172017-08-01 22:40:09 -07005112 sde_crtc->misr_frame_count = frame_count;
Dhaval Patelf9245d62017-03-28 16:24:00 -07005113 for (i = 0; i < sde_crtc->num_mixers; ++i) {
Dhaval Patel010f5172017-08-01 22:40:09 -07005114 sde_crtc->misr_data[i] = 0;
Dhaval Patelf9245d62017-03-28 16:24:00 -07005115 m = &sde_crtc->mixers[i];
Dhaval Patel010f5172017-08-01 22:40:09 -07005116 if (!m->hw_lm || !m->hw_lm->ops.setup_misr)
Dhaval Patelf9245d62017-03-28 16:24:00 -07005117 continue;
5118
5119 m->hw_lm->ops.setup_misr(m->hw_lm, enable, frame_count);
5120 }
5121 mutex_unlock(&sde_crtc->crtc_lock);
5122 _sde_crtc_power_enable(sde_crtc, false);
5123
5124 return count;
5125}
5126
5127static ssize_t _sde_crtc_misr_read(struct file *file,
5128 char __user *user_buff, size_t count, loff_t *ppos)
5129{
5130 struct sde_crtc *sde_crtc;
5131 struct sde_crtc_mixer *m;
5132 int i = 0, rc;
Dhaval Patel010f5172017-08-01 22:40:09 -07005133 u32 misr_status;
Dhaval Patelf9245d62017-03-28 16:24:00 -07005134 ssize_t len = 0;
5135 char buf[MISR_BUFF_SIZE + 1] = {'\0'};
5136
5137 if (*ppos)
5138 return 0;
5139
5140 if (!file || !file->private_data)
5141 return -EINVAL;
5142
5143 sde_crtc = file->private_data;
5144 rc = _sde_crtc_power_enable(sde_crtc, true);
5145 if (rc)
5146 return rc;
5147
5148 mutex_lock(&sde_crtc->crtc_lock);
5149 if (!sde_crtc->misr_enable) {
5150 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
5151 "disabled\n");
5152 goto buff_check;
5153 }
5154
5155 for (i = 0; i < sde_crtc->num_mixers; ++i) {
5156 m = &sde_crtc->mixers[i];
Dhaval Patel010f5172017-08-01 22:40:09 -07005157 if (!m->hw_lm || !m->hw_lm->ops.collect_misr)
Dhaval Patelf9245d62017-03-28 16:24:00 -07005158 continue;
5159
Dhaval Patel010f5172017-08-01 22:40:09 -07005160 misr_status = m->hw_lm->ops.collect_misr(m->hw_lm);
5161 sde_crtc->misr_data[i] = misr_status ? misr_status :
5162 sde_crtc->misr_data[i];
Dhaval Patelf9245d62017-03-28 16:24:00 -07005163 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "lm idx:%d\n",
5164 m->hw_lm->idx - LM_0);
5165 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
Dhaval Patel010f5172017-08-01 22:40:09 -07005166 sde_crtc->misr_data[i]);
Dhaval Patelf9245d62017-03-28 16:24:00 -07005167 }
5168
5169buff_check:
5170 if (count <= len) {
5171 len = 0;
5172 goto end;
5173 }
5174
5175 if (copy_to_user(user_buff, buf, len)) {
5176 len = -EFAULT;
5177 goto end;
5178 }
5179
5180 *ppos += len; /* increase offset */
5181
5182end:
5183 mutex_unlock(&sde_crtc->crtc_lock);
5184 _sde_crtc_power_enable(sde_crtc, false);
5185 return len;
5186}
5187
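/*
 * Illustrative debugfs usage, assuming the standard DRM debugfs mount point
 * (the exact path depends on the DRM minor and crtc object id):
 *
 *	echo "1 10" > /sys/kernel/debug/dri/0/crtc0/misr_data
 *	cat /sys/kernel/debug/dri/0/crtc0/misr_data
 *
 * The write expects "<enable> <frame_count>" and arms MISR on every mixer;
 * the read prints "disabled" when MISR is off, otherwise one "lm idx:N"
 * line followed by the latest collected value for each mixer.
 */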
5188#define DEFINE_SDE_DEBUGFS_SEQ_FOPS(__prefix) \
Alan Kwong67a3f792016-11-01 23:16:53 -04005189static int __prefix ## _open(struct inode *inode, struct file *file) \
5190{ \
5191 return single_open(file, __prefix ## _show, inode->i_private); \
5192} \
5193static const struct file_operations __prefix ## _fops = { \
5194 .owner = THIS_MODULE, \
5195 .open = __prefix ## _open, \
5196 .release = single_release, \
5197 .read = seq_read, \
5198 .llseek = seq_lseek, \
5199}
5200
5201static int sde_crtc_debugfs_state_show(struct seq_file *s, void *v)
5202{
5203 struct drm_crtc *crtc = (struct drm_crtc *) s->private;
Alan Kwong751cf462017-06-08 10:26:46 -04005204 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
Alan Kwong67a3f792016-11-01 23:16:53 -04005205 struct sde_crtc_state *cstate = to_sde_crtc_state(crtc->state);
Alan Kwongcdb2f282017-03-18 13:42:06 -07005206 struct sde_crtc_res *res;
Alan Kwong310e9b02017-08-03 02:04:07 -04005207 struct sde_crtc_respool *rp;
Alan Kwong0230a102017-05-16 11:36:44 -07005208 int i;
Alan Kwong67a3f792016-11-01 23:16:53 -04005209
5210 seq_printf(s, "num_connectors: %d\n", cstate->num_connectors);
Dhaval Patel4d424602017-02-18 19:40:14 -08005211 seq_printf(s, "client type: %d\n", sde_crtc_get_client_type(crtc));
Alan Kwong3e985f02017-02-12 15:08:44 -08005212 seq_printf(s, "intf_mode: %d\n", sde_crtc_get_intf_mode(crtc));
Alan Kwong751cf462017-06-08 10:26:46 -04005213 seq_printf(s, "core_clk_rate: %llu\n",
5214 sde_crtc->cur_perf.core_clk_rate);
Alan Kwong0230a102017-05-16 11:36:44 -07005215 for (i = SDE_POWER_HANDLE_DBUS_ID_MNOC;
5216 i < SDE_POWER_HANDLE_DBUS_ID_MAX; i++) {
5217 seq_printf(s, "bw_ctl[%s]: %llu\n",
5218 sde_power_handle_get_dbus_name(i),
5219 sde_crtc->cur_perf.bw_ctl[i]);
5220 seq_printf(s, "max_per_pipe_ib[%s]: %llu\n",
5221 sde_power_handle_get_dbus_name(i),
5222 sde_crtc->cur_perf.max_per_pipe_ib[i]);
5223 }
Alan Kwong67a3f792016-11-01 23:16:53 -04005224
Alan Kwong310e9b02017-08-03 02:04:07 -04005225 mutex_lock(&sde_crtc->rp_lock);
5226 list_for_each_entry(rp, &sde_crtc->rp_head, rp_list) {
5227 seq_printf(s, "rp.%d: ", rp->sequence_id);
5228 list_for_each_entry(res, &rp->res_list, list)
5229 seq_printf(s, "0x%x/0x%llx/%pK/%d ",
5230 res->type, res->tag, res->val,
5231 atomic_read(&res->refcount));
5232 seq_puts(s, "\n");
5233 }
5234 mutex_unlock(&sde_crtc->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -07005235
Alan Kwong67a3f792016-11-01 23:16:53 -04005236 return 0;
5237}
5238DEFINE_SDE_DEBUGFS_SEQ_FOPS(sde_crtc_debugfs_state);
5239
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005240static int _sde_crtc_init_debugfs(struct drm_crtc *crtc)
Clarence Ip8f7366c2016-07-05 12:15:26 -04005241{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005242 struct sde_crtc *sde_crtc;
5243 struct sde_kms *sde_kms;
5244
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07005245 static const struct file_operations debugfs_status_fops = {
5246 .open = _sde_debugfs_status_open,
Clarence Ip8f7366c2016-07-05 12:15:26 -04005247 .read = seq_read,
5248 .llseek = seq_lseek,
5249 .release = single_release,
5250 };
Dhaval Patelf9245d62017-03-28 16:24:00 -07005251 static const struct file_operations debugfs_misr_fops = {
5252 .open = simple_open,
5253 .read = _sde_crtc_misr_read,
5254 .write = _sde_crtc_misr_setup,
5255 };
Alan Kwong67a3f792016-11-01 23:16:53 -04005256
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005257 if (!crtc)
5258 return -EINVAL;
5259 sde_crtc = to_sde_crtc(crtc);
5260
5261 sde_kms = _sde_crtc_get_kms(crtc);
5262 if (!sde_kms)
5263 return -EINVAL;
5264
5265 sde_crtc->debugfs_root = debugfs_create_dir(sde_crtc->name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07005266 crtc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005267 if (!sde_crtc->debugfs_root)
5268 return -ENOMEM;
5269
5270 /* don't error check these */
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04005271 debugfs_create_file("status", 0400,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005272 sde_crtc->debugfs_root,
5273 sde_crtc, &debugfs_status_fops);
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04005274 debugfs_create_file("state", 0600,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005275 sde_crtc->debugfs_root,
5276 &sde_crtc->base,
5277 &sde_crtc_debugfs_state_fops);
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04005278 debugfs_create_file("misr_data", 0600, sde_crtc->debugfs_root,
Dhaval Patelf9245d62017-03-28 16:24:00 -07005279 sde_crtc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005280
5281 return 0;
5282}
5283
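/*
 * Resulting layout, for reference (a sketch; the parent directory comes
 * from the DRM core's per-minor debugfs root):
 *
 *	<debugfs>/dri/<minor>/crtc<id>/status     (0400, mixer/plane dump)
 *	<debugfs>/dri/<minor>/crtc<id>/state      (0600, perf/resource state)
 *	<debugfs>/dri/<minor>/crtc<id>/misr_data  (0600, MISR control/read)
 */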
5284static void _sde_crtc_destroy_debugfs(struct drm_crtc *crtc)
5285{
5286 struct sde_crtc *sde_crtc;
5287
5288 if (!crtc)
5289 return;
5290 sde_crtc = to_sde_crtc(crtc);
5291 debugfs_remove_recursive(sde_crtc->debugfs_root);
Clarence Ip8f7366c2016-07-05 12:15:26 -04005292}
Alan Kwong67a3f792016-11-01 23:16:53 -04005293#else
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005294static int _sde_crtc_init_debugfs(struct drm_crtc *crtc)
Alan Kwong67a3f792016-11-01 23:16:53 -04005295{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005296 return 0;
Alan Kwong67a3f792016-11-01 23:16:53 -04005297}
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005298
5299static void _sde_crtc_destroy_debugfs(struct drm_crtc *crtc)
5300{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005301}
5302#endif /* CONFIG_DEBUG_FS */
5303
5304static int sde_crtc_late_register(struct drm_crtc *crtc)
5305{
5306 return _sde_crtc_init_debugfs(crtc);
5307}
5308
5309static void sde_crtc_early_unregister(struct drm_crtc *crtc)
5310{
5311 _sde_crtc_destroy_debugfs(crtc);
5312}
5313
5314static const struct drm_crtc_funcs sde_crtc_funcs = {
5315 .set_config = drm_atomic_helper_set_config,
5316 .destroy = sde_crtc_destroy,
5317 .page_flip = drm_atomic_helper_page_flip,
5318 .set_property = sde_crtc_set_property,
5319 .atomic_set_property = sde_crtc_atomic_set_property,
5320 .atomic_get_property = sde_crtc_atomic_get_property,
5321 .reset = sde_crtc_reset,
5322 .atomic_duplicate_state = sde_crtc_duplicate_state,
5323 .atomic_destroy_state = sde_crtc_destroy_state,
5324 .late_register = sde_crtc_late_register,
5325 .early_unregister = sde_crtc_early_unregister,
5326};
5327
5328static const struct drm_crtc_helper_funcs sde_crtc_helper_funcs = {
5329 .mode_fixup = sde_crtc_mode_fixup,
5330 .disable = sde_crtc_disable,
5331 .enable = sde_crtc_enable,
5332 .atomic_check = sde_crtc_atomic_check,
5333 .atomic_begin = sde_crtc_atomic_begin,
5334 .atomic_flush = sde_crtc_atomic_flush,
5335};
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005336
Clarence Ipa18d4832017-03-13 12:35:44 -07005337static void _sde_crtc_event_cb(struct kthread_work *work)
5338{
5339 struct sde_crtc_event *event;
5340 struct sde_crtc *sde_crtc;
5341 unsigned long irq_flags;
5342
5343 if (!work) {
5344 SDE_ERROR("invalid work item\n");
5345 return;
5346 }
5347
5348 event = container_of(work, struct sde_crtc_event, kt_work);
Clarence Ipa18d4832017-03-13 12:35:44 -07005349
5350 /* set sde_crtc to NULL for static work structures */
5351 sde_crtc = event->sde_crtc;
5352 if (!sde_crtc)
5353 return;
5354
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07005355 if (event->cb_func)
5356 event->cb_func(&sde_crtc->base, event->usr);
5357
Clarence Ipa18d4832017-03-13 12:35:44 -07005358 spin_lock_irqsave(&sde_crtc->event_lock, irq_flags);
5359 list_add_tail(&event->list, &sde_crtc->event_free_list);
5360 spin_unlock_irqrestore(&sde_crtc->event_lock, irq_flags);
5361}
5362
5363int sde_crtc_event_queue(struct drm_crtc *crtc,
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07005364 void (*func)(struct drm_crtc *crtc, void *usr), void *usr)
Clarence Ipa18d4832017-03-13 12:35:44 -07005365{
5366 unsigned long irq_flags;
5367 struct sde_crtc *sde_crtc;
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005368 struct msm_drm_private *priv;
Clarence Ipa18d4832017-03-13 12:35:44 -07005369 struct sde_crtc_event *event = NULL;
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005370 u32 crtc_id;
Clarence Ipa18d4832017-03-13 12:35:44 -07005371
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005372 if (!crtc || !crtc->dev || !crtc->dev->dev_private || !func) {
5373 SDE_ERROR("invalid parameters\n");
Clarence Ipa18d4832017-03-13 12:35:44 -07005374 return -EINVAL;
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005375 }
Clarence Ipa18d4832017-03-13 12:35:44 -07005376 sde_crtc = to_sde_crtc(crtc);
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005377 priv = crtc->dev->dev_private;
5378 crtc_id = drm_crtc_index(crtc);
Clarence Ipa18d4832017-03-13 12:35:44 -07005379
5380 /*
5381 * Obtain an event struct from the private cache. This event
5382 * queue may be called from ISR contexts, so use a private
5383 * cache to avoid calling any memory allocation functions.
5384 */
5385 spin_lock_irqsave(&sde_crtc->event_lock, irq_flags);
5386 if (!list_empty(&sde_crtc->event_free_list)) {
5387 event = list_first_entry(&sde_crtc->event_free_list,
5388 struct sde_crtc_event, list);
5389 list_del_init(&event->list);
5390 }
5391 spin_unlock_irqrestore(&sde_crtc->event_lock, irq_flags);
5392
5393 if (!event)
5394 return -ENOMEM;
5395
5396 /* populate event node */
5397 event->sde_crtc = sde_crtc;
5398 event->cb_func = func;
5399 event->usr = usr;
5400
5401 /* queue new event request */
5402 kthread_init_work(&event->kt_work, _sde_crtc_event_cb);
Veera Sundaram Sankaran10ea2bd2017-06-14 14:10:57 -07005403 kthread_queue_work(&priv->event_thread[crtc_id].worker,
5404 &event->kt_work);
Clarence Ipa18d4832017-03-13 12:35:44 -07005405
5406 return 0;
5407}
5408
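/*
 * Illustrative usage, with a hypothetical caller (not part of this file):
 *
 *	static void _my_done_cb(struct drm_crtc *crtc, void *usr)
 *	{
 *		// runs later on the crtc's event thread
 *	}
 *
 *	sde_crtc_event_queue(crtc, _my_done_cb, ctx);
 *
 * Because the event nodes come from the preallocated per-crtc cache and the
 * work is handed to priv->event_thread[], the call itself is safe from
 * atomic/ISR context as noted above.
 */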
5409static int _sde_crtc_init_events(struct sde_crtc *sde_crtc)
5410{
5411 int i, rc = 0;
5412
5413 if (!sde_crtc) {
5414 SDE_ERROR("invalid crtc\n");
5415 return -EINVAL;
5416 }
5417
5418 spin_lock_init(&sde_crtc->event_lock);
5419
5420 INIT_LIST_HEAD(&sde_crtc->event_free_list);
5421 for (i = 0; i < SDE_CRTC_MAX_EVENT_COUNT; ++i)
5422 list_add_tail(&sde_crtc->event_cache[i].list,
5423 &sde_crtc->event_free_list);
5424
Dhaval Patel5023c3c2017-08-22 12:40:11 -07005425 INIT_LIST_HEAD(&sde_crtc->retire_event_list);
5426 for (i = 0; i < ARRAY_SIZE(sde_crtc->retire_events); i++)
5427 INIT_LIST_HEAD(&sde_crtc->retire_events[i].list);
5428
Clarence Ipa18d4832017-03-13 12:35:44 -07005429 return rc;
5430}
5431
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04005432/* initialize crtc */
Lloyd Atkinsonac933642016-09-14 11:52:00 -04005433struct drm_crtc *sde_crtc_init(struct drm_device *dev, struct drm_plane *plane)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005434{
5435 struct drm_crtc *crtc = NULL;
Clarence Ip8f7366c2016-07-05 12:15:26 -04005436 struct sde_crtc *sde_crtc = NULL;
5437 struct msm_drm_private *priv = NULL;
5438 struct sde_kms *kms = NULL;
Clarence Ipa18d4832017-03-13 12:35:44 -07005439 int i, rc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005440
Clarence Ip8f7366c2016-07-05 12:15:26 -04005441 priv = dev->dev_private;
5442 kms = to_sde_kms(priv->kms);
5443
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005444 sde_crtc = kzalloc(sizeof(*sde_crtc), GFP_KERNEL);
5445 if (!sde_crtc)
5446 return ERR_PTR(-ENOMEM);
5447
5448 crtc = &sde_crtc->base;
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07005449 crtc->dev = dev;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005450
Clarence Ip7f70ce42017-03-20 06:53:46 -07005451 mutex_init(&sde_crtc->crtc_lock);
Alan Kwong628d19e2016-10-31 13:50:13 -04005452 spin_lock_init(&sde_crtc->spin_lock);
5453 atomic_set(&sde_crtc->frame_pending, 0);
5454
Alan Kwong310e9b02017-08-03 02:04:07 -04005455 mutex_init(&sde_crtc->rp_lock);
5456 INIT_LIST_HEAD(&sde_crtc->rp_head);
5457
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07005458 init_completion(&sde_crtc->frame_done_comp);
Veera Sundaram Sankaran97dc5152017-10-10 20:24:48 -07005459 sde_crtc->enabled = false;
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07005460
Alan Kwong628d19e2016-10-31 13:50:13 -04005461 INIT_LIST_HEAD(&sde_crtc->frame_event_list);
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07005462 INIT_LIST_HEAD(&sde_crtc->user_event_list);
Alan Kwong628d19e2016-10-31 13:50:13 -04005463 for (i = 0; i < ARRAY_SIZE(sde_crtc->frame_events); i++) {
5464 INIT_LIST_HEAD(&sde_crtc->frame_events[i].list);
5465 list_add(&sde_crtc->frame_events[i].list,
5466 &sde_crtc->frame_event_list);
5467 kthread_init_work(&sde_crtc->frame_events[i].work,
5468 sde_crtc_frame_event_work);
5469 }
5470
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07005471 drm_crtc_init_with_planes(dev, crtc, plane, NULL, &sde_crtc_funcs,
5472 NULL);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005473
5474 drm_crtc_helper_add(crtc, &sde_crtc_helper_funcs);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04005475 plane->crtc = crtc;
5476
Clarence Ip8f7366c2016-07-05 12:15:26 -04005477 /* save user friendly CRTC name for later */
5478 snprintf(sde_crtc->name, SDE_CRTC_NAME_SIZE, "crtc%u", crtc->base.id);
5479
Clarence Ipa18d4832017-03-13 12:35:44 -07005480 /* initialize event handling */
5481 rc = _sde_crtc_init_events(sde_crtc);
5482 if (rc) {
5483 drm_crtc_cleanup(crtc);
5484 kfree(sde_crtc);
5485 return ERR_PTR(rc);
5486 }
5487
Clarence Ip9a74a442016-08-25 18:29:03 -04005488 /* initialize output fence support */
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04005489 sde_fence_init(&sde_crtc->output_fence, sde_crtc->name, crtc->base.id);
Clarence Ip24f80662016-06-13 19:05:32 -04005490
Clarence Ip7a753bb2016-07-07 11:47:44 -04005491 /* create CRTC properties */
5492 msm_property_init(&sde_crtc->property_info, &crtc->base, dev,
5493 priv->crtc_property, sde_crtc->property_data,
5494 CRTC_PROP_COUNT, CRTC_PROP_BLOBCOUNT,
5495 sizeof(struct sde_crtc_state));
5496
Dhaval Patele4a5dda2016-10-13 19:29:30 -07005497 sde_crtc_install_properties(crtc, kms->catalog);
Gopikrishnaiah Anandan703eb902016-10-06 18:43:57 -07005498
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05305499 /* Init dest scaler */
5500 _sde_crtc_dest_scaler_init(sde_crtc, kms->catalog);
5501
Gopikrishnaiah Anandan703eb902016-10-06 18:43:57 -07005502 /* Install color processing properties */
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07005503 sde_cp_crtc_init(crtc);
Gopikrishnaiah Anandan703eb902016-10-06 18:43:57 -07005504 sde_cp_crtc_install_properties(crtc);
Clarence Ip7a753bb2016-07-07 11:47:44 -04005505
Dhaval Patelec10fad2016-08-22 14:40:48 -07005506 SDE_DEBUG("%s: successfully initialized crtc\n", sde_crtc->name);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005507 return crtc;
5508}
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07005509
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005510static int _sde_crtc_event_enable(struct sde_kms *kms,
5511 struct drm_crtc *crtc_drm, u32 event)
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07005512{
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005513 struct sde_crtc *crtc = NULL;
5514 struct sde_crtc_irq_info *node;
5515 struct msm_drm_private *priv;
5516 unsigned long flags;
5517 bool found = false;
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07005518 int ret, i = 0;
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005519
5520 crtc = to_sde_crtc(crtc_drm);
5521 spin_lock_irqsave(&crtc->spin_lock, flags);
5522 list_for_each_entry(node, &crtc->user_event_list, list) {
5523 if (node->event == event) {
5524 found = true;
5525 break;
5526 }
5527 }
5528 spin_unlock_irqrestore(&crtc->spin_lock, flags);
5529
5530 /* event already enabled */
5531 if (found)
5532 return 0;
5533
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07005534 node = NULL;
5535 for (i = 0; i < ARRAY_SIZE(custom_events); i++) {
5536 if (custom_events[i].event == event &&
5537 custom_events[i].func) {
5538 node = kzalloc(sizeof(*node), GFP_KERNEL);
5539 if (!node)
5540 return -ENOMEM;
5541 node->event = event;
5542 INIT_LIST_HEAD(&node->list);
5543 node->func = custom_events[i].func;
5545 break;
5546 }
5547 }
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005548
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07005549 if (!node) {
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005550 SDE_ERROR("unsupported event %x\n", event);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005551 return -EINVAL;
5552 }
5553
5554 priv = kms->dev->dev_private;
5555 ret = 0;
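	/* register with the hardware only while the crtc is enabled */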
5556 if (crtc_drm->enabled) {
5557 sde_power_resource_enable(&priv->phandle, kms->core_client,
5558 true);
Xu Yang37752282017-08-21 13:50:23 +08005559 INIT_LIST_HEAD(&node->irq.list);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005560 ret = node->func(crtc_drm, true, &node->irq);
5561 sde_power_resource_enable(&priv->phandle, kms->core_client,
5562 false);
5563 }
5564
5565 if (!ret) {
5566 spin_lock_irqsave(&crtc->spin_lock, flags);
Xu Yang5e53c2e2017-07-11 16:46:28 +08005567		/* irq is registered and enabled; set the state */
5568 node->state = IRQ_ENABLED;
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005569 list_add_tail(&node->list, &crtc->user_event_list);
5570 spin_unlock_irqrestore(&crtc->spin_lock, flags);
5571 } else {
5572 kfree(node);
5573 }
5574
5575 return ret;
5576}
5577
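/**
 * _sde_crtc_event_disable - disable and de-register a custom crtc event
 * @kms: Pointer to sde kms
 * @crtc_drm: Pointer to the drm crtc on which the event was enabled
 * @event: Custom event identifier
 *
 * Removes the matching irq info node from the crtc user event list and,
 * if the crtc is still enabled, calls the event handler to de-register
 * the interrupt under a power resource vote.
 *
 * Returns: 0 if the event was not enabled or needed no de-registration,
 * otherwise the return value of the event callback.
 */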
5578static int _sde_crtc_event_disable(struct sde_kms *kms,
5579 struct drm_crtc *crtc_drm, u32 event)
5580{
5581 struct sde_crtc *crtc = NULL;
5582 struct sde_crtc_irq_info *node = NULL;
5583 struct msm_drm_private *priv;
5584 unsigned long flags;
5585 bool found = false;
5586 int ret;
5587
5588 crtc = to_sde_crtc(crtc_drm);
5589 spin_lock_irqsave(&crtc->spin_lock, flags);
5590 list_for_each_entry(node, &crtc->user_event_list, list) {
5591 if (node->event == event) {
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005592 found = true;
5593 break;
5594 }
5595 }
5596 spin_unlock_irqrestore(&crtc->spin_lock, flags);
5597
5598 /* event already disabled */
5599 if (!found)
5600 return 0;
5601
5602	/*
5603	 * If the crtc is disabled, interrupts are already cleared; just
5604	 * remove the node from the list, no need to disable/de-register.
5605	 */
5606 if (!crtc_drm->enabled) {
Xu Yang5e53c2e2017-07-11 16:46:28 +08005607 list_del(&node->list);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005608 kfree(node);
5609 return 0;
5610 }
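	/* crtc still enabled: de-register the handler with power enabled */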
5611 priv = kms->dev->dev_private;
5612 sde_power_resource_enable(&priv->phandle, kms->core_client, true);
5613 ret = node->func(crtc_drm, false, &node->irq);
Xu Yang5e53c2e2017-07-11 16:46:28 +08005614 list_del(&node->list);
5615 kfree(node);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005616 sde_power_resource_enable(&priv->phandle, kms->core_client, false);
5617 return ret;
5618}
5619
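/**
 * sde_crtc_register_custom_event - enable or disable a custom crtc event
 * @kms: Pointer to sde kms
 * @crtc_drm: Pointer to the target drm crtc
 * @event: Custom event identifier
 * @en: True to enable the event, false to disable it
 *
 * Returns: 0 on success, negative error code on failure.
 */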
5620int sde_crtc_register_custom_event(struct sde_kms *kms,
5621 struct drm_crtc *crtc_drm, u32 event, bool en)
5622{
5623 struct sde_crtc *crtc = NULL;
5624 int ret;
5625
5626 crtc = to_sde_crtc(crtc_drm);
5627 if (!crtc || !kms || !kms->dev) {
5628 DRM_ERROR("invalid sde_crtc %pK kms %pK dev %pK\n", crtc,
5629 kms, ((kms) ? (kms->dev) : NULL));
5630 return -EINVAL;
5631 }
5632
5633 if (en)
5634 ret = _sde_crtc_event_enable(kms, crtc_drm, event);
5635 else
5636 ret = _sde_crtc_event_disable(kms, crtc_drm, event);
5637
5638 return ret;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07005639}
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -07005640
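/*
 * Power event interrupt handler hook; currently a no-op that reports
 * success unconditionally.
 */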
5641static int sde_crtc_power_interrupt_handler(struct drm_crtc *crtc_drm,
5642 bool en, struct sde_irq_callback *irq)
5643{
5644 return 0;
5645}
Benjamin Chan90139102017-06-21 16:00:39 -04005646
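/*
 * Idle event interrupt handler hook; currently a no-op that reports
 * success unconditionally.
 */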
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05305647static int sde_crtc_idle_interrupt_handler(struct drm_crtc *crtc_drm,
5648 bool en, struct sde_irq_callback *irq)
5649{
5650 return 0;
5651}